branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>wysohn/HideOres<file_sep>/HideOres/src/com/hideores/utils/BlockHider.java
package com.hideores.utils;
import com.hideores.main.HideOres;
import com.hideores.main.HideOresConfigs;
public class BlockHider {

public static HideOresConfigs config = HideOres.config;

/**
 * Scans raw chunk block data and replaces every configured ore id with a
 * randomly chosen replacement block, writing the result back into
 * {@code original} in place.
 *
 * @param original      raw block data; each entry is either a little-endian
 *                      short ({@code type << 4 | meta}) when
 *                      {@code newDataSystem} is true, or a single block-id
 *                      byte otherwise
 * @param newDataSystem true for the newer short-based chunk format
 */
public static void checkAndCopy(byte[] original, boolean newDataSystem) {
    LittleEndianReader LER = new LittleEndianReader(original);
    if (newDataSystem) {
        while (LER.getRemaining() > 0) {
            short data = LER.readShort();
            int type = (data >> 4); // upper 12 bits = block type, low 4 = meta
            if (config.getOres().contains(type)) {
                int replacingType = pickReplacingType();
                int replacingMeta = 0; // replacement block always uses meta 0
                short combine = (short) ((replacingType << 4) | replacingMeta);
                LER.putShort(combine); // overwrite the short just read
            }
        }
    } else {
        while (LER.getRemaining() > 0) {
            byte data = LER.readByte();
            if (config.getOres().contains(Integer.valueOf(data))) {
                LER.putByte((byte) pickReplacingType()); // overwrite the byte just read
            }
        }
    }
    // No copy-back needed: LittleEndianReader wraps `original` directly, so
    // every putByte/putShort call already wrote through to the caller's
    // array. The previous System.arraycopy copied the array onto itself.
}

/** Picks a random block id from the configured replacement list (stone, id 1, as fallback). */
private static int pickReplacingType() {
    if (config.getReplacingBlocks().size() == 0) {
        return 1;
    }
    int random = (int) (Math.random() * (config.getReplacingBlocks().size()));
    return config.getReplacingBlocks().get(random);
}

/**
 * Truncates each char of {@code original} to its low 8 bits, in place.
 * Fix: the previous implementation built a byte[] copy and then called
 * System.arraycopy(byte[], char[]), which always throws
 * ArrayStoreException at runtime. This element-wise loop preserves the
 * apparent intent (keep only the low byte of each value) — TODO confirm
 * against callers, since none are visible here.
 */
public static void checkAndCopy(char[] original) {
    for (int i = 0; i < original.length; i++) {
        original[i] = (char) (original[i] & 0xFF);
    }
}
}
<file_sep>/HideOres/src/com/hideores/core/v1_7_R2/PlayerHooker.java
package com.hideores.core.v1_7_R2;
import org.bukkit.craftbukkit.v1_7_R2.entity.CraftPlayer;
import org.bukkit.entity.Player;
import com.hideores.core.Iinstance.IPlayerHooker;
import com.hideores.utils.ReflectionHelper;
public class PlayerHooker implements IPlayerHooker{
public void hookPlayer(Player player){
CraftPlayer p = (CraftPlayer) player;
ReflectionHelper.setPrivateFinal(p.getHandle(), "chunkCoordIntPairQueue", new ChunkCoordQueue(p));
}
}
<file_sep>/HideOres/src/com/hideores/main/HideOresConfigs.java
package com.hideores.main;
import java.util.ArrayList;
import java.util.List;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.java.JavaPlugin;
/**
 * Thin wrapper around the plugin's config.yml that exposes the ore ids to
 * hide, the replacement block ids and the reveal radius.
 */
public class HideOresConfigs {

private FileConfiguration config;
private JavaPlugin plugin;

/** Returns the raw Bukkit configuration backing this wrapper. */
public FileConfiguration getConfig() {
    return config;
}

// Defaults used when the config file has no values yet.
// (Plain copies instead of the previous double-brace anonymous-subclass
// initialization, which creates a needless inner class.)
private List<Integer> ores = new ArrayList<Integer>(java.util.Arrays.asList(56));
private List<Integer> replacingBlocks = new ArrayList<Integer>(java.util.Arrays.asList(1, 3));
private int radius = 6;

/**
 * Registers defaults, loads any user-edited values and writes the merged
 * result back to disk so a fresh install gets a populated config file.
 */
public HideOresConfigs(JavaPlugin plugin) {
    this.config = plugin.getConfig();
    this.plugin = plugin;
    config.options().copyDefaults(true);
    config.addDefault("ores", ores);
    config.addDefault("replacingBlocks", replacingBlocks);
    config.addDefault("radius", radius);
    loadConfigs();
    plugin.saveConfig();
}

/** Reads all settings from the configuration into this wrapper's fields. */
public void loadConfigs() {
    ores = config.getIntegerList("ores");
    replacingBlocks = config.getIntegerList("replacingBlocks");
    // Fix: radius was registered as a default but never read back, so a
    // user-configured radius was silently ignored.
    radius = config.getInt("radius", radius);
}

/** Writes all settings back to the configuration file. */
public void saveConfigs() {
    config.set("ores", this.getOres());
    config.set("replacingBlocks", this.getReplacingBlocks());
    config.set("radius", this.getRadius()); // keep radius in sync like the lists
    plugin.saveConfig();
}

/** Block ids that should be hidden from clients. */
public List<Integer> getOres() {
    return ores;
}

public void setOres(List<Integer> ores) {
    this.ores = ores;
}

/** Block ids that hidden ores are replaced with. */
public List<Integer> getReplacingBlocks() {
    return replacingBlocks;
}

public void setReplacingBlocks(List<Integer> replacingBlocks) {
    this.replacingBlocks = replacingBlocks;
}

/** Radius (in blocks) within which hidden ores are revealed. */
public int getRadius() {
    return radius;
}

public void setRadius(int radius) {
    this.radius = radius;
}
}
<file_sep>/HideOres/src/com/hideores/core/v1_8_R2/MapChunkCalculation.java
package com.hideores.core.v1_8_R2;
import java.util.Map;
import org.bukkit.World;
import com.hideores.cache.ChunkCoord;
import com.hideores.cache.ChunkMapCache;
import com.hideores.core.Iinstance.IMapChunkCalculation;
import com.hideores.main.HideOres;
import com.hideores.utils.BlockHider;
import com.hideores.utils.ReflectionHelper;
import net.minecraft.server.v1_8_R2.PacketPlayOutMapChunk;
import net.minecraft.server.v1_8_R2.PacketPlayOutMapChunk.ChunkMap;
import net.minecraft.server.v1_8_R2.PacketPlayOutMapChunkBulk;
/**
 * v1_8_R2 implementation that rewrites outgoing map-chunk packets so ore
 * blocks are hidden before they ever reach the client. Packet fields are
 * accessed reflectively by their obfuscated names ("a", "b", "c").
 * NOTE(review): both methods here are static even though the class
 * implements IMapChunkCalculation — confirm how the interface contract is
 * actually satisfied.
 */
public class MapChunkCalculation implements IMapChunkCalculation{
/**
 * Rewrites a single-chunk packet: clones its ChunkMap, hides ores in the
 * clone (serving from the per-chunk cache when possible) and swaps the
 * clone back into the packet. The original chunk data is never mutated.
 */
public static void calcAndChange(World world, PacketPlayOutMapChunk packet){
// Chunk coordinates live in the packet's private fields "a" (x) and "b" (z).
int x = (int) ReflectionHelper.getPrivateField(packet, "a");
int z = (int) ReflectionHelper.getPrivateField(packet, "b");
//get original chunk map c from packet
ChunkMap originalChunkMap = (ChunkMap) ReflectionHelper.getPrivateField(packet, "c");
//make a copy of chunk map
ChunkMap newChunkMap = new ChunkMap();
newChunkMap.b = originalChunkMap.b;
//calculate and change new chunk map
ChunkCoord coord = new ChunkCoord(world.getName(),x,z);
// Cache hit: reuse the previously hidden data. Cache miss: clone the raw
// data, hide ores in the clone, then cache a defensive copy of the result.
if((newChunkMap.a = HideOres.getCacheManager().getCache(coord)) == null){
newChunkMap.a = originalChunkMap.a.clone();
BlockHider.checkAndCopy(newChunkMap.a, true);
HideOres.getCacheManager().putCache(coord, newChunkMap.a.clone());
}
//put new chunk map to packet
ReflectionHelper.setPrivateField(packet, "c", newChunkMap);
}
/**
 * Same as the single-chunk variant but for bulk packets: fields "a"/"b"
 * hold parallel x/z coordinate arrays and "c" holds one ChunkMap per chunk.
 */
public static void calcAndChange(World world, PacketPlayOutMapChunkBulk packet){
int[] xArray = (int[]) ReflectionHelper.getPrivateField(packet, "a");
int[] zArray = (int[]) ReflectionHelper.getPrivateField(packet, "b");
ChunkMap[] originalChunkMapArray = (ChunkMap[]) ReflectionHelper.getPrivateField(packet, "c");
ChunkMap[] newChunkMapArray = new ChunkMap[originalChunkMapArray.length];
int index = 0;
for(ChunkMap originalChunkMap : originalChunkMapArray){
int x = xArray[index];
int z = zArray[index];
//make a copy of chunk map
ChunkMap newChunkMap = new ChunkMap();
newChunkMap.b = originalChunkMap.b;
//calculate and change new chunk map
ChunkCoord coord = new ChunkCoord(world.getName(),x,z);
// Same cache-or-compute logic as the single-chunk overload above.
if((newChunkMap.a = HideOres.getCacheManager().getCache(coord)) == null){
newChunkMap.a = originalChunkMap.a.clone();
BlockHider.checkAndCopy(newChunkMap.a, true);
HideOres.getCacheManager().putCache(coord, newChunkMap.a.clone());
}
//put it into new array
newChunkMapArray[index] = newChunkMap; index++;
}
//put new chunk map array to packet
ReflectionHelper.setPrivateField(packet, "c", newChunkMapArray);
}
}
<file_sep>/HideOres/src/com/hideores/core/Iinstance/IBlockNotifier.java
package com.hideores.core.Iinstance;
import org.bukkit.block.Block;
import org.bukkit.entity.Player;
/**
 * Version-independent hook for notifying a client about a single block —
 * presumably used to reveal a hidden ore once the player may see it
 * legitimately (TODO confirm against the version-specific implementations).
 */
public interface IBlockNotifier {
/** Sends a block update for the block at (x, y, z) to the given player. */
public void notifyBlock(Player player, double x, double y, double z);
/** Returns whether the given block should be treated as visible — exact contract not shown here; confirm in implementations. */
public boolean canSee(Block block);
}
<file_sep>/README.md
# HideOres
hmm
<file_sep>/HideOres/src/com/hideores/core/Iinstance/IChunkCoordQueue.java
package com.hideores.core.Iinstance;
/**
 * Marker interface for the version-specific chunk-coordinate queues that
 * replace the NMS player's pending-chunk queue (see PlayerHooker).
 */
public interface IChunkCoordQueue {
}
<file_sep>/HideOres/src/com/hideores/utils/LittleEndianReader.java
package com.hideores.utils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Little-endian cursor over a byte array. Reads advance the cursor;
 * {@code putByte}/{@code putShort} overwrite the value most recently read.
 * The array is wrapped, not copied, so all writes go straight through to
 * the caller's buffer.
 */
public class LittleEndianReader{
// Cursor over the wrapped array; configured once for little-endian access.
private ByteBuffer buffer;

/** Wraps {@code data} without copying; mutations write through to it. */
public LittleEndianReader(byte[] data){
    buffer = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN);
}

/** Reads the next byte, advancing the cursor by one. */
public byte readByte(){
    return buffer.get();
}

/** Reads the next two bytes as a little-endian short, advancing by two. */
public short readShort(){
    return buffer.getShort();
}

/** Overwrites the byte most recently read; the cursor ends where it started. */
public void putByte(byte b){
    int at = buffer.position() - 1;
    buffer.position(at);
    buffer.put(b);
}

/** Overwrites the short most recently read; the cursor ends where it started. */
public void putShort(short s){
    int at = buffer.position() - 2;
    buffer.position(at);
    buffer.putShort(s);
}

/** Moves the cursor back to the start of the array. */
public void reset(){
    buffer.rewind();
}

/** Number of bytes between the cursor and the end of the array. */
public int getRemaining(){
    return buffer.remaining();
}

/** Returns the backing array itself (not a copy). */
public byte[] toByteArray(){
    return buffer.array();
}
}
| d0db600142dad67e2e694a1008c11d3dfc842e8e | [
"Markdown",
"Java"
] | 8 | Java | wysohn/HideOres | 21e84e2d06f2b655c95c21b5558fae3e2dddb726 | 263e37f84ae41e1ac4ec93766af2500479392abe |
refs/heads/master | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.AI;
/// <summary>
/// Gives an enemy sight (view cone + line-of-sight raycast) and hearing
/// (nav-mesh path length to the player's footsteps). The sphere trigger on
/// this object defines both the sight and hearing range.
/// </summary>
public class EnemySight : MonoBehaviour
{
// True while the player is inside the view cone with clear line of sight.
public bool playerInSight = false;
// Horizontal field of view in degrees (half this angle on each side of forward).
public float fieldOfView = 110;
// Last position worth investigating: seen player, heard footsteps, or global alarm.
public Vector3 alertPosition = Vector3.zero;
private Animator playerAnim;
private NavMeshAgent navAgent;
// NOTE(review): this field hides the deprecated Component.collider property.
private SphereCollider collider;
// Last global alarm position we have already reacted to.
private Vector3 preLastPlayerPosition;
void Awake()
{
playerAnim = GameObject.FindGameObjectWithTag(Tags.player).GetComponent<Animator>();
navAgent = GetComponent<NavMeshAgent>();
collider = GetComponent<SphereCollider>();
}
void Start()
{
preLastPlayerPosition = GameController._instance.lastPlayerPosition;
}
void Update()
{
// React to the global alarm: when the last known player position changes,
// adopt it as our alert position.
if(GameController._instance.lastPlayerPosition != preLastPlayerPosition)
{
alertPosition = GameController._instance.lastPlayerPosition;
preLastPlayerPosition = GameController._instance.lastPlayerPosition;
}
}
// Runs every physics step while the player is inside the sense trigger.
public void OnTriggerStay(Collider other)
{
if(other.tag == Tags.player)
{
// Sight: player must be inside the view cone AND the raycast (cast from
// roughly eye height) must reach the player unobstructed.
Vector3 forward = transform.forward;
Vector3 playerDir = other.transform.position - transform.position;
float temp = Vector3.Angle(forward, playerDir);
RaycastHit hitInfo;
bool res = Physics.Raycast(transform.position + Vector3.up, other.transform.position - transform.position, out hitInfo);
if(temp < 0.5f * fieldOfView && (res == false||hitInfo.collider.tag == Tags.player))
{
playerInSight = true;
alertPosition = other.transform.position;
GameController._instance.SeePlayer(other.transform);
}
else
{
playerInSight = false;
}
// Hearing: can this enemy hear the player's footsteps? Sound "travels"
// along a nav-mesh path so it has to bend around obstacles.
if (playerAnim.GetCurrentAnimatorStateInfo(0).IsName("Locomotion"))
{
NavMeshPath path = new NavMeshPath();
if (navAgent.CalculatePath(other.transform.position, path))
{
// Full path: our position + the nav-mesh corners + the player's position.
Vector3[] wayPoints = new Vector3[path.corners.Length + 2];
wayPoints[0] = transform.position;
wayPoints[wayPoints.Length - 1] = other.transform.position;
for(int i = 0; i < path.corners.Length; i++)
{
wayPoints[i + 1] = path.corners[i];
}
float length = 0;
for (int i = 1; i < wayPoints.Length; i++)
{
length += (wayPoints[i] - wayPoints[i - 1]).magnitude;
}
// Audible when the sound path is shorter than the trigger radius.
// NOTE(review): the original comment said "diameter" but the code
// compares against the radius — confirm the intended hearing range.
if(length < collider.radius)
{
alertPosition = other.transform.position;
}
}
}
}
}
// Losing the trigger means the player is out of range entirely.
public void OnTriggerExit(Collider other)
{
if (other.tag == Tags.player)
{
playerInSight = false;
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Trigger volume that pushes any rigidbody inside it upward, making
/// objects hover while they stay within the pad.
/// </summary>
public class HoverPad : MonoBehaviour
{
    // Upward acceleration applied each physics step to bodies inside the trigger.
    public float hoverForce;

    void OnTriggerEnter(Collider other)
    {
        Debug.Log("Object entered the trigger");
    }

    void OnTriggerStay(Collider other)
    {
        // ForceMode.Acceleration ignores mass, so light and heavy bodies hover alike.
        Rigidbody body = other.GetComponent<Rigidbody>();
        body.AddForce(hoverForce * Vector3.up, ForceMode.Acceleration);
    }

    void OnTriggerExit(Collider other)
    {
        Debug.Log("Object exited the trigger");
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Pulses this object's vertical scale by sampling an animation curve over
/// time, with a random per-instance phase so copies don't move in lockstep.
/// </summary>
public class VoxAmin : MonoBehaviour
{
    // Curve sampled over time; its value multiplies the base Y scale.
    public AnimationCurve ac;
    // How fast the curve is traversed.
    public float playSpeed = 3f;

    private Vector3 baseScale;   // local scale captured at startup
    private float curveTime;     // elapsed seconds, randomly offset per instance

    void Start()
    {
        baseScale = transform.localScale;
        // Random phase in [0, 1) desynchronizes multiple instances.
        curveTime = Random.value;
    }

    void Update()
    {
        curveTime += Time.deltaTime;
        float factor = ac.Evaluate(curveTime * playSpeed);
        transform.localScale = new Vector3(baseScale.x, baseScale.y * factor, baseScale.z);
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace 观察者设计模式_猫捉老鼠
{
/// <summary>
/// Observer in the cat-chases-mice demo: each mouse subscribes its escape
/// handler to the cat's <c>catCome</c> event on construction.
/// </summary>
class Mouse
{
    private string name;
    private string color;

    public Mouse(string name, string color, Cat cat)
    {
        this.name = name;
        this.color = color;
        // Register our escape handler with the cat's alarm event.
        cat.catCome += this.RunAway;
    }

    // Invoked via the cat's catCome event: announce that we're fleeing.
    public void RunAway()
    {
        Console.WriteLine(color + "的老鼠" + name + "说:老猫来了,快跑");
    }
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Feeds raw vertical input (-1, 0 or 1) into the animator's "Vertical"
/// parameter to drive the biker's animation state.
/// </summary>
public class Biker : MonoBehaviour
{
    private Animator anim;

    void Start()
    {
        anim = GetComponent<Animator>();
    }

    void Update()
    {
        // GetAxisRaw returns exactly -1, 0 or 1 for keyboard input.
        int vertical = (int)Input.GetAxisRaw("Vertical");
        anim.SetInteger("Vertical", vertical);
    }
}
<file_sep>CloudSkybox
===========
*CloudSkybox* is an extention for Unity's default procedural skybox shader
that draws clouds with a volumetric rendering technique.


System Requirements
-------------------
- Unity 5.3 or later
- A GPU which supports SM 3.0
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Playables;
/// <summary>
/// Parkour player controller: drives locomotion from input, detects vault
/// and slide opportunities with raycasts, aligns those animations with
/// MatchTarget, carries a log via hand IK, and triggers a timeline cutscene.
/// </summary>
public class Player : MonoBehaviour
{
private Animator anim;
// Cached animator parameter hashes (cheaper than string lookups each frame).
private int speedID = Animator.StringToHash("Speed");
private int isSpeedupID = Animator.StringToHash("IsSpeedup");
private int horizontalID = Animator.StringToHash("Horizontal");
private int speedRotateID = Animator.StringToHash("SpeedRotate");
private int speedZID = Animator.StringToHash("SpeedZ");
private int vaultID = Animator.StringToHash("Vault");
private int sliderID = Animator.StringToHash("Slider");
private int colliderID = Animator.StringToHash("Collider");
private int isHoldLogID = Animator.StringToHash("IsHoldLog");
// World-space point the vault/slide animation is matched to.
private Vector3 matchTarget = Vector3.zero;
private CharacterController characterController;
// Log prop attached to the character, shown once a log pickup is collected.
public GameObject unityLog = null;
// IK targets on the carried log for each hand.
public Transform rightHand;
public Transform leftHand;
// Timeline played when entering a "Playable" trigger.
public PlayableDirector director;
// Start is called before the first frame update
void Start()
{
anim = GetComponent<Animator>();
characterController = GetComponent<CharacterController>();
//unityLog = transform.Find("Unity_Log").gameObject;
}
// Update is called once per frame
void Update()
{
// Scale raw input to the animator's expected ranges (4.1 max forward
// speed, 126 deg/s max turn rate — tuned to the animation clips).
anim.SetFloat(speedZID, Input.GetAxis("Vertical") * 4.1f);
anim.SetFloat(speedRotateID, Input.GetAxis("Horizontal") * 126);
//anim.SetFloat(speedID, Input.GetAxis("Vertical")*4.1f);
//anim.SetFloat(horizontalID, Input.GetAxis("Horizontal"));
//if (Input.GetKeyDown(KeyCode.LeftShift))
//{
// anim.SetBool(isSpeedupID, true);
//}
//if (Input.GetKeyUp(KeyCode.LeftShift))
//{
// anim.SetBool(isSpeedupID, false);
//}
ProcessVault();
ProcessSlider();
//if(anim.GetFloat(colliderID)>0.5f)
//{
// characterController.enabled = false;
//}
//else
//{
// characterController.enabled = true;
//}
// The "Collider" curve baked into vault/slide clips disables the
// CharacterController while the body passes through obstacles.
characterController.enabled = anim.GetFloat(colliderID) < 0.5f;
}
// Detects a vaultable obstacle ahead and, while the Vault state plays,
// pins the left hand to the obstacle's top edge via MatchTarget.
private void ProcessVault()
{
bool isVault = false;
// Only attempt a vault when running (speed > 3) in normal locomotion.
if (anim.GetFloat(speedZID) > 3 && anim.GetCurrentAnimatorStateInfo(0).IsName("Locomotion"))
{
RaycastHit hit;
// Low ray (knee height) to find short obstacles within 4 m.
if (Physics.Raycast(transform.position + Vector3.up * 0.3f, transform.forward, out hit, 4f))
{
if (hit.collider.tag == "Obstacle")
{
Vector3 point = hit.point;
// Hand target: top of the obstacle, slightly above its surface.
point.y = hit.collider.transform.position.y + hit.collider.bounds.size.y + 0.07f;
matchTarget = point;
isVault = true;
}
}
}
anim.SetBool(vaultID, isVault);
if (anim.GetCurrentAnimatorStateInfo(0).IsName("Vault") && anim.IsInTransition(0) == false)
{
// Match the left hand to the target during 32%-40% of the clip.
anim.MatchTarget(matchTarget, Quaternion.identity, AvatarTarget.LeftHand, new MatchTargetWeightMask(Vector3.one, 0), 0.32f, 0.4f);
}
}
// Detects an overhead obstacle ahead and slides under it, matching the
// root to a point two meters past the obstacle.
private void ProcessSlider()
{
bool isSlider = false;
if (anim.GetFloat(speedZID) > 3 && anim.GetCurrentAnimatorStateInfo(0).IsName("Locomotion"))
{
RaycastHit hit;
// High ray (head height) to find obstacles we must slide under.
if (Physics.Raycast(transform.position + Vector3.up * 1.5f, transform.forward, out hit, 3f))
{
if (hit.collider.tag == "Obstacle")
{
// Needs at least 2 m of run-up to enter the slide cleanly.
if (hit.distance > 2)
{
Vector3 point = hit.point;
point.y = 0;
matchTarget = point + transform.forward * 2;
isSlider = true;
}
}
}
}
anim.SetBool(sliderID, isSlider);
if (anim.GetCurrentAnimatorStateInfo(0).IsName("Slider") && anim.IsInTransition(0) == false)
{
// Match only X/Z of the root (Y mask 0) during 17%-67% of the clip.
anim.MatchTarget(matchTarget, Quaternion.identity, AvatarTarget.Root, new MatchTargetWeightMask(new Vector3(1,0,1), 0), 0.17f, 0.67f);
}
}
// Pickups: "Log" collectibles switch on carrying; "Playable" starts the cutscene.
private void OnTriggerEnter(Collider other)
{
if(other.tag == "Log")
{
Destroy(other.gameObject);
CarryWood();
}
if(other.tag == "Playable")
{
director.Play();
}
}
// Shows the attached log prop and switches the animator to carrying poses.
void CarryWood()
{
unityLog.SetActive(true);
anim.SetBool(isHoldLogID, true);
}
// Layer 1 IK: glue both hands to the carried log's grip transforms;
// weight 0 releases the hands when not carrying.
private void OnAnimatorIK(int layerIndex)
{
if(layerIndex == 1)
{
int weight = anim.GetBool(isHoldLogID) ? 1 : 0;
anim.SetIKPosition(AvatarIKGoal.LeftHand, leftHand.position);
anim.SetIKRotation(AvatarIKGoal.LeftHand, leftHand.rotation);
anim.SetIKPositionWeight(AvatarIKGoal.LeftHand, weight);
anim.SetIKRotationWeight(AvatarIKGoal.LeftHand, weight);
anim.SetIKPosition(AvatarIKGoal.RightHand, rightHand.position);
anim.SetIKRotation(AvatarIKGoal.RightHand, rightHand.rotation);
anim.SetIKPositionWeight(AvatarIKGoal.RightHand, weight);
anim.SetIKRotationWeight(AvatarIKGoal.RightHand, weight);
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Minimal TCP chat client: connects on Start, receives on a background
/// thread, and appends incoming messages to the chat text on the main
/// thread (Unity UI may only be touched from the main thread).
/// </summary>
public class ChatManager : MonoBehaviour
{
    public string ipaddress = "192.168.3.11";
    public int port = 7788;
    public InputField textInput;
    public Text chatText;

    private Socket clientSocket;
    private Thread t;
    private byte[] data = new byte[1024];
    // Fix: the old single `message` string was written by the receive thread
    // and read/cleared by Update with no synchronization, so messages could
    // be lost or never observed. A locked queue hands them over safely.
    private readonly Queue<string> pending = new Queue<string>();

    void Start()
    {
        ConnectToServer();
    }

    void Update()
    {
        // Drain everything the receive thread queued since last frame.
        lock (pending)
        {
            while (pending.Count > 0)
            {
                chatText.text += pending.Dequeue() + "\n";
            }
        }
    }

    void ConnectToServer()
    {
        clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
        clientSocket.Connect(new IPEndPoint(IPAddress.Parse(ipaddress), port));
        // Receive on a dedicated thread so the blocking read doesn't stall the
        // game; background so it can't keep the process alive after quit.
        t = new Thread(ReceiveMessage);
        t.IsBackground = true;
        t.Start();
    }

    void ReceiveMessage()
    {
        try
        {
            while (true)
            {
                if (clientSocket.Connected == false)
                    break;
                int length = clientSocket.Receive(data);
                if (length <= 0)
                    break; // remote side closed the connection
                string message = Encoding.UTF8.GetString(data, 0, length);
                print(message);
                lock (pending)
                {
                    pending.Enqueue(message);
                }
            }
        }
        catch (SocketException)
        {
            // Expected when OnDestroy closes the socket while Receive blocks.
        }
        catch (ObjectDisposedException)
        {
            // Socket already disposed during shutdown; nothing to do.
        }
    }

    // NOTE(review): hides UnityEngine's Component.SendMessage; name kept for
    // compatibility with existing callers.
    void SendMessage(string message)
    {
        byte[] data = Encoding.UTF8.GetBytes(message);
        clientSocket.Send(data);
    }

    /// <summary>UI hook: sends the input field's text and clears it.</summary>
    public void OnSendButtonClick()
    {
        string value = textInput.text;
        SendMessage(value);
        textInput.text = "";
    }

    void OnDestroy()
    {
        clientSocket.Close(); // unblocks the receive thread so it can exit
    }
}
<file_sep>using System;
namespace HighLevel
{
class Program
{
// Generic bubble sort driven by a caller-supplied comparison:
// compareMethod(a, b) returns true when a and b must be swapped
// (i.e. a should come after b in the final order).
static void CommonSort<T>(T[] sortArray, Func<T,T,bool> compareMethod)
{
bool swapped = true;
do
{
swapped = false;
for (int i = 0; i < sortArray.Length - 1; i++)
{
if (compareMethod(sortArray[i],sortArray[i + 1]))
{
// Swap adjacent out-of-order elements and remember we changed something.
T temp = sortArray[i];
sortArray[i] = sortArray[i + 1];
sortArray[i + 1] = temp;
swapped = true;
}
}
} while (swapped); // a full pass with no swaps means the array is sorted
}
// Demo: sort an Employee array using Employee.Compare as the ordering.
static void Main(string[] args)
{
Employee[] employees = new Employee[]
{
new Employee("gh",35),
new Employee("re",65),
new Employee("df",24),
new Employee("yhu",58),
new Employee("fg",124),
new Employee("fc",69)
};
CommonSort<Employee>(employees, Employee.Compare);
foreach(Employee em in employees)
{
Console.WriteLine(em.ToString());
}
Console.ReadKey();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace json操作
{
/// <summary>
/// Deserialization target for a player record parsed from JSON.
/// </summary>
class Player
{
    public string Name { get; set; }
    public int Level { get; set; }
    public int Age { get; set; }
    public List<Skill> SkillList { get; set; }

    public override string ToString()
    {
        // Fix: formatting the list directly printed its type name
        // ("System.Collections.Generic.List`1[...]") instead of its
        // contents; join the elements so each Skill's ToString is shown.
        string skills = SkillList == null ? "" : "[" + string.Join(",", SkillList) + "]";
        return string.Format("Name:{0},Level:{1},Age:{2},SkillList:{3}", Name, Level, Age, skills);
    }
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// View component for one inventory item: owns the label that shows the
/// item's name inside a knapsack grid cell.
/// </summary>
public class ItemUI : MonoBehaviour
{
    // Label displaying this item's name.
    public Text ItemName;

    /// <summary>Refreshes the label to show the given item name.</summary>
    public void UpdateItem(string name)
    {
        ItemName.text = name;
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.EventSystems;
/// <summary>
/// Per-grid-cell input surface: raises static events when the pointer
/// enters/exits a cell or when an item is dragged out of one with the left
/// button. KnapsackManager subscribes to these to drive tooltips and drag
/// &amp; drop.
/// </summary>
public class GridUI : MonoBehaviour, IPointerEnterHandler, IPointerExitHandler,IBeginDragHandler,IDragHandler,IEndDragHandler
{
#region Enter&&Exit
// Raised with this cell's transform when the pointer moves over it.
public static Action<Transform> OnEnter;
// Raised when the pointer leaves a cell.
public static Action OnExit;
// Fired when the pointer moves over this object.
public void OnPointerEnter(PointerEventData eventData)
{
// Only react when the object under the pointer is actually a grid cell.
if(eventData.pointerEnter.tag == "Grid")
{
if (OnEnter!=null)
OnEnter(transform); // argument: this cell's transform
}
}
// Fired when the pointer leaves this object.
public void OnPointerExit(PointerEventData eventData)
{
if (eventData.pointerEnter.tag == "Grid")
{
if (OnExit != null)
OnExit();
}
}
#endregion
// Raised when a left-button drag starts on this cell.
public static Action<Transform> OnLeftBeginDrag;
// Raised on left-button release: (cell the drag started from, object under the pointer or null).
public static Action<Transform,Transform> OnLeftEndDrag;
public void OnBeginDrag(PointerEventData eventData)
{
if(eventData.button == PointerEventData.InputButton.Left) // left button pressed
{
if (OnLeftBeginDrag != null)
OnLeftBeginDrag(transform);
}
}
// Required by IDragHandler; all drag visuals are handled by the manager.
public void OnDrag(PointerEventData eventData)
{
}
public void OnEndDrag(PointerEventData eventData)
{
if(eventData.button == PointerEventData.InputButton.Left) // left button released
{
if (OnLeftEndDrag != null)
{
if(eventData.pointerEnter == null) // nothing under the pointer (dropped outside the UI)
OnLeftEndDrag(transform,null);
else
OnLeftEndDrag(transform, eventData.pointerEnter.transform);
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Xml;
namespace _8_xml操作
{
class Program
{
    /// <summary>
    /// Parses skill.txt (an XML document of skill elements) into Skill
    /// objects and prints them.
    /// </summary>
    static void Main(string[] args)
    {
        List<Skill> skillList = new List<Skill>();
        XmlDocument xmlDoc = new XmlDocument();
        xmlDoc.LoadXml(File.ReadAllText("skill.txt"));

        XmlNode rootNode = xmlDoc.FirstChild;
        foreach (XmlNode skillNode in rootNode.ChildNodes)
        {
            Skill skill = new Skill();
            foreach (XmlNode fieldNode in skillNode.ChildNodes)
            {
                switch (fieldNode.Name)
                {
                    case "id":
                        skill.Id = Int32.Parse(fieldNode.InnerText);
                        break;
                    case "name":
                        skill.Name = fieldNode.InnerText;
                        // Fix: look the attribute up by name instead of by
                        // position (Attributes[0]), so attribute order in the
                        // file no longer matters. Assumes the attribute is
                        // named "lang" — TODO confirm against skill.txt.
                        XmlAttribute langAttr = fieldNode.Attributes["lang"];
                        if (langAttr != null)
                        {
                            skill.Lang = langAttr.Value;
                        }
                        break;
                    case "damage":
                        // Fix: previously ANY unrecognized element was parsed
                        // as damage; now only <damage> is.
                        skill.Damage = Int32.Parse(fieldNode.InnerText);
                        break;
                }
            }
            skillList.Add(skill);
        }

        foreach (Skill skill in skillList)
        {
            Console.WriteLine(skill);
        }
        Console.ReadKey();
    }
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
/// <summary>
/// Tracks the player's hit points; on death plays the death animation and
/// reloads the level after a short delay.
/// </summary>
public class PlayerHealth : MonoBehaviour
{
    private Animator anim;
    // Remaining hit points; the player dies when this reaches zero.
    public float hp = 100f;

    void Awake()
    {
        anim = GetComponent<Animator>();
    }

    /// <summary>Applies damage; triggers death handling exactly once.</summary>
    public void TakeDamage(float damage)
    {
        if (hp <= 0)
        {
            return; // already dead: don't retrigger death or stack scene reloads
        }
        hp -= damage;
        if (hp <= 0)
        {
            // Fix: the Dead flag was being set to FALSE at the moment of
            // death, so the death animation could never start.
            anim.SetBool("Dead", true);
            StartCoroutine(ReloadScene());
        }
    }

    // Waits for the death animation to play out, then restarts the level.
    IEnumerator ReloadScene()
    {
        yield return new WaitForSeconds(4f);
        SceneManager.LoadScene(0);
    }
}
<file_sep>using System;
namespace _01背包问题_递归实现_带备忘的自顶向下法_
{
class Program
{
    // Memo table: result[m, i] caches the best value for capacity m using
    // items 1..i. 0 doubles as the "not computed" marker, so a true optimum
    // of 0 is merely recomputed (harmless for correctness).
    // NOTE(review): the cache assumes the same w/p arrays across calls.
    public static int[,] result = new int[11, 4];

    static void Main(string[] args)
    {
        int[] w = { 0, 3, 4, 5 }; // weight of each item (index 0 unused)
        int[] p = { 0, 4, 5, 6 }; // value of each item (index 0 unused)
        Console.WriteLine(UpDown(10, 3, w, p));
        Console.WriteLine(UpDown(3, 3, w, p));
        Console.WriteLine(UpDown(4, 3, w, p));
        Console.WriteLine(UpDown(5, 3, w, p));
        Console.WriteLine(UpDown(7, 3, w, p));
        Console.ReadKey();
    }

    /// <summary>
    /// Top-down memoized 0/1 knapsack: best total value achievable with
    /// capacity m using items 1..i (weights w, values p).
    /// </summary>
    public static int UpDown(int m, int i, int[] w, int[] p)
    {
        if (i == 0 || m == 0) return 0;
        // Fix: the memo table was hard-coded at 11x4 and threw
        // IndexOutOfRangeException for m > 10 or i > 3; grow it on demand
        // (existing cached entries are preserved).
        EnsureCapacity(m, i);
        if (result[m, i] != 0)
        {
            return result[m, i];
        }
        if (w[i] > m)
        {
            // Item i doesn't fit: best value comes from items 1..i-1 alone.
            result[m, i] = UpDown(m, i - 1, w, p);
        }
        else
        {
            // Either take item i (gain p[i], lose w[i] capacity) or skip it.
            int withItem = UpDown(m - w[i], i - 1, w, p) + p[i];
            int withoutItem = UpDown(m, i - 1, w, p);
            result[m, i] = withItem > withoutItem ? withItem : withoutItem;
        }
        return result[m, i];
    }

    // Grows the shared memo table so that result[m, i] is a valid index,
    // copying over any previously cached values.
    private static void EnsureCapacity(int m, int i)
    {
        if (result != null && m < result.GetLength(0) && i < result.GetLength(1))
        {
            return;
        }
        int oldRows = result == null ? 0 : result.GetLength(0);
        int oldCols = result == null ? 0 : result.GetLength(1);
        int rows = Math.Max(m + 1, oldRows);
        int cols = Math.Max(i + 1, oldCols);
        int[,] grown = new int[rows, cols];
        for (int r = 0; r < oldRows; r++)
        {
            for (int c = 0; c < oldCols; c++)
            {
                grown[r, c] = result[r, c];
            }
        }
        result = grown;
    }
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Central controller for the knapsack UI: owns the item catalog, wires up
/// GridUI's static events, and drives the tooltip and the drag ghost.
/// Item placement data itself lives in ItemModel, keyed by grid name.
/// </summary>
public class KnapsackManager : MonoBehaviour
{
private static KnapsackManager _instance;
public static KnapsackManager Instance { get { return _instance; } }
public GridPanelUI GridPanelUI;
public TooltipUI TooltipUI;
public DragItemUI DragItemUI;
// Whether the tooltip should currently be visible.
private bool isShow = false;
// Whether an item is currently being dragged.
private bool isDrag = false;
// Item catalog keyed by item id.
public Dictionary<int, Item> ItemList { get; private set; }
void Awake()
{
_instance = this; // singleton
Load();
GridUI.OnEnter += GridUI_OnEnter;
GridUI.OnExit += GridUI_OnExit;
GridUI.OnLeftBeginDrag += GridUI_OnLeftBeginDrag;
GridUI.OnLeftEndDrag += GridUI_OnLeftEndDrag;
}
void Update()
{
Vector2 position;
// Convert the mouse's screen position into the knapsack canvas's local space.
RectTransformUtility.ScreenPointToLocalPointInRectangle(GameObject.Find("KnapsackUI").transform as RectTransform, Input.mousePosition, null, out position);
if (isDrag)
{
DragItemUI.Show(); // the drag ghost follows the cursor
DragItemUI.SetLocalPosition(position);
}
else if (isShow)
{
TooltipUI.Show();
TooltipUI.SetLocalPosition(position); // tooltip follows the cursor
}
}
/// <summary>Places the item with the given id into the first empty grid cell.</summary>
public void StoreItem(int itemId)
{
if (!ItemList.ContainsKey(itemId))
{
return;
}
Transform emptyGrid = GridPanelUI.GetEmptyGrid(); // first free cell, or null
if(emptyGrid == null)
{
Debug.LogWarning("背包已满!");
return;
}
Item temp = ItemList[itemId]; // look the item up by id
this.CreateNewItem(temp, emptyGrid);
}
// Populates the hard-coded item catalog (weapons, consumables, armor).
private void Load()
{
ItemList = new Dictionary<int, Item>();
Weapon w2 = new Weapon(0, "牛刀", "宰牛刀", 20, 10, "", 100);
Weapon w1 = new Weapon(1, "金枪", "可以射击", 150, 100, "", 190);
Consumable c1 = new Consumable(2, "红瓶", "加血", 20, 12, "", 20,0);
Consumable c2 = new Consumable(3, "蓝瓶", "加蓝", 40, 20, "", 0, 20);
Armor a1 = new Armor(4, "头盔", "保护头部", 120, 80, "", 5, 40, 1);
Armor a2 = new Armor(5, "胸甲", "护胸", 200, 100, "", 25, 30, 12);
ItemList.Add(w1.ID, w1);
ItemList.Add(w2.ID, w2);
ItemList.Add(c1.ID, c1);
ItemList.Add(c2.ID, c2);
ItemList.Add(a1.ID, a1);
ItemList.Add(a2.ID, a2);
}
#region 事件回调
// Pointer entered a cell: show a tooltip if the cell holds an item.
private void GridUI_OnEnter(Transform gridTransform)
{
Item item = ItemModel.GetItem(gridTransform.name); // item stored in this cell, if any
if (item == null)
return;
TooltipUI.UpdateTooltip(item.Name);
isShow = true;
}
// Pointer left a cell: hide the tooltip.
private void GridUI_OnExit()
{
isShow = false;
TooltipUI.Hide();
}
// Drag started: lift the item out of its cell into the drag ghost.
private void GridUI_OnLeftBeginDrag(Transform gridTransform)
{
if (gridTransform.childCount == 0)
return;
else
{
Item item = ItemModel.GetItem(gridTransform.name);
DragItemUI.UpdateItem(item.Name);
Destroy(gridTransform.GetChild(0).gameObject);
isDrag = true;
}
}
// Drag ended: drop outside the UI, into a grid cell, or snap back.
private void GridUI_OnLeftEndDrag(Transform preTransform,Transform enterTransform)
{
isDrag = false;
DragItemUI.Hide();
if(enterTransform == null) // dropped outside the UI: discard the item
{
ItemModel.DeleteItem(preTransform.name);
Debug.LogWarning("物品已扔");
}
else if(enterTransform.tag == "Grid") // dropped onto a grid cell
{
if (enterTransform.childCount == 0) // empty cell: move the item there
{
Item item = ItemModel.GetItem(preTransform.name);
this.CreateNewItem(item, enterTransform);
ItemModel.DeleteItem(preTransform.name);
}
else // occupied cell: swap the two items
{
Destroy(enterTransform.GetChild(0).gameObject);
Item preGridItem = ItemModel.GetItem(preTransform.name);
Item enterGridItem = ItemModel.GetItem(enterTransform.name);
this.CreateNewItem(preGridItem, enterTransform);
this.CreateNewItem(enterGridItem, preTransform);
}
}
else // dropped into a gap between cells: return to the original cell
{
Item item = ItemModel.GetItem(preTransform.name);
this.CreateNewItem(item, preTransform);
}
}
#endregion
// Instantiates the item prefab under the given cell and records it in ItemModel.
private void CreateNewItem(Item item,Transform parent)
{
GameObject itemPrefab = Resources.Load<GameObject>("Prefabs/Item"); // load the prefab
itemPrefab.GetComponent<ItemUI>().UpdateItem(item.Name); // set the name before instantiating
GameObject itemGo = Instantiate(itemPrefab); // instantiate the prefab
itemGo.transform.SetParent(parent); // parent it under the cell
itemGo.transform.localPosition = Vector3.zero; // center it in the cell
itemGo.transform.localScale = Vector3.one; // keep the unscaled size
ItemModel.StoreItem(parent.name, item); // record the placement in the data model
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Debug helper: pressing the middle mouse button stores a random item
/// (ids 0-5) into the knapsack.
/// </summary>
public class InputDetector : MonoBehaviour
{
    void Update()
    {
        if (!Input.GetMouseButtonDown(2))
        {
            return;
        }
        int itemId = Random.Range(0, 6); // upper bound is exclusive: ids 0..5
        KnapsackManager.Instance.StoreItem(itemId);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Rigid follow camera: captures its offset from the player at startup and
/// keeps that offset every frame.
/// NOTE(review): the class name hides UnityEngine.Camera within this file.
/// </summary>
public class Camera : MonoBehaviour
{
    // The transform to follow (typically the player).
    public Transform player;

    // Fixed camera-to-player displacement captured at startup.
    private Vector3 offset = Vector3.zero;

    void Start()
    {
        offset = transform.position - player.position;
    }

    void Update()
    {
        // Track the player rigidly, preserving the initial offset.
        transform.position = player.position + offset;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Fix: this file contained unresolved git merge-conflict markers
// (<<<<<<< / ======= / >>>>>>>); both sides were identical, so the
// conflict is resolved by keeping a single copy.
/// <summary>Deactivates its own GameObject as soon as it loads.</summary>
public class DeactivateMe : MonoBehaviour
{
    void Awake()
    {
        gameObject.SetActive(false);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.AI;
/// <summary>
/// Translates the NavMeshAgent's desired velocity into smoothed animator
/// parameters ("Speed", "AnglarSpeed") so the enemy turns and accelerates
/// naturally, and mirrors the sight state into "PlayerInSight" while the
/// player is alive.
/// </summary>
public class EnemyAnimation : MonoBehaviour
{
// Damping times for the animator's SetFloat calls (smoother transitions).
public float speedDampTime = 0.3f;
public float anglarSpeedDampTime = 0.3f;
private NavMeshAgent navAgent;
private Animator anim;
private EnemySight sight;
private PlayerHealth health;
void Awake()
{
navAgent = GetComponent<NavMeshAgent>();
anim = GetComponent<Animator>();
sight = GetComponent<EnemySight>();
health = GameObject.FindGameObjectWithTag(Tags.player).GetComponent<PlayerHealth>();
}
// Update is called once per frame
void Update()
{
if(navAgent.desiredVelocity == Vector3.zero)
{
// No steering target: ease both parameters back to a standstill.
anim.SetFloat("Speed", 0,speedDampTime,Time.deltaTime);
anim.SetFloat("AnglarSpeed", 0,anglarSpeedDampTime,Time.deltaTime);
}
else
{
float angle = Vector3.Angle(transform.forward, navAgent.desiredVelocity);
float angleRad = 0;
if(angle > 90)
{
// Target is behind us: stop and turn in place first.
anim.SetFloat("Speed", 0, speedDampTime, Time.deltaTime);
}
else
{// Project the desired velocity onto our forward axis to get the effective
// forward speed; this yields smooth, arcing acceleration toward the target.
Vector3 projection = Vector3.Project(navAgent.desiredVelocity, transform.forward);
anim.SetFloat("Speed", projection.magnitude, speedDampTime, Time.deltaTime);
}
angleRad = angle * Mathf.Deg2Rad;// convert degrees to radians for the animator
// Decide turn direction: the cross product of our forward vector and the
// desired velocity points down (negative y) when the target is to our right.
Vector3 crossRes = Vector3.Cross(transform.forward, navAgent.desiredVelocity);
if(crossRes.y < 0)// negative y means turn right, so negate the angle
{
angleRad = -angleRad;
}
anim.SetFloat("AnglarSpeed", angleRad, anglarSpeedDampTime, Time.deltaTime);
// Root motion drives the actual movement; keep the agent's simulated
// position pinned to the transform so they don't drift apart.
navAgent.nextPosition = transform.position;
}
// Only chase a living player; force the flag off once the player is dead.
if (health.hp > 0)
{
anim.SetBool("PlayerInSight", sight.playerInSight);
}
else
{
anim.SetBool("PlayerInSight", false);
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Stealth-game player controller driven by four on-screen buttons:
/// converts pressed buttons into a movement direction, rotates toward it,
/// feeds speed into the animator, and plays footstep audio while moving.
/// </summary>
public class Player : MonoBehaviour
{
// Lerp factor used when ramping the animator's Speed parameter.
public float moveSpeed = 3;
// Lerp factor used when turning toward the input direction.
public float rotateSpeed = 7;
// Whether the player has picked up the key (read elsewhere — e.g. by doors).
public bool hasKey = false;
// On-screen directional buttons.
public GameObject ButtonUp;
public GameObject ButtonDown;
public GameObject ButtonRight;
public GameObject ButtonLeft;
private Animator anim;
// NOTE(review): hides the deprecated Component.audio property.
private AudioSource audio;
// Current horizontal/vertical input derived from the buttons (-1, 0 or 1).
private float h = 0;
private float v = 0;
// Press-state trackers for each button.
private OnButtonPressed Up;
private OnButtonPressed Down;
private OnButtonPressed Right;
private OnButtonPressed Left;
void Awake()
{
anim = GetComponent<Animator>();
audio = GetComponent<AudioSource>();
Up = ButtonUp.GetComponent<OnButtonPressed>();
Down = ButtonDown.GetComponent<OnButtonPressed>();
Right = ButtonRight.GetComponent<OnButtonPressed>();
Left = ButtonLeft.GetComponent<OnButtonPressed>();
}
void Update()
{
// Hold LeftShift to sneak (quieter/slower animation state).
if (Input.GetKey(KeyCode.LeftShift))
{
anim.SetBool("Sneak",true);
}
else
{
anim.SetBool("Sneak", false);
}
//float h = Input.GetAxis("Horizontal");
//float v = Input.GetAxis("Vertical");
// Derive a -1/0/+1 axis from each opposing button pair; pressing both
// (or neither) of a pair yields 0.
if (Up.isDown && !Down.isDown)
{
v = 1.0f;
}
else if (!Up.isDown && Down.isDown)
{
v = -1.0f;
}
else { v = 0; }
if (Right.isDown&&!Left.isDown)
{
h = 1.0f;
}else if(!Right.isDown && Left.isDown)
{
h = -1.0f;
}
else { h = 0; }
if (Mathf.Abs(h) > 0.1 || Mathf.Abs(v) > 0.1)
{
// Ease the animator's Speed toward the run speed (5.6) and turn
// smoothly toward the input direction.
float newSpeed = Mathf.Lerp(anim.GetFloat("Speed"), 5.6f, moveSpeed * Time.deltaTime);
anim.SetFloat("Speed", newSpeed);
Vector3 targetDir = new Vector3(h, 0, v);
Quaternion newRotation = Quaternion.LookRotation(targetDir, Vector3.up);
transform.rotation = Quaternion.Lerp(transform.rotation, newRotation, rotateSpeed * Time.deltaTime);
}
else
{
anim.SetFloat("Speed", 0);
}
// Footsteps play only while the locomotion state is active.
if (anim.GetCurrentAnimatorStateInfo(0).IsName("Locomotion"))
{
PlayFootMusic();
}
else
{
StopFootMusic();
}
}
// Starts the footstep loop if it isn't already playing.
private void PlayFootMusic()
{
if (!audio.isPlaying)
{
audio.Play();
}
}
// Stops the footstep loop if it is playing.
private void StopFootMusic()
{
if (audio.isPlaying)
{
audio.Stop();
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Homing enemy: every frame it turns a limited amount toward the player
// and advances along its forward vector. Hurt() destroys it and spawns a
// blood effect in its place.
public class FController : MonoBehaviour
{
    public Transform player;
    public float rotSpeed = 5f;
    public Vector3 vc;
    public GameObject blood;

    void Start()
    {
        // Resolve the player transform via the scene-wide level manager.
        player = LevelManager.lm.player;
    }

    void Update()
    {
        Vector3 toPlayer = player.position - transform.position;
        float maxRadiansThisFrame = rotSpeed * Time.deltaTime;
        Vector3 newFacing = Vector3.RotateTowards(transform.forward, toPlayer, maxRadiansThisFrame, 0);
        transform.rotation = Quaternion.LookRotation(newFacing);
        // Constant forward speed of 8 units/second in local space.
        transform.Translate(Vector3.forward * Time.deltaTime * 8);
    }

    // Called when the player's projectile hits this enemy.
    public void Hurt()
    {
        Destroy(gameObject);
        Instantiate(blood, transform.position, Quaternion.identity);
    }
}
<file_sep>using System;
namespace 钢条切割问题_自底向上法_动态规划_
{
    class Program
    {
        // Bottom-up rod-cutting demo: prints the optimal revenue for every
        // rod length from 0 through 10 (identical output to eleven explicit calls).
        static void Main(string[] args)
        {
            int[] result = new int[11]; // result[i] caches the best revenue for length i
            int[] p = { 0, 1, 5, 8, 9, 10, 17, 17, 20, 24, 30 }; // p[i] = price of a piece of length i
            for (int n = 0; n <= 10; n++)
            {
                Console.WriteLine(ButtomUp(n, p, result));
            }
            Console.ReadKey();
        }

        // Fills result[1..n] bottom-up and returns result[n], the maximum
        // revenue obtainable from a rod of length n given price table p.
        public static int ButtomUp(int n, int[] p, int[] result)
        {
            for (int len = 1; len <= n; len++)
            {
                int best = -1;
                // Try each possible first-cut length j; the remainder is optimal by construction.
                for (int j = 1; j <= len; j++)
                {
                    best = Math.Max(best, p[j] + result[len - j]);
                }
                result[len] = best;
            }
            return result[n];
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Probes straight down each frame and destroys whatever object the ray
// hits within `distance`; the ray is also drawn for editor debugging.
public class RayCasting : MonoBehaviour
{
    public float distance;

    void Update()
    {
        RaycastHit hitInfo;
        Debug.DrawRay(transform.position, Vector3.down * distance);
        bool hitSomething = Physics.Raycast(transform.position, Vector3.down, out hitInfo, distance);
        if (hitSomething)
        {
            Destroy(hitInfo.collider.gameObject);
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace _8_xml操作
{
    // Plain data holder for one skill entry parsed from the XML file.
    class Skill
    {
        public int Id { get; set; }
        public string Name { get; set; }
        public string Lang { get; set; }
        public int Damage { get; set; }

        // Human-readable dump of all four fields.
        public override string ToString() =>
            string.Format("Id:{0},Name:{1},Lang:{2},Damage:{3}", Id, Name, Lang, Damage);
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
namespace 活动选择问题_动态规划思路_自底向上_
{
    class Program
    {
        // Activity selection solved bottom-up with dynamic programming.
        // s[k]/f[k] are the start/finish times of activity k; result[i, j]
        // holds the largest set of mutually compatible activities that fit
        // strictly between the finish of activity i and the start of activity j.
        static void Main(string[] args)
        {
            int[] s = { 0, 1, 3, 0, 5, 3, 5, 6, 8, 8, 2, 12, 24 };
            int[] f = { 0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 24 };
            List<int>[,] result = new List<int>[13, 13]; // i.e. c[i,j]; stores the final answers (cells default to null)
            for (int m = 0; m < 13; m++)
            {
                for(int n = 0; n < 13; n++)
                {
                    result[m, n] = new List<int>(); // initialize every cell to an empty list
                }
            }
            for(int j = 0; j < 13; j++)
            {
                // NOTE(review): i runs only up to j-2 (condition i < j-1), apparently
                // so at least one activity can lie strictly between i and j — confirm.
                for (int i = 0; i < j-1; i++)
                {
                    List<int> sij = new List<int>();
                    for(int number = 1;number < s.Length - 1; number++)
                    {
                        if (s[number] >= f[i] && f[number] <= s[j])
                        {
                            sij.Add(number); // activity `number` fits the (i, j) window; record it in sij
                        }
                    }
                    if (sij.Count > 0)
                    {
                        int maxCount = 0;
                        List<int> tempList = new List<int>();
                        foreach(int number in sij)
                        {
                            // Splitting at `number`: best of (i,number) plus best of (number,j) plus number itself.
                            int count = result[i, number].Count + result[number, j].Count + 1;
                            if (count > maxCount) // keep the split yielding the most activities
                            {
                                maxCount = count;
                                tempList = result[i, number].Union<int>(result[number, j]).ToList<int>(); // union of the two halves
                                tempList.Add(number); // plus the splitting activity itself — union of all three parts
                            }
                        }
                        result[i, j] = tempList;
                    }
                }
            }
            // (0, 12) spans the entire time range, so this cell is the final answer.
            List<int> l = result[0, 12];
            foreach(int temp in l)
            {
                Console.WriteLine(temp);
            }
            Console.ReadKey();
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace 二叉排序树_链式存储
{
    // One node of the linked binary search tree: an int payload plus links
    // to both children and to the parent node.
    class BSNode
    {
        public BSNode LeftChild { get; set; }
        public BSNode RightChild { get; set; }
        public BSNode Parent { get; set; }
        public int Data { get; set; }

        // Empty node; Data defaults to 0 and all links to null.
        public BSNode()
        {
        }

        // Node initialized with its payload.
        public BSNode(int item)
        {
            Data = item;
        }
    }
}
<file_sep>using LitJson;
using System;
using System.Collections.Generic;
using System.IO;
namespace json操作
{
    class Program
    {
        // LitJson demo. The commented-out sections document earlier experiments:
        // manual parsing via JsonData indexers, and direct deserialization into
        // Skill[] / List<Skill> / Player. The live code serializes a Player to
        // a JSON string and prints it.
        static void Main(string[] args)
        {
            //List<Skill> skillList = new List<Skill>();
            //JsonData jsonData = JsonMapper.ToObject(File.ReadAllText("json技能信息.txt")); // parse the JSON text with JsonMapper; JsonData represents an array or object (here: an array)
            //foreach (JsonData temp in jsonData) // temp represents one object of the array
            //{
            //    Skill skill = new Skill();
            //    JsonData idValue = temp["id"]; // the string indexer fetches the value for a key
            //    JsonData nameValue = temp["name"];
            //    JsonData damageValue = temp["damage"];
            //    int id = Int32.Parse(idValue.ToString());
            //    int damage = Int32.Parse(damageValue.ToString());
            //    skill.id = id;
            //    skill.damage = damage;
            //    skill.name = nameValue.ToString();
            //    skillList.Add(skill);
            //}
            //foreach(var temp in skillList)
            //{
            //    Console.WriteLine(temp);
            //}
            //Skill[] skillArray = JsonMapper.ToObject<Skill[]>(File.ReadAllText("json技能信息.txt"));
            //foreach(var temp in skillArray)
            //{
            //    Console.WriteLine(temp);
            //}
            //List<Skill> skillList = JsonMapper.ToObject<List<Skill>>(File.ReadAllText("json技能信息.txt"));
            //foreach (var temp in skillList)
            //{
            //    Console.WriteLine(temp);
            //}
            //Player p = JsonMapper.ToObject<Player>(File.ReadAllText("player.txt"));
            //Console.WriteLine(p);
            //foreach(var temp in p.SkillList)
            //{
            //    Console.WriteLine(temp);
            //}
            // Live path: object -> JSON string.
            Player p = new Player();
            p.Name = "花千骨";
            p.Level = 100;
            p.Age = 18;
            string json = JsonMapper.ToJson(p);
            Console.WriteLine(json);
            Console.ReadKey();
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Applies input-driven torque to this rigidbody: the horizontal axis spins
// it around its up vector, the vertical axis around its right vector.
public class AddTorqueExample : MonoBehaviour
{
    public float amount = 50f;
    private Rigidbody rb;

    void Start()
    {
        rb = GetComponent<Rigidbody>();
    }

    void FixedUpdate()
    {
        float yawInput = Input.GetAxis("Horizontal") * amount * Time.deltaTime;
        float pitchInput = Input.GetAxis("Vertical") * amount * Time.deltaTime;
        rb.AddTorque(transform.up * yawInput);
        rb.AddTorque(transform.right * pitchInput);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
// End-of-level lift. The inner doors mirror the outer doors' x positions;
// once the player has stood inside for liftUpTime seconds the lift rises,
// and one second later the game reloads scene 0 (win condition).
public class Lift : MonoBehaviour
{
    public Transform outerLeft;
    public Transform innerLeft;
    public Transform outerRight;
    public Transform innerRight;
    public float liftUpTime = 3f;   // how long the player must wait inside before the lift moves
    private float liftUpTimer = 0;  // accumulates while the player is inside
    private bool isIn = false;      // true while the player stands in the trigger
    private float gameWinTimer = 0; // counts up once the lift starts rising

    // Update is called once per frame
    void Update()
    {
        // Keep the inner door panels horizontally locked to the outer ones.
        innerLeft.position = new Vector3(outerLeft.position.x, innerLeft.position.y, innerLeft.position.z);
        innerRight.position = new Vector3(outerRight.position.x, innerRight.position.y, innerRight.position.z);
        if (isIn)
        {
            liftUpTimer += Time.deltaTime;
            if(liftUpTimer > liftUpTime)
            {
                // Rise at one unit per second; after one second of rising, restart.
                transform.Translate(Vector3.up * Time.deltaTime);
                gameWinTimer += Time.deltaTime;
                if(gameWinTimer > 1f)
                {
                    SceneManager.LoadScene(0);
                }
            }
        }
    }

    void OnTriggerStay(Collider other)
    {
        if (other.tag == Tags.player)
        {
            isIn = true;
        }
    }

    // Leaving the lift resets the countdown.
    void OnTriggerExit(Collider other)
    {
        if (other.tag == Tags.player)
        {
            isIn = false;
            liftUpTimer = 0;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

// Binds a UI slider to an AudioSource's volume.
// FIX: this file contained an unresolved git merge conflict
// (<<<<<<< HEAD / ======= / >>>>>>> markers around two byte-identical
// copies of the class); the markers are removed and a single copy kept.
public class ControlSound : MonoBehaviour
{
    public AudioSource sound; // the audio source whose volume is controlled
    public Slider sd;         // UI slider supplying the volume (0..1)

    // Hooked up to the slider's OnValueChanged event in the inspector.
    public void Con_Sound()
    {
        sound.volume = sd.value;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Blood splash spawned when an enemy dies. StopPtc() halts the particle
// effect, disables further collisions, schedules cleanup and asks the
// level manager to spawn the next platform.
public class BloodParticle : MonoBehaviour
{
    public ParticleSystem ptc; // the blood particle effect to stop

    public void StopPtc()
    {
        ptc.Stop();
        // Prevent double triggers while the fading object lingers.
        GetComponent<BoxCollider>().enabled = false;
        Destroy(gameObject, 2); // remove after a 2-second delay
        LevelManager.lm.MakePlane();
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
namespace _003_队列
{
    class Program
    {
        // Exercises the custom queue implementations (SeqQueue / LinkQueue)
        // through the shared IQueue<T> interface; output demonstrates
        // Enqueue, Dequeue, Peek, Count and Clear.
        static void Main(string[] args)
        {
            //Queue<int> queue = new Queue<int>();
            //IQueue<int> queue = new SeqQueue<int>();
            IQueue<int> q = new LinkQueue<int>();
            q.Enqueue(12); // front of the queue
            q.Enqueue(45);
            q.Enqueue(67);
            q.Enqueue(89); // back of the queue
            Console.WriteLine(q.Count);
            int front = q.Dequeue(); // remove and return the front element
            Console.WriteLine(front);
            Console.WriteLine(q.Count);
            Console.WriteLine(q.Peek()); // return the front element without removing it
            Console.WriteLine(q.Count);
            q.Clear();
            Console.WriteLine(q.Count);
            Console.ReadKey();
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace json操作
{
    // Plain data holder matching one entry of the skills JSON file.
    // Public fields (rather than properties) so LitJson can populate them directly.
    class Skill
    {
        public int id;
        public int damage;
        public string name;

        public override string ToString() =>
            string.Format("Id:{0},Damage:{1},Name:{2}", id, damage, name);
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
namespace _002_栈
{
    class Program
    {
        // Exercises the custom stack implementations (SeqStack / LinkStack)
        // through the shared IStackDS<T> interface; output demonstrates
        // Push, Pop, Peek, Count and Clear.
        static void Main(string[] args)
        {
            //Stack<char> stack = new Stack<char>();
            //IStackDS<char> stack = new SeqStack<char>();
            IStackDS<char> stack = new LinkStack<char>();
            foreach (char c in "abc") // pushes 'a', 'b', 'c' in order
            {
                stack.Push(c);
            }
            Console.WriteLine(stack.Count);
            Console.WriteLine(stack.Pop());  // top element, removed
            Console.WriteLine(stack.Count);
            Console.WriteLine(stack.Peek()); // top element, not removed
            Console.WriteLine(stack.Count);
            stack.Clear();
            Console.WriteLine(stack.Count);
            Console.ReadKey();
        }
    }
}
<file_sep>using System;
namespace 钱币找零问题_贪心算法
{
    class Program
    {
        static void Main(string[] args)
        {
            int[] count = { 3, 0, 2, 1, 0, 3, 5 };       // how many notes of each denomination are available
            int[] amount = { 1, 2, 5, 10, 20, 50, 100 }; // the denominations, ascending
            int[] result = Change(320, count, amount);
            foreach (var i in result)
            {
                Console.Write(i + " ");
            }
            Console.ReadKey();
        }

        // Greedy change-making. k is the sum to pay out; count[i] is the number
        // of available notes of denomination amount[i] (amount sorted ascending).
        // Returns an array of length amount.Length + 1: slot i says how many
        // notes of amount[i] to hand out, and the last slot holds whatever part
        // of k could not be covered by the available notes.
        // FIX: removed an unused local variable (`total`) from the original.
        public static int[] Change(int k, int[] count, int[] amount)
        {
            if (k == 0) return new int[amount.Length + 1];
            int index = amount.Length - 1; // start from the largest denomination
            int[] result = new int[amount.Length + 1];
            while (true)
            {
                if (k <= 0 || index <= -1) break;
                if (k > count[index] * amount[index]) // this denomination cannot cover k: use every note of it
                {
                    result[index] = count[index];
                    k -= count[index] * amount[index];
                }
                else // enough notes: use exactly as many as fit into k
                {
                    result[index] = k / amount[index];
                    k -= result[index] * amount[index];
                }
                index--;
            }
            result[amount.Length] = k; // leftover amount that could not be changed
            return result;
        }
    }
}
<file_sep>using System;
namespace _01背包问题_自底向上法_动态规划_
{
    class Program
    {
        static void Main(string[] args)
        {
            // w[i]/p[i]: weight and value of item i (index 0 unused).
            // FIX: dropped an unused local (`int m; // knapsack capacity`) from the original.
            int[] w = { 0, 3, 4, 5 };
            int[] p = { 0, 4, 5, 6 };
            Console.WriteLine(BottomUp(10, 3, w, p));
            Console.WriteLine(BottomUp(3, 3, w, p));
            Console.WriteLine(BottomUp(4, 3, w, p));
            Console.WriteLine(BottomUp(5, 3, w, p));
            Console.WriteLine(BottomUp(7, 3, w, p));
            Console.ReadKey();
        }

        // Memo table shared across calls: result[m, i] is the best value using
        // items 1..i with capacity m. A value of 0 doubles as "not yet
        // computed", which is harmless here (zero-valued optima are recomputed
        // cheaply to the same value).
        public static int[,] result = new int[11, 4];

        // Bottom-up 0/1 knapsack: the best total value for capacity m using items 1..i.
        public static int BottomUp(int m, int i, int[] w, int[] p)
        {
            if (result[m, i] != 0) return result[m, i];
            for (int cap = 1; cap <= m; cap++)
            {
                for (int item = 1; item <= i; item++)
                {
                    if (result[cap, item] != 0) continue; // filled by an earlier call
                    if (w[item] > cap)
                    {
                        // Item does not fit: inherit the optimum without it.
                        result[cap, item] = result[cap, item - 1];
                    }
                    else
                    {
                        // Better of taking the item vs. skipping it.
                        int withItem = result[cap - w[item], item - 1] + p[item];
                        int withoutItem = result[cap, item - 1];
                        result[cap, item] = Math.Max(withItem, withoutItem);
                    }
                }
            }
            return result[m, i];
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace 观察者设计模式_猫捉老鼠
{
    // Publisher side of the observer demo: the cat announces its arrival
    // through the catCome event, to which each Mouse subscribes its handler.
    class Cat
    {
        private string name;
        private string color;

        public event Action catCome; // raised whenever the cat shows up

        public Cat(string name, string color)
        {
            this.name = name;
            this.color = color;
        }

        // Prints the arrival message, then notifies every subscriber.
        public void CatComing()
        {
            Console.WriteLine(color + "的猫" + name + "过来了,喵喵喵");
            catCome?.Invoke();
        }
    }
}
<file_sep>using System;
namespace 观察者设计模式_猫捉老鼠
{
    class Program
    {
        // Wires two mice to the cat's event (the subscription happens inside
        // the Mouse constructor) and fires a single notification.
        static void Main(string[] args)
        {
            Cat cat = new Cat("加菲猫", "黄色");
            Mouse mouse1 = new Mouse("米奇", "黑色", cat);
            Mouse mouse2 = new Mouse("唐老鸭", "红色", cat);
            // Events can only be raised inside the declaring class,
            // so we go through CatComing() rather than cat.catCome().
            cat.CatComing();
            Console.ReadKey();
        }
    }
}
<file_sep>using UnityEngine;
using System.Collections;
// Moves this object in local space using the raw input axes.
public class Move : MonoBehaviour
{
    void Update()
    {
        float sideways = Input.GetAxis("Horizontal");
        float forward = Input.GetAxis("Vertical");
        transform.Translate(sideways, 0, forward);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Fills the on-screen help text once at startup.
public class TextShow : MonoBehaviour
{
    public Text text;

    void Start()
    {
        text.text = "WASD to Move\nZ to Switch\nShift to Sneak";
    }

    // Intentionally empty; kept so the component matches the original.
    void Update()
    {
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using UnityEngine.Networking;
using System.IO;
public class LoadGame : MonoBehaviour {

    public Slider processView; // loading bar (value range 0..1)

    // Kick off the hot-fix resource download and the async scene load.
    void Start () {
        LoadGameMethod();
    }

    // Update is called once per frame
    void Update () {

    }

    public void LoadGameMethod()
    {
        StartCoroutine(LoadResourceCorotine());
        StartCoroutine(StartLoading_4(2));
    }

    // Loads `scene` asynchronously while animating the displayed percentage up
    // toward the real progress, then to 100%, and only then activates the scene.
    private IEnumerator StartLoading_4(int scene)
    {
        int displayProgress = 0;
        int toProgress = 0;
        AsyncOperation op = SceneManager.LoadSceneAsync(scene);
        op.allowSceneActivation = false; // hold activation until the bar hits 100%
        while (op.progress < 0.9f) // LoadSceneAsync stalls at 0.9 until activation is allowed
        {
            // FIX: the original wrote `(int)op.progress * 100`, which casts the
            // fractional progress to int FIRST and therefore always produced 0;
            // the multiplication must happen before the cast.
            toProgress = (int)(op.progress * 100);
            while (displayProgress < toProgress)
            {
                ++displayProgress;
                SetLoadingPercentage(displayProgress);
                yield return new WaitForEndOfFrame();
            }
            // FIX: yield in the outer loop too, so the coroutine cannot spin
            // without returning control to the engine while progress is static.
            yield return null;
        }
        toProgress = 100;
        while (displayProgress < toProgress)
        {
            ++displayProgress;
            SetLoadingPercentage(displayProgress);
            yield return new WaitForEndOfFrame();
        }
        op.allowSceneActivation = true;
    }

    // Downloads the two hot-fix Lua scripts and writes them into the local package folder.
    IEnumerator LoadResourceCorotine()
    {
        UnityWebRequest request = UnityWebRequest.Get(@"http://localhost/fish.lua.txt");
        yield return request.SendWebRequest();
        string str = request.downloadHandler.text;
        File.WriteAllText(@"F:\GameResources\CatchFish\PlayerGamePackage\fish.lua.txt",str);
        UnityWebRequest request1 = UnityWebRequest.Get(@"http://localhost/fishDispose.lua.txt");
        yield return request1.SendWebRequest();
        string str1 = request1.downloadHandler.text;
        File.WriteAllText(@"F:\GameResources\CatchFish\PlayerGamePackage\fishDispose.lua.txt", str1);
    }

    // Maps a 0..100 percentage onto the slider's 0..1 value.
    private void SetLoadingPercentage(float v)
    {
        processView.value = v / 100;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Automatic door. Keeps a reference count of actors inside the trigger zone
// and drives the Animator's "Close" flag from it; optionally requires the
// player to hold the keycard. Plays the door sound during state transitions.
public class Door : MonoBehaviour
{
    public bool requireKey = false; // if true, only a key-holding player opens the door
    private int count = 0;          // number of qualifying actors currently in the trigger
    private Animator anim;
    private AudioSource audio; // NOTE(review): hides the deprecated Component.audio property
    void Awake()
    {
        anim = GetComponent<Animator>();
        audio = GetComponent<AudioSource>();
    }
    void Update()
    {
        // Door is closed exactly when nobody qualifying is inside.
        anim.SetBool("Close", count <= 0);
        // Play the open/close sound while the animator is mid-transition.
        if (anim.IsInTransition(0))
        {
            if (!audio.isPlaying)
            {
                audio.Play();
            }
        }
    }
    void OnTriggerEnter(Collider other)
    {
        if (requireKey)
        {
            // Only a player carrying the key counts toward opening.
            if (other.tag == Tags.player)
            {
                Player player = other.GetComponent<Player>();
                if (player.hasKey)
                {
                    count++;
                }
            }
        }
        else
        {
            // Player always counts; enemies count via their solid (non-trigger) collider
            // so their sight-cone trigger does not open doors.
            if (other.tag == Tags.player)
            {
                count++;
            }else if(other.tag == Tags.enemy && other.GetComponent<Collider>().isTrigger == false)
            {
                count++;
            }
        }
    }
    void OnTriggerExit(Collider other)
    {
        // Mirror of OnTriggerEnter: decrement for the same qualifying actors.
        if (requireKey)
        {
            if (other.tag == Tags.player)
            {
                Player player = other.GetComponent<Player>();
                if (player.hasKey)
                {
                    count--;
                }
            }
        }
        else
        {
            if (other.tag == Tags.player)
            {
                count--;
            }
            else if (other.tag == Tags.enemy && other.GetComponent<Collider>().isTrigger == false)
            {
                count--;
            }
        }
    }
}
<file_sep>using System;
using System.Net.Sockets;
using System.Text;
namespace tcpclient
{
    class Program
    {
        // Minimal TCP chat client: connects at startup, then forever reads a
        // line from the console and sends it UTF-8 encoded to the server.
        static void Main(string[] args)
        {
            TcpClient client = new TcpClient("192.168.3.11", 7788); // constructing the TcpClient connects to the server immediately
            NetworkStream stream = client.GetStream(); // all data exchange goes through the network stream
            while (true)
            {
                string message = Console.ReadLine();
                byte[] data = Encoding.UTF8.GetBytes(message);
                stream.Write(data, 0, data.Length); // Write sends the bytes to the server
            }
            // NOTE(review): unreachable — the loop above never exits, so the
            // stream and client are never closed (the compiler warns here).
            stream.Close();
            client.Close();
            Console.ReadKey();
        }
    }
}
<file_sep>using System;
namespace 最大子数组问题_分治法
{
    class Program
    {
        // Maximum-subarray via divide and conquer, applied to daily stock
        // price changes to find the best buy/sell window.
        struct SubArray // result triple: index range plus its sum
        {
            public int startIndex;
            public int endIndex;
            public int total;
        }
        static void Main(string[] args)
        {
            int[] priceArray = { 100, 113, 110, 85, 105, 102, 86, 63, 81, 101, 94, 106, 101, 79, 94, 90, 97 };
            int[] pf = new int[priceArray.Length - 1]; // day-to-day price deltas
            for (int i = 1; i < priceArray.Length; i++)
            {
                pf[i - 1] = priceArray[i] - priceArray[i - 1];
            }
            SubArray subArray = GetMaxSubArray(0, pf.Length - 1, pf);
            Console.WriteLine(subArray.startIndex);
            Console.WriteLine(subArray.endIndex);
            Console.ReadKey();
        }
        // Returns the maximum subarray of `array` within indices [low, high].
        // The answer is either entirely in the low half, entirely in the high
        // half, or a crossing subarray spanning the midpoint — take the best of the three.
        static SubArray GetMaxSubArray(int low,int high,int[] array)
        {
            // Base case: a single element is its own maximum subarray.
            if (low == high)
            {
                SubArray subarray ;
                subarray.startIndex = low;
                subarray.endIndex = high;
                subarray.total = array[low];
                return subarray;
            }
            int mid = (low + high) / 2;
            SubArray subArray1 = GetMaxSubArray(low, mid, array); // best entirely within the low half
            SubArray subArray2 = GetMaxSubArray(mid + 1, high, array); // best entirely within the high half
            // Crossing case, left part: best suffix [i, mid] of the low half.
            int total1 = array[mid];
            int startIndex = mid;
            int totalTemp = 0;
            for (int i = mid; i >= low; i--)
            {
                totalTemp += array[i];
                if (totalTemp > total1)
                {
                    total1 = totalTemp;
                    startIndex = i;
                }
            }
            // Crossing case, right part: best prefix [mid+1, j] of the high half.
            int total2 = array[mid + 1];
            int endIndex = mid + 1;
            totalTemp = 0;
            for (int j = mid +1; j <=high; j++)
            {
                totalTemp += array[j];
                if (totalTemp > total2)
                {
                    total2 = totalTemp;
                    endIndex = j;
                }
            }
            // Compare the three candidates and return the largest.
            SubArray subArray3;
            subArray3.startIndex = startIndex;
            subArray3.endIndex = endIndex;
            subArray3.total = total1 + total2;
            if (subArray1.total >= subArray2.total&&subArray1.total >= subArray3.total)
            {
                return subArray1;
            }
            else if (subArray2.total >= subArray1.total && subArray2.total >= subArray3.total)
            {
                return subArray2;
            }
            else
            {
                return subArray3;
            }
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Eases the volumetric light's scattering coefficient toward targetScatter
// by 10% of the remaining gap per frame.
public class VolumetricLightController : MonoBehaviour
{
    public VolumetricLight vLight;
    public float targetScatter;

    void Update()
    {
        float eased = Mathf.Lerp(vLight.ScatteringCoef, targetScatter, 0.1f);
        vLight.ScatteringCoef = eased;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Scene-wide alarm light. While the alarm is on, the light's intensity
// ping-pongs between lowIntensity and highIntensity by lerping toward the
// current target and flipping the target near arrival; while off, it fades to 0.
public class AlermLight : MonoBehaviour
{
    public static AlermLight _instance; // singleton handle, set in Awake
    public bool alermOn = false;
    public float animationSpeed = 1;
    private float lowIntensity = 0;
    private float highIntensity = 0.5f;
    private float targetIntensity; // which end of the ping-pong we are heading toward
    private Light light; // NOTE(review): hides the deprecated Component.light property
    // Start is called before the first frame update
    void Awake()
    {
        targetIntensity = highIntensity;
        alermOn = false;
        _instance = this;
        light = GetComponent<Light>();
    }
    // Update is called once per frame
    void Update()
    {
        if (alermOn)
        {
            light.intensity = Mathf.Lerp(light.intensity, targetIntensity, Time.deltaTime * animationSpeed);
            // Close enough to the target: flip direction.
            // (== on floats is safe here because targetIntensity is only ever
            // assigned exactly lowIntensity or highIntensity.)
            if(Mathf.Abs(light.intensity - targetIntensity) < 0.05f)
            {
                if(targetIntensity == highIntensity)
                {
                    targetIntensity = lowIntensity;
                }else if(targetIntensity == lowIntensity)
                {
                    targetIntensity = highIntensity;
                }
            }
        }
        else
        {
            // Alarm off: fade out smoothly.
            light.intensity = Mathf.Lerp(light.intensity, 0, Time.deltaTime * animationSpeed);
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Security camera view cone: while the player remains inside the trigger,
// keep reporting their position to the game controller.
public class CCTVCam : MonoBehaviour
{
    void OnTriggerStay(Collider other)
    {
        if (other.tag != Tags.player) return;
        GameController._instance.SeePlayer(other.transform);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Laser tripwire. Optionally flickers: visible for onTime seconds, then
// hidden for offTime seconds, toggling the renderer. While the player is
// inside the trigger, their position is reported to the game controller.
public class Laser : MonoBehaviour
{
    public bool isFlicker = false; // if false, the laser stays permanently on
    public float onTime = 3;
    public float offTime = 3;
    private float timer = 0; // time spent in the current on/off phase
    private Renderer renderer; // NOTE(review): hides the deprecated Component.renderer property
    void Start()
    {
        renderer = GetComponent<Renderer>();
    }
    void Update()
    {
        if (isFlicker)
        {
            timer += Time.deltaTime;
            // Visible phase elapsed: hide and restart the timer.
            if (renderer.enabled)
            {
                if(timer >= onTime)
                {
                    renderer.enabled = false;
                    timer = 0;
                }
            }
            // Hidden phase elapsed: show and restart the timer.
            // (The reset above zeroes the timer, so both branches cannot
            // complete a phase within the same frame.)
            if (!renderer.enabled)
            {
                if (timer >= offTime)
                {
                    renderer.enabled = true;
                    timer = 0;
                }
            }
        }
    }
    // NOTE(review): fires whenever the player is inside the trigger volume,
    // even while the beam is invisible — presumably intended; confirm.
    void OnTriggerStay(Collider other)
    {
        if (other.tag == Tags.player)
        {
            GameController._instance.SeePlayer(other.transform);
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Key pickup: grants the player the key, plays a pickup sound at the
// card's position and removes itself from the scene.
public class Keycard : MonoBehaviour
{
    public AudioClip musicPickup;

    void OnTriggerEnter(Collider other)
    {
        if (other.tag != Tags.player) return;
        other.GetComponent<Player>().hasKey = true;
        AudioSource.PlayClipAtPoint(musicPickup, transform.position);
        Destroy(gameObject);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Raises the volumetric light scattering while something occupies this
// trigger volume, and lowers it back to zero on exit.
public class VolumetricLightTrigger : MonoBehaviour
{
    public VolumetricLightController vLightCon;

    void OnTriggerEnter()
    {
        vLightCon.targetScatter = 1.0f;
    }

    void OnTriggerExit()
    {
        vLightCon.targetScatter = 0.0f;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Scene singleton that spawns floor planes ahead of the player, each one a
// random 7-15 units beyond the previous spawn point.
public class LevelManager : MonoBehaviour
{
    public static LevelManager lm; // singleton handle
    public GameObject plane;
    private Vector3 pos = Vector3.zero; // not read here; kept for parity with the original
    public Vector3 origin = new Vector3(0, -1.41f, 0);

    void Awake()
    {
        lm = this;
    }

    void Start()
    {
        MakePlane();
    }

    // Advance the spawn origin forward by a random step and place a plane there.
    public void MakePlane()
    {
        float step = Random.Range(7f, 15f);
        origin += Vector3.forward * step;
        Instantiate(plane, origin, plane.transform.rotation);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// UI layer of the game: shows/hides the menu, in-game HUD, game-over,
// settings and rank panels, animating panel positions with DOTween.
// Button handlers delegate sounds and scene control to Ctrl.
public class View : MonoBehaviour
{
    public Ctrl ctrl;
    public RectTransform logoName;
    public RectTransform menuUI;
    public RectTransform gameUI;
    public GameObject restartButton;
    public GameObject gameOverUI;
    public GameObject settingUI;
    public GameObject rankUI;
    public Text score;
    public Text highScore;
    public Text gameOverScore;
    public Text rankScore;
    public Text rankHighScore;
    public Text rankNumbersGame;
    private GameObject mute; // mute icon inside the settings panel
    void Awake()
    {
        ctrl = GameObject.FindGameObjectWithTag("Ctrl").GetComponent<Ctrl>();
        mute = transform.Find("Canvas/SettingUI/AudioButton/Mute").gameObject;
    }
    // Slide the logo and menu into view.
    public void ShowMenu()
    {
        logoName.gameObject.SetActive(true);
        logoName.DOMoveY(770.05f, 0.5f);
        menuUI.gameObject.SetActive(true);
        menuUI.DOMoveY(103.3f, 0.5f);
    }
    // Slide the logo and menu off screen, deactivating them when the tween finishes.
    public void HideMenu()
    {
        logoName.DOMoveY(1050.9f, 0.5f)
            .OnComplete(delegate { logoName.gameObject.SetActive(false); }); // runs when the move completes
        menuUI.DOMoveY(-103.3f, 0.5f)
            .OnComplete(delegate { menuUI.gameObject.SetActive(false); });
    }
    // Refresh the score labels on the in-game HUD.
    public void UpdateGameUI(int score, int highScore)
    {
        this.score.text = score.ToString();
        this.highScore.text = highScore.ToString();
    }
    // Show the in-game HUD, optionally seeding the score labels.
    public void ShowGameUI(int score = 0,int highScore = 0)
    {
        this.score.text = score.ToString();
        this.highScore.text = highScore.ToString();
        gameUI.gameObject.SetActive(true);
        gameUI.DOMoveY(781.66f, 0.5f);
    }
    public void HideGameUI()
    {
        gameUI.DOMoveY(1040.3f, 0.5f)
            .OnComplete(delegate { gameUI.gameObject.SetActive(false); });
    }
    public void ShowRestartButton()
    {
        restartButton.SetActive(true);
    }
    public void ShowGameOverUI(int score = 0)
    {
        gameOverUI.SetActive(true);
        gameOverScore.text = score.ToString();
    }
    public void HideGameOverUI()
    {
        gameOverUI.SetActive(false);
    }
    public void OnHomeButtonClick()
    {
        ctrl.audioManager.PlayCursor();
        SceneManager.LoadScene(SceneManager.GetActiveScene().buildIndex); // reload the current scene
    }
    public void OnSettingButtonClick()
    {
        ctrl.audioManager.PlayCursor();
        settingUI.SetActive(true);
    }
    // Toggle the mute icon's visibility.
    public void SetMuteActive(bool isActive)
    {
        mute.SetActive(isActive);
    }
    // Clicking anywhere on the settings panel closes it.
    public void OnSettingUIClick()
    {
        ctrl.audioManager.PlayCursor();
        settingUI.SetActive(false);
    }
    //public void OnRankButtonClick()
    //{
    //    ctrl.audioManager.PlayCursor();
    //    rankUI.SetActive(true);
    //}
    // Populate and open the rank panel.
    public void ShowRankUI(int score,int highScore,int numbersGame)
    {
        this.rankScore.text = score.ToString();
        this.rankHighScore.text = highScore.ToString();
        this.rankNumbersGame.text = numbersGame.ToString();
        rankUI.SetActive(true);
    }
    // Clicking anywhere on the rank panel closes it.
    public void OnRankUIClick()
    {
        ctrl.audioManager.PlayCursor();
        rankUI.SetActive(false);
    }
}
<file_sep>using System;
namespace 二叉排序树_链式存储
{
    class Program
    {
        // Builds a binary search tree, then demonstrates in-order traversal,
        // lookup (hit and miss) and deletion.
        static void Main(string[] args)
        {
            BSTree tree = new BSTree();
            // Insertion order follows the level-order layout of the target tree.
            int[] data = { 62, 58, 88, 47, 73, 99, 35, 51, 93, 37 };
            foreach (var value in data)
            {
                tree.Add(value);
            }
            tree.MiddleTraversal();
            Console.WriteLine();
            Console.WriteLine(tree.Find(99));
            Console.WriteLine(tree.Find(100));
            tree.Delete(35);
            tree.MiddleTraversal();
            Console.WriteLine();
            Console.ReadKey();
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// The player's projectile: any enemy it touches is told to die.
public class Ball : MonoBehaviour
{
    void OnTriggerEnter(Collider other)
    {
        if (other.gameObject.tag != "Enemy") return;
        other.gameObject.GetComponent<FController>().Hurt();
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace 二叉树_顺序结构存储
{ // empty nodes are represented by storing -1 at that array position
    // Binary tree in sequential (array) storage: node number k (1-based) has
    // children 2k and 2k+1, so index i maps to children at 2(i+1)-1 and 2(i+1).
    // NOTE(review): empty slots are detected with data[index].Equals(-1), which
    // effectively assumes T is int (or something equal to boxed -1) — confirm.
    class BiTree<T>
    {
        private T[] data;      // backing array in level order
        private int count = 0; // number of slots filled so far
        public BiTree(int capacity) // parameter is the fixed capacity
        {
            data = new T[capacity];
        }
        // Appends the next node in level order; returns false when full.
        public bool Add(T item)
        {
            if (count >= data.Length)
            {
                return false;
            }
            data[count] = item;
            count++;
            return true;
        }
        // Pre-order traversal (root, left, right) printed to the console.
        public void FirstTraversal()
        {
            FirstTraversal(0);
        }
        private void FirstTraversal(int index)
        {
            if (index >= count) return;
            int number = index + 1; // 1-based node number of this slot
            if (data[index].Equals(-1)) return; // -1 marks an absent node
            Console.Write(data[index] + " ");
            int leftNumber = number * 2;      // node number of the left child
            int rightNumber = number * 2 + 1; // node number of the right child
            FirstTraversal(leftNumber - 1);
            FirstTraversal(rightNumber - 1);
        }
        // In-order traversal (left, root, right) printed to the console.
        public void MiddleTraversal()
        {
            MiddleTraversal(0);
        }
        private void MiddleTraversal(int index)
        {
            if (index >= count) return;
            int number = index + 1; // 1-based node number of this slot
            if (data[index].Equals(-1)) return;
            int leftNumber = number * 2;      // node number of the left child
            int rightNumber = number * 2 + 1; // node number of the right child
            MiddleTraversal(leftNumber - 1);
            Console.Write(data[index] + " ");
            MiddleTraversal(rightNumber - 1);
        }
        // Post-order traversal (left, right, root) printed to the console.
        public void LastTraversal()
        {
            LastTraversal(0);
        }
        private void LastTraversal(int index)
        {
            if (index >= count ) return;
            int number = index + 1; // 1-based node number of this slot
            if (data[index].Equals(-1)) return;
            int leftNumber = number * 2;      // node number of the left child
            int rightNumber = number * 2 + 1; // node number of the right child
            LastTraversal(leftNumber - 1);
            LastTraversal(rightNumber - 1);
            Console.Write(data[index] + " ");
        }
        // Level-order traversal: the array order itself, skipping empty slots.
        public void LayerTraversal()
        {
            for (int i = 0; i < count; i++)
            {
                if (data[i].Equals(-1)) continue;
                Console.Write(data[i] + " ");
            }
            Console.WriteLine();
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// Jumping-ball player: holding Space charges a jump (shown on a scrollbar),
// releasing applies an impulse proportional to the hold time. Landing on a
// platform trigger scores a point; falling below y = -20 restarts the game.
public class Player : MonoBehaviour
{
    public Rigidbody myRig;
    private float keyTime = 0; // seconds Space has been held (jump charge)
    public Scrollbar myBar;    // visual charge indicator
    public Text score;
    private float myScore = 0;
    // Update is called once per frame
    void Update()
    {
        // Charge while Space is held.
        if (Input.GetKey(KeyCode.Space))
        {
            keyTime += Time.deltaTime;
        }
        // On release, jump up-and-forward scaled by the charge, then reset.
        if (Input.GetKeyUp(KeyCode.Space))
        {
            myRig.AddForce(new Vector3(0,1,1) * 1000f * keyTime);
            keyTime = 0;
        }
        myBar.size = keyTime;
        // Fell off the world: reload the scene.
        if(transform.position.y < -20)
        {
            SceneManager.LoadScene(0);
        }
    }
    // Landing trigger: stop the platform's blood particle and score a point.
    private void OnTriggerEnter(Collider other)
    {
        //Destroy(other.gameObject);
        other.GetComponent<BloodParticle>().StopPtc();
        myScore++;
        score.text = myScore.ToString();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Applies shot damage to the player while the enemy's shooting animation
// curve ("Shot") is active, ensuring damage is computed only once per shot.
public class EnemyShooting : MonoBehaviour
{
    public float minDamage = 30;
    private Animator anim;
    private bool haveShoot = false; // whether damage has already been applied during this shot
    private PlayerHealth health;
    void Awake()
    {
        anim = GetComponent<Animator>();
        health = GameObject.FindGameObjectWithTag(Tags.player).GetComponent<PlayerHealth>();
    }
    // Update is called once per frame
    void Update()
    {
        // "Shot" is an animation-curve parameter; > 0.5 marks the firing window.
        if(anim.GetFloat("Shot") > 0.5)
        {
            Shooting();
        }
        else
        {
            // Firing window over: re-arm for the next shot.
            haveShoot = false;
        }
    }
    private void Shooting()
    {
        if(haveShoot == false)
        {
            // Damage scales down with distance to the player.
            float damage = minDamage + 90 - 9 * (transform.position - health.transform.position).magnitude;
            health.TakeDamage(damage);
            haveShoot = true;
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
namespace 活动选择问题_贪心算法_递归解决
{
    class Program
    {
        static void Main(string[] args)
        {
            List<int> list = ActivitySelection(1, 11, 0, 24);
            foreach (var temp in list)
            {
                Console.WriteLine(temp);
            }
            Console.ReadKey();
        }

        // s[k]/f[k]: start/finish time of activity k (index 0 is a sentinel).
        static int[] s = { 0, 1, 3, 0, 5, 3, 5, 6, 8, 8, 2, 12 };
        static int[] f = { 0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 };

        // Greedy recursive activity selection over activities
        // startActivityNumber..endActivityNumber restricted to [startTime, endTime].
        // Because f[] is sorted ascending, the first compatible activity found in
        // index order is the one with the earliest finish time. The selected
        // activity numbers are returned in reverse chronological order (the
        // original's ordering, preserved).
        public static List<int> ActivitySelection(int startActivityNumber, int endActivityNumber, int startTime, int endTime)
        {
            if (startActivityNumber > endActivityNumber || startTime >= endTime)
            {
                return new List<int>();
            }
            // Find the compatible activity with the earliest finish time.
            int tempNumber = 0;
            for (int number = startActivityNumber; number <= endActivityNumber; number++)
            {
                if (s[number] >= startTime && f[number] <= endTime)
                {
                    tempNumber = number;
                    break;
                }
            }
            // FIX: if no activity fits the window the original fell through with
            // tempNumber == 0, recursed from activity 1 again and appended the
            // bogus sentinel activity 0 to the result. Return an empty selection instead.
            if (tempNumber == 0)
            {
                return new List<int>();
            }
            List<int> list = ActivitySelection(tempNumber + 1, endActivityNumber, f[tempNumber], endTime);
            list.Add(tempNumber);
            return list;
        }
    }
}
<file_sep>using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
namespace _6_tcplistener
{
    class Program
    {
        // Minimal TCP chat server: accepts a single client and forever prints
        // each UTF-8 message it receives.
        static void Main(string[] args)
        {
            TcpListener listener = new TcpListener(IPAddress.Parse("172.16.31.10"), 7788); // TcpListener wraps a socket; it creates the socket object itself
            listener.Start(); // begin listening
            Console.WriteLine("开始监听");
            TcpClient client = listener.AcceptTcpClient(); // block until a client connects
            //Console.WriteLine("一个客户端连接过来");
            NetworkStream stream = client.GetStream(); // network stream for reading the client's data
            byte[] data = new byte[1024];
            while (true) {
                int length = stream.Read(data, 0, 1024); // read into `data`; length is the byte count actually read
                string message = Encoding.UTF8.GetString(data, 0, length);
                Console.WriteLine("收到了消息:" + message);
            }
            // NOTE(review): unreachable — the loop above never exits, so these
            // resources are never released (the compiler warns here).
            stream.Close();
            client.Close();
            listener.Stop();
            Console.ReadKey();
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using XLua;
using System.IO;
using UnityEngine.Networking;
// XLua hot-fix bootstrap: creates a Lua VM, loads scripts from a local
// folder via a custom loader, runs 'fish' on startup and 'fishDispose' when
// disabled. Also exposes helpers Lua can call to download AssetBundle
// prefabs into a shared dictionary and fetch them by name.
public class HotFixScript : MonoBehaviour
{
    private LuaEnv luaEnv;
    public static Dictionary<string, GameObject> prefabDict = new Dictionary<string, GameObject>();
    // Start is called before the first frame update
    void Awake()
    {
        luaEnv = new LuaEnv();
        luaEnv.AddLoader(MyLoader); // resolve require() through MyLoader first
        luaEnv.DoString("require 'fish'");
    }
    // Custom loader: maps a require name to a .lua.txt file on disk.
    private byte[] MyLoader(ref string filePath)
    {
        string absPath = @"F:\GameResources\CatchFish\FishingJoy\Assets\" + filePath + ".lua.txt";
        return System.Text.Encoding.UTF8.GetBytes(File.ReadAllText(absPath));
    }
    private void OnDisable()
    {
        // Let the Lua side clean up before the VM goes away.
        luaEnv.DoString("require 'fishDispose'");
    }
    private void OnDestroy()
    {
        luaEnv.Dispose();
    }
    // Called from Lua: download an AssetBundle and cache the named prefab.
    [LuaCallCSharp]
    public void LoadResource(string resName,string filePath)
    {
        StartCoroutine(LoaderResourceCorotine(resName,filePath));
    }
    IEnumerator LoaderResourceCorotine(string resName,string filePath)
    {
        UnityWebRequest request = UnityWebRequestAssetBundle.GetAssetBundle(@"http://localhost/AssetBundles/" + filePath);
        yield return request.SendWebRequest();
        AssetBundle ab = DownloadHandlerAssetBundle.GetContent(request);
        GameObject gameObject = ab.LoadAsset<GameObject>(resName);
        prefabDict.Add(resName, gameObject);
    }
    // Called from Lua: fetch a previously cached prefab by name.
    [LuaCallCSharp]
    public static GameObject GetGameObject(string goName)
    {
        return prefabDict[goName];
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.AI;
public class EnemyMoveAI : MonoBehaviour
{
public Transform[] wayPoints;
public float patrolTime = 3f;
public float chaseTime = 3f;
private float patrolTimer = 0;
private float chaseTimer = 0;
private int index = 0;
private NavMeshAgent navAgent;
private EnemySight sight;
private PlayerHealth health;
void Awake()
{
navAgent = GetComponent<NavMeshAgent>();
navAgent.destination = wayPoints[index].position;//导航的目标位置
navAgent.updatePosition = false;
navAgent.updateRotation = false;
sight = GetComponent<EnemySight>();
health = GameObject.FindGameObjectWithTag(Tags.player).GetComponent<PlayerHealth>();
}
// Update is called once per frame
void Update()
{
if (sight.playerInSight && health.hp > 0)
{
Shooting();
}
else if (sight.alertPosition != Vector3.zero && health.hp > 0)
{
Chasing();
}
else
{
Patrolling();
}
}
private void Shooting()
{
//navAgent.Stop();
navAgent.isStopped = true;
}
//巡逻
private void Patrolling()
{
navAgent.isStopped = false;
navAgent.speed = 3;
navAgent.destination = wayPoints[index].position;
navAgent.updatePosition = false;
navAgent.updateRotation = false;
if (navAgent.remainingDistance < 0.01f)
{
patrolTimer += Time.deltaTime;
if (patrolTimer > patrolTime)
{
index++;
index %= 4;
navAgent.destination = wayPoints[index].position;
navAgent.updatePosition = false;
navAgent.updateRotation = false;
patrolTimer = 0;
}
}
}
private void Chasing()
{
navAgent.isStopped = false;
navAgent.speed = 6;
navAgent.destination = sight.alertPosition;
navAgent.updatePosition = false;
navAgent.updateRotation = false;
if(navAgent.remainingDistance < 2f)
{
chaseTimer += Time.deltaTime;
if(chaseTimer > chaseTime)
{
sight.alertPosition = Vector3.zero;
GameController._instance.lastPlayerPosition = Vector3.zero;
GameController._instance.alermOn = false;
}
}
}
}
<file_sep>using System;
namespace _008_快速排序
{
class Program
{
//对数组中索引从left到right之间的数做排序
static void QuickSort(int[] dataArray,int left,int right)
{
if (left < right)
{
int x = dataArray[left]; //基准数
int i = left;
int j = right;
while (i<j) //当i=j时说明找到的中间位置,循环结束
{
//从后往前比较,找一个比x小或者相等的数字,放在位于i位置的坑里
while (i < j)
{
if (dataArray[j] <= x)
{
dataArray[i] = dataArray[j];
break;
}
else
{
j--;
}
}
//从前往后比较,找一个比x大的数字,放在位于j位置的坑里
while (i < j)
{
if (dataArray[i] > x)
{
dataArray[j] = dataArray[i];
break;
}
else
{
i++;
}
}
}
dataArray[i] = x; //现在i=j是中间位置
QuickSort(dataArray, left,i - 1); //递归
QuickSort(dataArray, i + 1, right);
}
}
static void Main(string[] args)
{
int[] data = new int[] { 42, 58, 34, 27, 8, 53, 42 };
QuickSort(data, 0, data.Length - 1);
foreach (var temp in data)
{
Console.WriteLine(temp);
}
Console.ReadKey();
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class FollowPlayer : MonoBehaviour
{
public float moveSpeed = 3;
public float rotateSpeed = 3;
private Vector3 offset;
private Transform player;
void Awake()
{
player = GameObject.FindGameObjectWithTag(Tags.player).transform;
offset = transform.position - player.position;
}
void Update()
{
Vector3 beginPos = player.position + offset;
//在beginPos和player正上方的endPos之间用插值法取三个点,依次判断摄像机位于这五个点处拍摄player是否有视线阻挡
Vector3 endPos = player.position + offset.magnitude * Vector3.up;//magnitude返回向量长度
Vector3 pos1 = Vector3.Lerp(beginPos, endPos, 0.25f);
Vector3 pos2 = Vector3.Lerp(beginPos, endPos, 0.5f);
Vector3 pos3 = Vector3.Lerp(beginPos, endPos, 0.75f);
Vector3[] posArray = new Vector3[] { beginPos, pos1, pos2, pos3, endPos };
Vector3 targetPos = posArray[0];
for(int i = 0; i<5; i++)
{
RaycastHit hitinfo;
if(Physics.Raycast(posArray[i],player.position - posArray[i],out hitinfo))
{
if(hitinfo.collider.tag!= Tags.player)
{
continue;
}
else
{
targetPos = posArray[i];
break;
}
}
else
{
targetPos = posArray[i];
break;
}
}
transform.position = Vector3.Lerp(transform.position, targetPos, Time.deltaTime * moveSpeed);
Quaternion nowRotation = transform.rotation;
transform.LookAt(player.position);//摄像机正对着目标位置
transform.rotation = Quaternion.Lerp(nowRotation,transform.rotation,Time.deltaTime * rotateSpeed);
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class LevelManager : MonoBehaviour
{
public static LevelManager lm;
public Transform player;
public GameObject enemy;
public float rateTime = 2f;
private float myTime;
// Start is called before the first frame update
void Awake()
{
lm = this;
}
// Update is called once per frame
void Update()
{
myTime += Time.deltaTime;
if(myTime >= rateTime)
{
Vector2 r = Random.insideUnitCircle.normalized * 30;
Instantiate(enemy,player.position + new Vector3(r.x,0,r.y),Quaternion.Euler(new Vector3(0,Random.Range(0.0f,360.0f),0)));
myTime = 0;
}
}
}
| 650fd9297de2ad91282ddbf3762dfc9d44a9a229 | [
"Markdown",
"C#"
] | 65 | C# | sakurazxq/Game-Scripts | e1a4eb9157d6959e9b72ea616f0e93eb6445ec45 | 8e8691e1911ab620a9c537a7037ba47456ca3ee0 |
refs/heads/master | <file_sep>document.getElementById('button1').addEventListener('click', loadText);
document.getElementById('button2').addEventListener('click', loadJSON);
document.getElementById('button3').addEventListener('click', loadREST);
function loadText() {
fetch('data.txt')
.then(function (response) {
return response.text();
})
.then(function (data) {
console.log(data);
document.getElementById('result').innerHTML = data;
})
.catch(function (error) {
console.log(error);
});
}
function loadJSON() {
fetch('employees.json')
.then(function (response) {
console.log(response);
return response.json();
})
.then(function (data) {
let html = '';
data.forEach(employee => {
html += `<li>${employee.name} - ${employee.job} - ${employee.title}</li>`;
});
document.getElementById('result').innerHTML = html
});
}
function loadREST() {
fetch('https://picsum.photos/list')
.then(function (response) {
return response.json();
})
.then(function (images) {
let html = '';
images.forEach(function (image) {
html += `<li>
<a target="_blank" href="${image.post_url}">View Image </a> ${image.author}
</li>
`
});
document.getElementById('result').innerHTML = html;
})
.catch(function (error) {
console.log(error);
})
}
| c6667db41f779e5f87d477ca950546b45f457538 | [
"JavaScript"
] | 1 | JavaScript | farrdy/FetchAPI | 4cd7af7385ddecb78546bd9f73762fa2550136c0 | c9462d8c5747ad98c72fb30064b3011f00c892da |
refs/heads/master | <repo_name>shudery/magot<file_sep>/scripts/less/find.js
// find all less files
import glob from 'glob';
const pattern = '**/*.less';
export default function find(src) {
return glob.sync(pattern, { cwd: src, absolute: true });
}
| e538d42ab06d6151f14621b15df54817eacfb23e | [
"JavaScript"
] | 1 | JavaScript | shudery/magot | d48d51afb9a81c4aab2d86a27000cf5f636f4a5e | 047fc4de3b7bb983ed16db065f9e3f5ce7442bc7 |
refs/heads/master | <file_sep>package com.amit.petsearch.Interface;
import com.amit.petsearch.Model.MovieDetails;
import com.amit.petsearch.Model.PopularMovies;
import retrofit2.Call;
import retrofit2.http.GET;
import retrofit2.http.Path;
import retrofit2.http.Query;
public interface API {
String BASE_URL = "https://api.themoviedb.org/3/movie/";
@GET("popular")
Call<PopularMovies> getPopularMovies(@Query("api_key") String API_KEY);
@GET("{movie_id}")
Call<MovieDetails> getMovieDetails(@Path("movie_id") String id, @Query("api_key") String API_KEY);
}<file_sep>package com.amit.petsearch.ViewModel;
import android.arch.lifecycle.MutableLiveData;
import android.arch.lifecycle.ViewModel;
import android.util.Log;
import com.amit.petsearch.Interface.API;
import com.amit.petsearch.Model.ApiResponse;
import com.amit.petsearch.Model.MovieDetails;
import com.amit.petsearch.Model.PopularMovies;
import com.amit.petsearch.Utils.APIClient;
import com.amit.petsearch.Utils.Utils;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class MovieViewModel extends ViewModel {
private MutableLiveData<ApiResponse> myPopularMovies;
private MutableLiveData<ApiResponse> myMovieDetails;
public MutableLiveData<ApiResponse> getPopularMovie(){
if (myPopularMovies==null){
myPopularMovies = new MutableLiveData<>();
loadPopularMovies();
}
return myPopularMovies;
}
public MutableLiveData<ApiResponse> getMovieDetails(String movieId){
if (myMovieDetails==null){
myMovieDetails = new MutableLiveData<>();
loadMovieDetails(movieId);
}
return myMovieDetails;
}
private void loadMovieDetails(String movieId) {
API api = APIClient.getClient().create(API.class);
Call<MovieDetails> call = api.getMovieDetails(movieId,Utils.API_KEY);
call.enqueue(new Callback<MovieDetails>() {
@Override
public void onResponse(Call<MovieDetails> call, Response<MovieDetails> response) {
Log.d("asdfg","success "+response.body());
if (response.body()!=null){
myMovieDetails.postValue(new ApiResponse(true,"success",response.body()));
}else {
myMovieDetails.postValue(new ApiResponse(false,"something went wrong"));
}
}
@Override
public void onFailure(Call<MovieDetails> call, Throwable t) {
Log.d("asdfg","failure "+t);
myMovieDetails.postValue(new ApiResponse(false,"failure"));
}
});
}
private void loadPopularMovies() {
API api = APIClient.getClient().create(API.class);
Call<PopularMovies> call = api.getPopularMovies(Utils.API_KEY);
call.enqueue(new Callback<PopularMovies>() {
@Override
public void onResponse(Call<PopularMovies> call, Response<PopularMovies> response) {
Log.d("asdf","success "+response.body());
if (response.body()!=null){
myPopularMovies.postValue(new ApiResponse(true,"success",response.body()));
}else {
myPopularMovies.postValue(new ApiResponse(false,"something went wrong"));
}
}
@Override
public void onFailure(Call<PopularMovies> call, Throwable t) {
myPopularMovies.postValue(new ApiResponse(false,"failure"));
}
});
}
}<file_sep>package com.amit.petsearch;
import android.app.ProgressDialog;
import android.arch.lifecycle.Observer;
import android.arch.lifecycle.ViewModelProviders;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.amit.petsearch.Model.ApiResponse;
import com.amit.petsearch.Model.MovieDetails;
import com.amit.petsearch.Utils.Utils;
import com.amit.petsearch.ViewModel.MovieViewModel;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import java.text.MessageFormat;
public class MovieDetailsActivity extends AppCompatActivity {
private String movieId;
private TextView overview;
private TextView duration;
private TextView releaseDate;
private TextView rating;
private TextView genres;
private TextView language;
private TextView budget;
private TextView revenue;
private ImageView poster;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_movie_details);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
if (getIntent()!=null && getIntent().hasExtra("movie_id")){
movieId = getIntent().getStringExtra("movie_id");
Log.d("asdfg","movie id "+movieId);
}
checkInternetConnection();
}
private void checkInternetConnection() {
if (Utils.checkInternetConnection(this)){
initBindViews();
}else {
Toast.makeText(this,getResources().getString(R.string.check_internet),Toast.LENGTH_LONG).show();
}
}
private void initBindViews() {
overview = findViewById(R.id.overview);
duration = findViewById(R.id.duration);
releaseDate = findViewById(R.id.release_date);
rating = findViewById(R.id.rating);
genres = findViewById(R.id.genres);
language = findViewById(R.id.lang);
budget = findViewById(R.id.budget);
revenue = findViewById(R.id.revenue);
poster= findViewById(R.id.banner_poster);
final ProgressDialog progressDialog = new ProgressDialog(this,R.style.MyAlertDialogStyle);
progressDialog.setMessage(getApplicationContext().getResources().getString(R.string.movie_detail));
progressDialog.setCancelable(false);
progressDialog.show();
MovieViewModel viewModel = ViewModelProviders.of(this).get(MovieViewModel.class);
viewModel.getMovieDetails(movieId).observe(this, new Observer<ApiResponse>() {
@Override
public void onChanged(@Nullable ApiResponse apiResponse) {
assert apiResponse != null;
updateUI(apiResponse);
if (progressDialog.isShowing())
progressDialog.dismiss();
}
});
}
private void updateUI(ApiResponse response) {
if (response.isStatus()){
showMovieDetails(response.getMovieDetails());
}else {
Toast.makeText(this,getResources().getString(R.string.something_wrong),Toast.LENGTH_LONG).show();
}
}
private void showMovieDetails(MovieDetails details) {
if (details!=null){
getSupportActionBar().setTitle(details.getTitle());
overview.setText(details.getOverview());
duration.setText(MessageFormat.format("{0} minutes", details.getRuntime()));
releaseDate.setText(Utils.convertTimeFormat(details.getRelease_date()));
rating.setText(String.valueOf(details.getVote_average()));
genres.setText(Utils.getGenresToString(details.getGenres()));
language.setText(details.getOriginal_language());
budget.setText(Utils.getBudget(details.getBudget()));
revenue.setText(Utils.getRevenue(details.getRevenue()));
setPosterImage(details.getBackdrop_path());
}
}
private void setPosterImage(String posterUrl) {
if (posterUrl!=null){
Glide.with(this)
.load(Utils.BANNER_URL+posterUrl)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.placeholder(R.drawable.spinner_loader).into(poster);
}else {
poster.setImageDrawable(getResources().getDrawable(R.drawable.image_not_available));
}
}
}
<file_sep>package com.amit.petsearch;
import android.app.ProgressDialog;
import android.arch.lifecycle.Observer;
import android.arch.lifecycle.ViewModelProviders;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.amit.petsearch.Adapter.MovieAdapter;
import com.amit.petsearch.Model.ApiResponse;
import com.amit.petsearch.Utils.Utils;
import com.amit.petsearch.ViewModel.MovieViewModel;
public class MainActivity extends AppCompatActivity {
private RecyclerView recyclerView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
getSupportActionBar().setTitle(getResources().getString(R.string.popular_movies));
checkInternetConnection();
}
private void checkInternetConnection() {
if (Utils.checkInternetConnection(this)){
initBindViews();
}else {
Toast.makeText(this,getResources().getString(R.string.check_internet),Toast.LENGTH_LONG).show();
finishAffinity();
}
}
private void initBindViews() {
recyclerView = findViewById(R.id.recycler_view);
RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(this);
recyclerView.setLayoutManager(layoutManager);
final ProgressDialog progressDialog = new ProgressDialog(this,R.style.MyAlertDialogStyle);
progressDialog.setMessage(getApplicationContext().getResources().getString(R.string.fetch_data));
progressDialog.setCancelable(false);
progressDialog.show();
MovieViewModel viewModel = ViewModelProviders.of(this).get(MovieViewModel.class);
viewModel.getPopularMovie().observe(this, new Observer<ApiResponse>() {
@Override
public void onChanged(@Nullable ApiResponse apiResponse) {
assert apiResponse != null;
updateUI(apiResponse);
if (progressDialog.isShowing())
progressDialog.dismiss();
}
});
}
private void updateUI(ApiResponse response) {
if(response.isStatus()){
MovieAdapter mAdapter = new MovieAdapter(response.getPopularMovies().getResults(), this);
recyclerView.setAdapter(mAdapter);
}else {
Toast.makeText(this,getResources().getString(R.string.something_wrong),Toast.LENGTH_LONG).show();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_item, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.search:
Toast.makeText(this,"clicked on search",Toast.LENGTH_LONG).show();
return true;
case R.id.plcament:
Toast.makeText(this,"clicked on plcament",Toast.LENGTH_LONG).show();
return true;
default:
return super.onContextItemSelected(item);
}
}
}
| dfba7ffb734a8c3990b43b5a74cb9a7ff5a6faf3 | [
"Java"
] | 4 | Java | izydevs/petsearch | cc0294cf691b075fb547aeee6fb01f4079441c1f | 2438856a0214efa75142a9fe9b7b5c0dcd349c06 |
refs/heads/master | <file_sep>import React, { Component } from 'react';
import Header from './common/header.component';
import Footer from './common/footer.component';
import Hero from './hero.component';
import How from './how.component';
import Peek from './peek.component';
const staticData = {
"heroData": {
"title": "Be data driven about hating your job",
"body": ["rate & track each and every day to know when it is time for a change."],
"button":"Start Tracking Now",
"style": "jumbotron jumbo-odd"
},
"whyData": {
"title": "Why?",
"body": [
"We started company X in 2018 when we realized that we had no real way to judge how bad our days " +
"really were. Everyday seemed bad… but did that mean they were all just average. ",
"We ask you to track your day on a 7 point scale - science shows that raing on a scale of 7 will " +
"give you more accurate, un-average data. You define for yourself what 1 means and what 7 means " +
"(or pick from a number of themes we’ve thought up for you!), so when you rate your day you remember " +
"what the scale is.",
"You can look back at your data at any time and compare yourself to everyone else, set alarms to " +
"trigger if you have too many bad days in a row and many other new features we are constantly " +
"experimenting with!"],
"button":"Start Today, Get the APP",
"style": "jumbotron jumbo-odd"
},
"howData": {
"title": "Here's how",
"images": [
{
"imageURL": "./images/star.png",
"imageText": "We'll ask you to rate how bad each of your days is"
},
{
"imageURL": "./images/arrow.png",
"imageText": "Look back every week, every month, every year"
},
{
"imageURL": "./images/circle.png",
"imageText": "Compare yourself to others in your industry or in your role"
}
],
"style": "jumbotron jumbo-even"
},
"corpHow": {
"title": "Here's how for Corporations",
"images": [
{
"imageURL": "./images/star.png",
"imageText": "Sign up for an enterprise account to start tracking your employees happiness"
},
{
"imageURL": "./images/arrow.png",
"imageText": "Employees will login with their company email and be asked to rate their days of work"
},
{
"imageURL": "./images/circle.png",
"imageText": "Data is annonymized for you to track by level, function or group"
}
],
"style": "jumbotron jumbo-even"
},
"peek": {
"title": "Take a Peek",
"images": [
{
"imageURL": "./images/square.png"
},
{
"imageURL": "./images/square.png"
}
],
"button": "Yes. I'm ready!",
"style": "jumbotron jumbo-odd"
}
};
export default class Index extends Component {
render() {
return (
<div className="index">
<Header />
<Hero data={staticData.heroData}/>
<How data={staticData.howData}/>
<Hero data={staticData.whyData}/>
<How data={staticData.corpHow}/>
<Peek data={staticData.peek}/>
<Footer />
</div>
);
}
}<file_sep>import React, { Component } from 'react';
import 'bootstrap-material-design/dist/css/bootstrap-material-design.min.css';
import './components/css/app.css';
import { BrowserRouter as Router, Switch, Route } from 'react-router-dom';
import Quiz from './components/quiz.component';
import Index from './components/index.component';
class App extends Component {
render() {
return (
<Router>
<div className="container-fluid">
<Switch>
<Route exact path='/' component={ Index } />
<Route exact path='/howdy' component={ Quiz } />
</Switch>
</div>
</Router>
);
}
}
export default App;
<file_sep>import React, { Component } from 'react';
import 'bootstrap-material-design/dist/css/bootstrap-material-design.min.css';
import Slider from 'rc-slider';
import 'rc-slider/assets/index.css';
import './css/howdy.css';
import Header from './common/header.component';
import Footer from './common/footer.component';
const marks = {
1: {
style: {
color: 'green'
},
label: <strong>1</strong>
},
2: '2',
3: '3',
4: '4',
5: '5',
6: '6',
7: {
style: {
color: 'red'
},
label: <strong>7</strong>
}
};
function log(value) {
console.log("slider change: " + value);
};
export default class Quiz extends Component {
constructor() {
super();
this.state = {
width: window.innerWidth,
};
}
componentWillMount() {
window.addEventListener('resize', this.handleWindowSizeChange);
}
// make sure to remove the listener
// when the component is not mounted anymore
componentWillUnmount() {
window.removeEventListener('resize', this.handleWindowSizeChange);
}
handleWindowSizeChange = () => {
this.setState({ width: window.innerWidth });
};
render() {
const { width } = this.state;
const isMobile = width <= 500;
// the rest is the same...
if (isMobile) {
return (
<p> This is a mobile view</p>
);
} else {
return (
<div className="index">
<Header />
<div className="day-box">
<div className="row">
<div className="col-12" align="center">
<h2>How bad was your day?</h2>
</div>
</div>
<form>
<div className="form-group row">
<div className="col-12" align="center">
<Slider dots min={1} max={7} marks={marks} step={1} onChange={log} defaultValue={4} />
</div>
</div>
<div className="form-group row">
<div className="col-12" align="center">
<input type="checkbox" name="escalation" value="escalation"/>
<label className="label-text">Check if there was an escalation today</label>
</div>
</div>
<div className="form-group row">
<div className="col-12" align="right">
<input type="submit" value="SUBMIT" className="btn btn-large orange-background button-text day-btn"/>
</div>
</div>
</form>
</div>
<Footer />
</div>
);
}
}
}
<file_sep>import React, { Component } from 'react';
import 'bootstrap-material-design/dist/css/bootstrap-material-design.min.css';
import './css/howdy.css';
class Peek extends Component {
render() {
return (
<div className={this.props.data.style}>
<h1 className="display-4 center">{this.props.data.title}</h1>
<hr className="my-4 center"/>
<p className="center">
{this.props.data.images.map(function(image, index) {
return <img className="how-image img-fluid" src={image.imageURL} key={index} height="200" width="200"></img>
})}
</p>
<p className="lead center">
<a className="btn btn-primary btn-lg btn-outline " href="#" role="button">{this.props.data.button}</a>
</p>
</div>
);
}
}
export default Peek;<file_sep>import React, { Component } from 'react';
import 'bootstrap-material-design/dist/css/bootstrap-material-design.min.css';
import './css/howdy.css';
class Hero extends Component {
render() {
return (
<div className={this.props.data.style}>
<h1 className="display-4 center">{this.props.data.title}</h1>
<hr className="my-4 center"/>
{this.props.data.body.map(function(para, index) {
return <p className="center" key="index"> {para}</p>
})}
<p className="lead center">
<a className="btn btn-primary btn-lg btn-outline " href="#" role="button">{this.props.data.button}</a>
</p>
</div>
);
}
}
export default Hero;
| b43a759f4bac92a2e3d6a3a9148cdc512f96f0ec | [
"JavaScript"
] | 5 | JavaScript | hollowmatt/howdy | 293259fb9e43c850f12592c0c91f89c99840154f | 79919a78b2b325806941dcd7c957369cfd983aa5 |
refs/heads/master | <repo_name>mkovacs/openvr-texture-problem<file_sep>/src/vr_app.rs
use crate::renderer;
use glium;
use glium::backend::glutin::glutin::GlRequest;
use glium::GlObject;
use openvr;
use std::rc::Rc;
pub struct VrApp {
display: Rc<glium::Display>,
left_eye: VrEye,
right_eye: VrEye,
renderer: renderer::Renderer,
vr_device: VrDevice,
}
impl VrApp {
pub fn new(event_loop: &glium::glutin::event_loop::EventLoop<()>) -> VrApp {
let window_builder = glium::glutin::window::WindowBuilder::new()
.with_inner_size(glium::glutin::dpi::LogicalSize::new(256.0, 256.0));
let context = glium::glutin::ContextBuilder::new()
.with_gl(GlRequest::Specific(glium::glutin::Api::OpenGl, (3, 2)))
.with_gl_profile(glium::glutin::GlProfile::Core)
.with_gl_robustness(glium::glutin::Robustness::RobustLoseContextOnReset)
.build_windowed(window_builder, &event_loop)
.unwrap();
let display: Rc<glium::Display> = Rc::new(glium::Display::from_gl_window(context).unwrap());
println!("OpenGL vendor: {}", display.get_opengl_vendor_string());
println!("OpenGL renderer: {}", display.get_opengl_renderer_string());
println!("OpenGL version: {}", display.get_opengl_version_string());
let device: VrDevice = {
let context = unsafe {
openvr::init(openvr::ApplicationType::Scene).expect("Failed to initialize OpenVR")
};
let system = context.system().expect("Failed to get system interface");
let compositor = context
.compositor()
.expect("Failed to create IVRCompositor subsystem");
system
.device_to_absolute_tracking_pose(openvr::TrackingUniverseOrigin::Standing, 0.005);
VrDevice {
context: context,
system: system,
compositor: compositor,
}
};
let target_size = device.system.recommended_render_target_size();
println!("target size: {:?}", target_size);
let left_eye = VrEye::new(&display, target_size);
let right_eye = VrEye::new(&display, target_size);
let renderer = renderer::Renderer::new(&display);
VrApp {
display: display,
left_eye: left_eye,
right_eye: right_eye,
renderer: renderer,
vr_device: device,
}
}
pub fn run(self, event_loop: glium::glutin::event_loop::EventLoop<()>) {
let mut is_submit_enabled = false;
event_loop.run(move |glutin_event, _target, control_flow| {
*control_flow = glium::glutin::event_loop::ControlFlow::Poll;
self.vr_device
.compositor
.wait_get_poses()
.expect("Getting poses");
let mut buffer = self.display.as_ref().draw();
self.renderer.render_test(&mut buffer);
buffer.finish().unwrap();
if is_submit_enabled {
// if this block is executed, the texture will read all black in the shader
unsafe {
self.left_eye.submit(&self.vr_device.compositor, openvr::Eye::Left);
self.right_eye.submit(&self.vr_device.compositor, openvr::Eye::Right);
}
}
match glutin_event {
glium::glutin::event::Event::WindowEvent { event, .. } => match event {
glium::glutin::event::WindowEvent::CloseRequested => {
*control_flow = glium::glutin::event_loop::ControlFlow::Exit;
},
glium::glutin::event::WindowEvent::MouseInput {..} => {
is_submit_enabled = true;
},
_ => {},
},
_ => {},
}
});
}
}
struct VrDevice {
// NOTE(mkovacs): The context must be kept around, otherwise other fields become invalid.
#[allow(dead_code)]
context: openvr::Context,
system: openvr::System,
compositor: openvr::Compositor,
}
struct VrEye {
color: glium::Texture2d,
}
impl VrEye {
fn new(display: &Rc<glium::Display>, size: (u32, u32)) -> VrEye {
let color = glium::texture::Texture2d::empty_with_format(
display.as_ref(),
glium::texture::UncompressedFloatFormat::U8U8U8U8,
glium::texture::MipmapsOption::NoMipmap,
size.0,
size.1,
)
.unwrap();
VrEye {
color: color,
}
}
unsafe fn submit(&self, compositor: &openvr::Compositor, eye: openvr::Eye) {
compositor
.submit(
eye,
&openvr::compositor::texture::Texture {
handle: openvr::compositor::texture::Handle::OpenGLTexture(
self.color.get_id() as usize
),
color_space: openvr::compositor::texture::ColorSpace::Auto,
},
None,
None,
)
.expect("Submitting frame");
}
}
<file_sep>/README.md
# openvr-texture-problem
Demonstrates a problem with texturing that appears using `openvr` and `glium`
## Experimental Setup
The executable opens a main window, then initializes OpenGL and OpenVR.
It then creates a program, a vertex and an index buffer for a full-screen quad, and a red-blue checker pattern texture.
In the main loop, it queries OpenVR for poses, then draws the full-screen quad to the main window.
So the main window looks like this:

## Symptom of the Problem
To enable submitting frames to OpenVR, click anywhere on the window.
As soon as frames are submitted, the red-blue checker pattern texture becomes invalid in some way.
In subsequent frames, the main window looks like this:

## Details
- Rust toolchain version: 1.46.0
- `openvr` version: 0.6.0
- `glium` version: 0.27.0
- OpenGL vendor: NVIDIA Corporation
- OpenGL renderer: GeForce GTX 1070 Ti/PCIe/SSE2
- OpenGL version: 3.1.0 NVIDIA 432.00
- OS: Windows 10
- Headset: Oculus Rift
<file_sep>/Cargo.toml
[package]
name = "openvr-texture-problem"
version = "0.0.1"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
[dependencies]
glium = "0.27.0"
openvr = "0.6.0"
# The development profile, used for `cargo build`.
[profile.dev]
opt-level = 0 # controls the `--opt-level` the compiler builds with
debug = true # controls whether the compiler passes `-C debuginfo`
# a value of `true` is equivalent to `2`
rpath = false # controls whether the compiler passes `-C rpath`
lto = false # controls `-C lto` for binaries and staticlibs
debug-assertions = true # controls whether debug assertions are enabled
codegen-units = 1 # controls whether the compiler passes `-C codegen-units`
# `codegen-units` is ignored when `lto = true`
panic = 'unwind' # panic strategy (`-C panic=...`), can also be 'abort'
<file_sep>/src/renderer.rs
use glium;
use glium::uniform;
use std::rc::Rc;
const IMAGE_SIZE: usize = 512;
#[derive(Copy, Clone)]
pub struct Vertex {
pub position: [f32; 2],
}
glium::implement_vertex!(Vertex, position);
pub struct Renderer {
program: glium::Program,
fullscreen_quad_vertex_buffer: glium::VertexBuffer<Vertex>,
fullscreen_quad_index_buffer: glium::IndexBuffer<u32>,
texture: glium::texture::Texture2d,
}
impl Renderer {
pub fn new(display: &Rc<glium::Display>) -> Renderer {
let program = glium::Program::from_source(
display.as_ref(),
VERTEX_SHADER_SRC,
FRAGMENT_SHADER_SRC,
None,
)
.unwrap();
let quad_vertex_data = vec![
Vertex {
position: [0.0, 0.0],
},
Vertex {
position: [1.0, 0.0],
},
Vertex {
position: [0.0, 1.0],
},
Vertex {
position: [1.0, 1.0],
},
];
let vertex_buffer = glium::VertexBuffer::new(display.as_ref(), &quad_vertex_data).unwrap();
let quad_index_data = vec![0, 1, 2, 1, 2, 3];
let index_buffer = glium::IndexBuffer::new(
display.as_ref(),
glium::index::PrimitiveType::TrianglesList,
&quad_index_data,
)
.unwrap();
// create texture
let pixel_data = compute_red_blue_checker_texture(IMAGE_SIZE);
let image = glium::texture::RawImage2d::from_raw_rgba_reversed(
&pixel_data,
(IMAGE_SIZE as u32, IMAGE_SIZE as u32),
);
let texture = glium::texture::Texture2d::new(display.as_ref(), image).unwrap();
Renderer {
program: program,
fullscreen_quad_vertex_buffer: vertex_buffer,
fullscreen_quad_index_buffer: index_buffer,
texture: texture,
}
}
pub fn render_test<S: glium::Surface>(&self, buffer: &mut S) {
// clear the screen to green
buffer.clear_color_and_depth((0.0, 1.0, 0.0, 1.0), 1.0);
let parameters = glium::DrawParameters {
backface_culling: glium::draw_parameters::BackfaceCullingMode::CullingDisabled,
depth: glium::Depth {
test: glium::DepthTest::Overwrite,
write: true,
..Default::default()
},
..Default::default()
};
let texture_sampler = self
.texture
.sampled()
.wrap_function(glium::uniforms::SamplerWrapFunction::Repeat)
.minify_filter(glium::uniforms::MinifySamplerFilter::Nearest)
.magnify_filter(glium::uniforms::MagnifySamplerFilter::Nearest);
let uniforms = uniform! {
checker_texture: texture_sampler,
};
buffer
.draw(
&self.fullscreen_quad_vertex_buffer,
&self.fullscreen_quad_index_buffer,
&self.program,
&uniforms,
¶meters,
)
.unwrap();
}
}
fn compute_red_blue_checker_texture(size: usize) -> Vec<u8> {
// compute red and blue checkerboard pattern
let mut pixel_data: Vec<u8> = Vec::with_capacity(4 * size * size);
for y in 0..size {
for x in 0..size {
let (r, g, b) = if (2 * x < size) ^ (2 * y < size) {
(255, 0, 0)
} else {
(0, 0, 255)
};
pixel_data.push(r);
pixel_data.push(g);
pixel_data.push(b);
pixel_data.push(0);
}
}
pixel_data
}
const VERTEX_SHADER_SRC: &str = r#"
#version 140
in vec2 position;
out vec2 v_texcoord;
void main() {
gl_Position = vec4(mix(vec2(-1.0, -1.0), vec2(1.0, 1.0), position), 0.0, 1.0);
v_texcoord = position;
}
"#;
/// Fragment shader: samples the checker texture at the interpolated coordinate.
const FRAGMENT_SHADER_SRC: &str = r#"
#version 140
uniform sampler2D checker_texture;
in vec2 v_texcoord;
out vec4 color;
void main() {
color = texture(checker_texture, v_texcoord);
}
"#;
<file_sep>/src/main.rs
mod renderer;
mod vr_app;
fn main() {
    // The winit event loop drives the VR application until exit.
    let event_loop = glium::glutin::event_loop::EventLoop::new();
    let app = vr_app::VrApp::new(&event_loop);
    app.run(event_loop);
}
| 1d9073a2a95d8bc2a6f54edb4ee49e2e489c96e8 | [
"Markdown",
"Rust",
"TOML"
] | 5 | Rust | mkovacs/openvr-texture-problem | ab2d167245d32ceb4b6c268a198eec099b446bdd | 0281757e3c1d8c91d3c8c47bfe2d781948633100 |
refs/heads/main | <file_sep>import copy as cp
import numpy as np
from math import sqrt, pow, degrees
class Point3D():
    """A minimal 3-D point with the angle helper used by the shelf solver."""

    def __init__(self, coordinate):
        """Build a point from a ``[x, y, z]`` list or copy another Point3D.

        Fix: the original copy branch did ``self = cp.deepcopy(coordinate)``,
        which only rebinds the local name ``self`` and leaves the new
        instance without any coordinates. The fields are now copied
        explicitly.
        """
        if isinstance(coordinate, list):
            self.x = coordinate[0]
            self.y = coordinate[1]
            self.z = coordinate[2]
        else:
            # Copy-constructor path: duplicate the coordinates of the source.
            self.x = coordinate.x
            self.y = coordinate.y
            self.z = coordinate.z

    def computeAngle(self, parent):
        """Angle in degrees between parent->self and parent->(0, 1) in the
        x/y plane, scaled by 1.5.

        NOTE(review): the 1.5 factor looks like a tuning weight for the arm
        cost — confirm intent.
        """
        vectorPC = [self.x - parent.x, self.y - parent.y]
        vectorPy = [0, 1 - parent.y]
        unitVecPC = vectorPC / np.linalg.norm(vectorPC)
        unitVecPy = vectorPy / np.linalg.norm(vectorPy)
        dot_product = np.dot(unitVecPC, unitVecPy)
        angle = np.arccos(dot_product) * 1.5
        angle = degrees(angle)
        return angle
<file_sep># Data
To experiment with our ideas, we use a JSON file to declare the objects in the environment.
In real use conditions, the object poses are sent via a ROS service to our package, which computes the solution and then sends the list of objects to move to another package, also via a ROS service.
# TODO
Ajouter le plateau dans le fichier json ainsi que la taille des objets
<file_sep>import copy as cp
from utils.Zone import *
class FreeZone(Zone):
    # A free drop zone on the shelf, bound (via Zone._parent) to the object
    # that is planned to be moved into it.
    def __init__(self, name, coordinate, size):
        super().__init__(name, coordinate, size)

    def moveParent(self):
        # Teleport the parent object onto this zone's coordinates, then
        # replace the stored reference with a deep copy; the debug prints
        # show that the reference identity changes.
        # NOTE(review): the deepcopy detaches `_parent` from the object held
        # in the caller's graph — confirm this is intentional.
        self._parent.x = self.x
        self._parent.y = self.y
        self._parent.z = self.z
        print("id before : ", id(self._parent))
        self._parent = cp.deepcopy(self._parent)
        print("id after : ", id(self._parent))
<file_sep>from utils.Zone import *
class Node(Zone):
    def __init__(self, name='', coordinate=[], isGoal=False, size=0):
        """An object standing on the shelf; nodes are created when parsing
        the JSON file (one per detected object).

        Args:
            name (str, optional): name of the object (tag). Defaults to ''.
            coordinate (list, optional): single-element list holding a
                ``{'x', 'y', 'z'}`` dict. Defaults to [].
            isGoal (bool, optional): True when this is the object we want
                to reach. Defaults to False.
            size (int, optional): size of the object (radius size).
                Defaults to 0.

        Note:
            ``_child``/``_parent`` are managed by the solver and start
            empty (see ``Zone.__init__``); ``freeZone`` starts as None.
        """
        super().__init__(name, [coordinate[0]['x'],
                                coordinate[0]['y'], coordinate[0]['z']], size)
        self.__isGoal = isGoal
        # Free zone assigned to the node when it must be moved out of the way
        self.freeZone = None

    def setGoal(self):
        # Mark this node as the grasp target.
        self.__isGoal = True

    def isGoal(self):
        return self.__isGoal

    def __str__(self):
        """Equivalent of toString(): human-readable dump of the node."""
        rep = "Object :\n"
        rep += " name: " + self.name + "\n"
        rep += " size: " + str(self.size) + "\n"
        rep += " isGoal: " + str(self.__isGoal) + "\n\n"
        if(self._child):
            for child in self._child:
                rep += " child: " + child.name + "\n"
        else:
            rep += " child: " + str(self._child) + "\n"
        if(self._parent):
            rep += " parent: " + self._parent.name + "\n"
        else:
            rep += " parent: " + str(self._parent) + "\n"
        if(self.freeZone):
            rep += " freeZone: " + self.freeZone.name + "\n"
        else:
            rep += " freeZone: " + str(self.freeZone) + "\n"
        return rep
<file_sep>import json
import random
from utils.Nodes import *
class Data():
    """Loads the shelf description from a JSON file and turns each entry
    into a :class:`Node`."""

    def __init__(self, filename=""):
        self.filename = filename

    def parseFile(self):
        """Parse the JSON file describing the grid.

        Returns:
            list[Node]: one node per object declared in the file.
        """
        with open(self.filename) as json_file:
            points = json.load(json_file)
        return [
            Node(point["name"], point["coordinate"],
                 point["isGoal"], point["size"])
            for point in points
        ]
<file_sep># appenv
App for the simulator
<file_sep>import math
import copy as cp
import solver
from utils.FreeZone import *
class PlaceFinder():
    """Creates a list of potential places (free zones) where an object of a
    given size could be moved on the shelf."""

    def __init__(self, graph, shelf_size_x, shelf_size_y, precision):
        """
        Args:
            graph (list): every object/zone currently known on the shelf.
            shelf_size_x (int): shelf width in mm.
            shelf_size_y (int): shelf depth in mm.
            precision (int): sliding step of the virtual box in mm.
        """
        self.shelf_size_x = shelf_size_x
        self.shelf_size_y = shelf_size_y
        self.precision = precision
        self.graph = graph

    def findPlace(self, current):
        """Slide a square virtual box over the whole shelf and collect every
        position where an object the size of ``current`` would fit.

        Returns:
            list[FreeZone]: one zone per empty box position found.

        Note: the original called ``emptyArea`` twice per step; it is now
        evaluated once (same behavior, half the work).
        """
        diameter_object = current.size
        # Centres of every empty box found while scanning.
        list_pos = []
        # Axis-aligned square box of side `diameter_object`, corners listed
        # as [bottom-left, top-left, top-right, bottom-right].
        box = [[0, 0],
               [0, diameter_object],
               [diameter_object, diameter_object],
               [diameter_object, 0]]
        done = False
        while not done:
            if self.emptyArea(box):
                # Record the centre of the empty box.
                list_pos.append([(box[0][0] + box[3][0]) / 2,
                                 (box[0][1] + box[1][1]) / 2])
            if box[1][1] + self.precision < self.shelf_size_y:
                # Advance the box along y.
                for corner in box:
                    corner[1] += self.precision
            elif box[3][0] + self.precision < self.shelf_size_x:
                # y exhausted: reset y and advance along x.
                box[0][1], box[1][1], box[2][1], box[3][1] = \
                    0, diameter_object, diameter_object, 0
                for corner in box:
                    corner[0] += self.precision
            else:
                done = True
        # Wrap every candidate centre in a FreeZone sized for the object.
        return [
            FreeZone("FreeZone" + current.name + str(i),
                     [point[0], point[1], 0], diameter_object)
            for i, point in enumerate(list_pos)
        ]

    def emptyArea(self, box):
        """Return True when no object of ``self.graph`` intersects ``box``.

        Besides the object centre, points sampled on concentric rings
        (10 mm radius step, 10 samples per ring) approximate the object's
        circular footprint.
        """
        empty_area = True
        for point in self.graph:
            if box[0][0] < point.x < box[3][0] and box[0][1] < point.y < box[1][1]:
                empty_area = False
            for j in range(1, int(point.size / 20)):
                for i in range(0, 10):
                    angle = math.radians(i * 36)
                    radius = j * 10
                    point_x = radius * math.cos(angle) + point.x
                    point_y = radius * math.sin(angle) + point.y
                    if box[0][0] < point_x < box[3][0] and box[0][1] < point_y < box[1][1]:
                        empty_area = False
        return empty_area
<file_sep>from utils.Point3D import *
from math import inf
class Zone(Point3D):
    """A named area on the shelf — base class for objects, free zones and
    the robot arm — carrying the A* cost bookkeeping."""

    def __init__(self, name, coordinate, size):
        super().__init__(coordinate)
        self.name = name
        self.size = size
        self._child = []      # successor nodes generated by the solver
        self._parent = None   # predecessor on the current search path
        # A* g-cost components: accumulated arm angle, travelled distance,
        # and a flat penalty per crossed object; h is the heuristic.
        self.gAngleCost = 0
        self.gDistCost = 0
        self.gObjectCost = 0
        self.hCost = 0
        self.functionValue = 0

    def resetChild(self):
        # Drop all successors (used on deep copies before re-expansion).
        self._child = []

    def setChild(self, nodeChild):
        self._child.append(nodeChild)

    def getChild(self):
        return self._child

    def isGoal(self):
        # Plain zones are never the goal; Node overrides this.
        return False

    def resetParent(self):
        self._parent = None

    def setParent(self, parentChild):
        self._parent = parentChild

    def getParent(self):
        return self._parent

    def computeHcost(self, goal):
        # Heuristic: straight-line distance to the goal node.
        self.hCost = self.getDistanceToNode(goal)
        return self.hCost

    def computeVirtualGcost(self, parent):
        # The g cost this zone WOULD have with ``parent`` as predecessor,
        # computed without mutating any state.
        d = parent.gDistCost + self.getDistanceToNode(parent)
        a = self.computeAngle(parent)
        a += parent.gAngleCost
        o = parent.gObjectCost + 1000  # flat 1000 penalty per crossed object
        return d + a + o

    def getGCost(self):
        return self.gDistCost + self.gAngleCost

    def upgateGcost(self, parent):
        # (sic: "upgate") Update the g cost taking ``parent`` as predecessor.
        # here we can take in acount the mecanical constraints of the robot arm
        # to do so, we can add to the distance the angle between the two nodes
        self.gDistCost = parent.gDistCost + \
            self.getDistanceToNode(parent)  # distance
        angle = self.computeAngle(parent)
        self.gAngleCost = parent.gAngleCost + angle
        self.gObjectCost = parent.gObjectCost + 1000
        return self.gDistCost + self.gAngleCost + self.gObjectCost

    def isAtTheSamePositionAs(self, node):
        # True when both zones share the same (x, y); z is ignored.
        if self.x == node.x:
            if self.y == node.y:
                return True
        return False

    def updateFunctionValue(self):
        # f = h + g(dist) + g(angle); the object penalty is intentionally
        # excluded here — TODO confirm.
        self.functionValue = self.hCost + self.gDistCost + self.gAngleCost

    def getFunctionValue(self):
        # Recomputes f before returning it (same formula as above).
        self.functionValue = self.hCost + self.gDistCost + self.gAngleCost
        return self.functionValue

    def getDistanceTo(self, datapoint):
        # Euclidean distance to a raw [x, y, z] triple.
        return sqrt(pow((datapoint[0]-self.x), 2) + pow((datapoint[1]-self.y), 2) + pow((datapoint[2]-self.z), 2))

    def getDistanceToNode(self, node):
        # Euclidean distance to another zone/node.
        return sqrt(pow((node.x-self.x), 2) + pow((node.y-self.y), 2) + pow((node.z-self.z), 2))

    def getClosestZoneFromList(self, liste):
        # Return (closest_zone, distance); note ``max_dist`` actually tracks
        # the minimum distance seen so far.
        max_dist = inf
        b_node = None
        for element in liste:
            if max_dist > self.getDistanceToNode(element):
                max_dist = self.getDistanceToNode(element)
                b_node = element
        return b_node, max_dist

    def __str__(self):
        """Equivalent of toString(): human-readable dump of the zone."""
        rep = "Zone :\n"
        rep += " name: " + self.name + "\n"
        rep += " size: " + str(self.size) + "\n"
        if(self._child):
            for child in self._child:
                rep += " child: " + child.name + "\n"
        else:
            rep += " child: " + str(self._child) + "\n"
        if(self._parent):
            rep += " parent: " + self._parent.name + "\n"
        else:
            rep += " parent: " + str(self._parent) + "\n"
        return rep
<file_sep>from utils.Zone import *
class RobotArm(Zone):
    # The robot arm modelled as a Zone of fixed size 150, placed at its
    # scanning pose by the caller.
    def __init__(self, x, y, z=0, name=" "):
        super().__init__("RobotArm - " + name, [x, y, z], 150)

    def __str__(self):
        rep = "RobotArm :\n"
        rep += " Pose (x,y,z): ("+str(self.x)+"," + \
            str(self.y)+","+str(self.z)+")\n"
        return rep
<file_sep># Shelf objects grasping
This project aims to find the best strategy to reach an object placed on a shelf while avoiding other objects that could be placed around it on the same shelf.
# How to use
Important: this project runs on Python 3 (install the libraries or use a venv).
## Virtual Environment
* Windows
In the PowerShell enter :
```
env\Scripts\activate.bat
```
* Unix/MacOS
```shell
source env/bin/activate
```
Then you can run the code from source of the project folder by entering the following command :
```shell
(env) computerName$ python3 script/graph/main.py
```
## requirements.txt
If python3.x is already installed on your computer you can use the following command to install dependencies.
```shell
$ pip install -r requirements.txt
```
<file_sep>import numpy as np
import sys
import copy as cp
from math import inf
from solver.PlaceFinder import *
from utils.RobotArm import *
from utils.Nodes import *
from utils.FreeZone import *
from solver.AStar import *
class Solver(PlaceFinder):
    """Finds the right strategy to reach the goal object without touching
    any other object, and assigns a drop zone to every blocking object."""

    def __init__(self, shelf_size_x, shelf_size_y, precision, graph, goal):
        """
        Args:
            shelf_size_x (int): x size of the shelf in mm
            shelf_size_y (int): y size of the shelf in mm
            precision (int): number of mm per pixel
            graph (list): every Zone/Node known on the shelf
            goal (Node): the object we ultimately want to grasp
        """
        self.__graph = graph
        self.goal = goal
        self.__shelf_size_x = shelf_size_x
        self.shelf_size_y = shelf_size_y
        # Minimum clearance around an object, defined by the arm's size.
        self.__objectRadiusProximity = 75
        self.precision = precision
        super().__init__(self.__graph, shelf_size_x,
                         shelf_size_y, precision)

    def getSucessors(self, currentNode):
        """Generate the successors of ``currentNode``: every node the arm
        can reach from it in a straight line without collision.

        A node may have several parents; each successor is a deep copy with
        its own (empty) child list.
        """
        if isinstance(currentNode, RobotArm):
            # From the arm's scanning pose only the path points along the
            # shelf edge are reachable.
            for node in self.__graph:
                # FIX: the original compared ``node.name[:-1]`` against the
                # prefix, which silently dropped "RobotArm-Path-Point10".
                if node.name.startswith("RobotArm-Path-Point"):
                    new_node = cp.deepcopy(node)
                    new_node.resetChild()
                    currentNode.setChild(new_node)
        else:
            for node in self.__graph:
                # FIX: compare names with ``!=`` instead of ``is not``
                # (string identity only worked via interning/copy sharing).
                if node.name != currentNode.name:
                    if not self.__isCollide(currentNode, node):
                        new_node = cp.deepcopy(node)
                        new_node.resetChild()
                        currentNode.setChild(new_node)
        return currentNode.getChild()

    def defineObjectToMove(self, robotArm, algo_name, occurence_test=True):
        """Define the ordered list of objects to move to reach the goal.

        Args:
            robotArm (RobotArm): initial pose of the robot.
            algo_name (str): "BFS", "DFS" or "A*".
            occurence_test (bool): enable the explored-set test for BFS/DFS.

        Returns:
            tuple: (objects_to_move, nb_iterations)
        """
        objectsToMove = []
        solution = None
        nb_iterations = 0
        if algo_name == "BFS":
            solution, nb_iterations = self.breath_first_search(
                robotArm, occurence_test=occurence_test)
        elif algo_name == "DFS":
            solution, nb_iterations = self.depth_first_search(
                robotArm, occurence_test=occurence_test)
        elif algo_name == "A*":
            AStarSolver = AStar(robotArm, self.goal, self)
            solution, nb_iterations = AStarSolver.solve()
        if solution:
            print("Solver : Solution found")
            # Walk the parent chain back to the start, then reverse it.
            objectsToMove.append(solution)
            parent = solution.getParent()
            while parent:
                objectsToMove.append(parent)
                parent = parent.getParent()
            objectsToMove.reverse()
            self.newPoseObjectToMove(objectsToMove)
        else:
            print("Solver : No Solution found")
        return objectsToMove, nb_iterations

    def breath_first_search(self, robotArm, occurence_test=True):
        """Breadth-first search from the arm pose to the goal.

        Args:
            robotArm (RobotArm): initial pose of the robot.
            occurence_test (bool, optional): activate the explored-set test.

        Returns:
            tuple: (final_state_or_None, iterations)
        """
        frontier = list()  # open list, used as a FIFO queue
        explored = []      # closed list of node names
        frontier.append(robotArm)
        i = 0
        while frontier:
            state = frontier.pop(0)  # FIFO: take the oldest node
            if state.isGoal():
                return state, i
            children = self.getSucessors(state)
            for child in children:
                if not occurence_test or (child.name not in explored):
                    child.setParent(state)
                    frontier.append(child)
                    if occurence_test:
                        explored.append(child.name)
            i += 1
        # FIX: also return the iteration count on failure; the bare
        # ``return None`` made the caller's tuple unpacking crash.
        return None, i

    def depth_first_search(self, robotArm, occurence_test=True):
        """Depth-first search; identical to BFS except the frontier is used
        as a stack (children inserted at — and popped from — the front).

        Returns:
            tuple: (final_state_or_None, iterations)
        """
        frontier = list()
        explored = []
        frontier.append(robotArm)
        i = 0
        while frontier:
            state = frontier.pop(0)  # front of list == most recent insertion
            if state.isGoal():
                return state, i
            children = self.getSucessors(state)
            for child in children:
                if not occurence_test or (child.name not in explored):
                    child.setParent(state)
                    # Insert at the front so the frontier behaves as a stack.
                    frontier.insert(0, child)
                    if occurence_test:
                        explored.append(child.name)
            i += 1
        # FIX: failure must also return an iteration count (see BFS).
        return None, i

    def __isCollide(self, starting_node, ending_node):
        """Bresenham line walk between two poses.

        Links the two poses with a virtual line and, for each cell, checks
        whether the arm can pass without touching any *other* object.

        Returns:
            bool: True when a third object lies within
            ``__objectRadiusProximity`` of the line.
        """
        x, y = int(starting_node.x), int(starting_node.y)
        # Line direction (x/y step signs).
        if(x < ending_node.x):
            sx = 1
        else:
            sx = -1
        if(y < ending_node.y):
            sy = 1
        else:
            sy = -1
        # Distances along each axis and the accumulated error term.
        dx = abs(x - int(ending_node.x))
        dy = abs(y - int(ending_node.y))
        e = dx - dy
        # Walk the virtual line until the end point is reached.
        while(x != int(ending_node.x) or y != int(ending_node.y)):
            e2 = e * 2
            if e2 > - dy:
                e -= dy
                x += sx
            if e2 < dx:
                e += dx
                y += sy
            if(x != int(ending_node.x) or y != int(ending_node.y)):
                # Closest object to the current point of the line.
                node, distanceToClosestNode = self.__getDistanceToClosestObjectsFromPoint([
                    x, y, 0])
                if node:
                    # FIX: name comparison via ``!=`` (was ``is not``).
                    if(node.name != ending_node.name and node.name != starting_node.name):
                        if distanceToClosestNode < self.__objectRadiusProximity:
                            return True
        return False

    def __getDistanceToClosestObjectsFromPoint(self, point):
        """Find the object closest to a raw ``[x, y, z]`` point.

        Returns:
            tuple: (closest_node_or_None, min_distance)

        NOTE(review): ``obj.size/2`` is *added* to the distance; subtracting
        the radius (distance to the object surface) seems intended — confirm.
        """
        closest_node = None
        min_dist = inf
        for obj in self.__graph:
            distance = obj.getDistanceTo(point) + obj.size/2
            if(min_dist > distance):
                closest_node = obj
                min_dist = distance
        return closest_node, min_dist

    def newPoseObjectToMove(self, solution):
        """For every blocking object on the path (``solution[2:]`` up to the
        goal), pick a reachable drop zone and store it in ``node.freeZone``."""
        freeSpaceAccessible = []
        newPosAvailable = []
        i = 2
        while solution[i].name != self.goal.name:
            # Every free zone large enough for this object.
            freeSpace = self.findPlace(solution[i])
            # Temporarily take the object itself out of the graph.
            for compt, objectToretire in enumerate(self.__graph):
                if objectToretire.name == solution[i].name:
                    tamponObj = objectToretire
                    jeter = self.__graph.pop(compt)
            # Keep only the zones the arm can actually reach.
            for point in freeSpace:
                if self.__isCollide(solution[1], point) == False:
                    freeSpaceAccessible.append(point)
            # Keep only the zones that leave the path to the next node clear.
            for pointA in freeSpaceAccessible:
                self.__graph.append(pointA)
                if self.__isCollide(solution[1], solution[i + 1]) == False:
                    if self.addValue(pointA, newPosAvailable):
                        newPosAvailable.append(pointA)
                else:
                    # Candidate blocks the path: remove it from the graph.
                    self.__graph.pop()
            self.__graph.append(tamponObj)
            # The drop zone closest to the object wins.
            solution[i].freeZone, _ = solution[i].getClosestZoneFromList(newPosAvailable)
            newPosAvailable = []
            i += 1

    def addValue(self, pointA, posAvailable):
        """Return True when ``pointA`` is not already in ``posAvailable``."""
        for point in posAvailable:
            if point == pointA:
                return False
        return True
<file_sep># Another method
Here is another method that could be used to solve the problem.<file_sep>from PySide2.QtWidgets import *
from PySide2.QtGui import *
from PySide2.QtCore import *
import sys
from Node import Node
class ShelfController(QMainWindow):
    """Main window: a graphics view of the shelf plus node-editing controls."""

    def __init__(self, x_size, y_size):
        super(ShelfController, self).__init__()
        # Scene rectangle mirroring the physical shelf dimensions (pixels).
        self.shelfRect = QRectF(0, 0, x_size, y_size)
        self.setWindowTitle("Shelf Grasping Controller")
        self.create_ui()
        self.show()

    def create_ui(self):
        """Build the widget hierarchy: action buttons, node-info form and
        the scene view."""
        widget = QWidget()
        #### Button Node Events ####
        buttonAdd = QPushButton("Add Object", self)
        buttonAdd.clicked.connect(self.addNode)
        buttonClear = QPushButton("Clear shelf", self)
        buttonClear.clicked.connect(self.clearShelf)
        ############################
        #### Node Info Editor #####
        # NOTE(review): these editors are built and validated but not yet
        # wired to any node — TODO confirm the intended binding.
        checkboxIsGoal = QCheckBox(self)
        sizeEditor = QLineEdit()
        sizeEditor.setValidator(QDoubleValidator(1.00, 999.99, 2))
        xEditor = QLineEdit()
        xEditor.setValidator(QDoubleValidator(0.00, self.shelfRect.width(), 2))
        xEditor.setMinimumWidth(50)
        yEditor = QLineEdit()
        yEditor.setValidator(QDoubleValidator(0.00, self.shelfRect.height(), 2))
        ############################
        self.scene = QGraphicsScene(self)
        self.scene.setSceneRect(self.shelfRect)
        self.view = QGraphicsView(self.scene, self)
        groupBox = QGroupBox("Node Infos")
        nodeInfosBox = QFormLayout()
        nodeInfosBox.addRow("Is goal :", checkboxIsGoal)
        nodeInfosBox.addRow("Size :", sizeEditor)
        nodeInfosBox.addRow("X pose :", xEditor)
        nodeInfosBox.addRow("Y pose :", yEditor)
        groupBox.setLayout(nodeInfosBox)
        # Horizontal strip: scene view next to the node-info form.
        hbox = QHBoxLayout()
        hbox.addStretch(1)
        hbox.addWidget(self.view)
        hbox.addWidget(groupBox)
        # Button row below the main strip.
        buttonHBox = QHBoxLayout()
        buttonHBox.addStretch(1)
        buttonHBox.addWidget(buttonAdd)
        buttonHBox.addWidget(buttonClear)
        masterVBox = QVBoxLayout()
        masterVBox.addStretch(1)
        masterVBox.addLayout(hbox)
        masterVBox.addLayout(buttonHBox)
        widget.setLayout(masterVBox)
        self.setCentralWidget(widget)

    def addNode(self):
        # Create a default node (10, 10, size 100) and draw it on the scene.
        newNode = Node(self.scene, 10, 10, 100)
        newNode.addToScene()

    def clearShelf(self):
        # Remove every item from the scene.
        self.scene.clear()
if __name__ == "__main__":
    # Standalone launch of the controller window (700x450 shelf).
    app = QApplication(sys.argv)
    window = ShelfController(700, 450)
    sys.exit(app.exec_())
<file_sep>from math import inf
from utils.Nodes import *
from utils.RobotArm import *
class AStar():
    """A* search over the successor graph produced by the solver."""

    def __init__(self, startingNode, goalNode, solver):
        """
        Args:
            startingNode: initial state (the robot arm pose).
            goalNode: the node we want to reach (used for the heuristic).
            solver: object providing ``getSucessors(state)``.
        """
        self.__startingNode = startingNode
        self.__goalNode = goalNode
        self.__solver = solver

    def solve(self):
        """Find the best way to reach the goal object using A*: respect the
        arm's mechanical constraints while crossing a minimum number of
        objects.

        Returns:
            tuple: (goal_node_or_None, iterations)
        """
        closestList = []  # closed list
        openList = []     # open list
        openList.append(self.__startingNode)
        iteration = 0
        while openList:
            # Node with the minimum f value in the open list.
            state = self.__getMinimumFunctionNode(openList)
            if state.isGoal():
                return state, iteration
            for child in self.__solver.getSucessors(state):
                child.upgateGcost(state)            # g with state as parent
                child.computeHcost(self.__goalNode)  # heuristic
                child.updateFunctionValue()          # f = g + h
                # Skip the child when a better-or-equal copy already sits in
                # the open or closed list.
                if not self.__checkChildInOpenList(openList, child):
                    if not self.__checkNodeInClosedList(closestList, child):
                        openList.append(child)
                        child.setParent(state)
            closestList.append(state)
            iteration += 1
        return None, iteration

    def __getMinimumFunctionNode(self, nodeList):
        # Pop and return the node with the smallest f value.
        # (The original also tracked an unused ``best_node`` variable.)
        minimum = inf
        index = 0
        for i, node in enumerate(nodeList):
            if node.getFunctionValue() < minimum:
                index = i
                minimum = node.getFunctionValue()
        return nodeList.pop(index)

    def __checkNodeInClosedList(self, closedList, child):
        # True when the closed list already holds a same-named node with a
        # strictly smaller f value than ``child``.
        for node in closedList:
            if child.name == node.name:
                if child.getFunctionValue() > node.getFunctionValue():
                    return True
        return False

    def __checkChildInOpenList(self, openList, child):
        # Same test as above, against the open list.
        for node in openList:
            if child.name == node.name:
                if child.getFunctionValue() > node.getFunctionValue():
                    return True
        return False
<file_sep># This Python file uses the following encoding: utf-8
from PySide2.QtWidgets import *
from PySide2.QtGui import *
from PySide2.QtCore import *
class Node(QGraphicsEllipseItem):
    """Graphical representation of a shelf object as a movable ellipse."""

    def __init__(self, scene=None, x=0, y=0, size=0):
        super(Node, self).__init__()
        self._scene = scene
        self.x, self.y, self.w, self.h = x, y, size, size
        self._brushColor = QBrush(Qt.blue)
        self._penColor = QPen(Qt.black)
        self.setBrush(self._brushColor)
        self.setPen(self._penColor)
        self._isGoal = False

    def addToScene(self, x=None, y=None, size=None):
        """Draw this node on the scene as a movable ellipse.

        Fix: the class defined ``addToScene`` twice — the second definition
        silently shadowed the first, which also referenced the undefined
        name ``QtGraphicsItem``.  The two are merged here: optional
        ``x``/``y``/``size`` update the geometry before drawing, and goal
        nodes are drawn in red.
        """
        if self._isGoal:
            self._brushColor = QBrush(Qt.red)
            self.setBrush(self._brushColor)
            self.setPen(self._penColor)
        if x is not None:
            self.x = x
        if y is not None:
            self.y = y
        if size is not None:
            self.w = size
            self.h = size
        self.node = self._scene.addEllipse(
            self.x, self.y, self.w, self.h, self.pen(), self.brush())
        self.node.setFlag(QGraphicsItem.ItemIsMovable)

    def mouseReleaseEvent(self, event):
        # Debug trace; delegate to the Qt base implementation.
        # (Fix: the original referenced ``QtGui.QGraphicsEllipseItem``,
        # an undefined name that raised NameError when triggered.)
        print("hello")
        return QGraphicsEllipseItem.mouseReleaseEvent(self, event)

    def hoverMoveEvent(self, event):
        # Debug trace only.
        print("coucou")
<file_sep>from data.data import *
from utils.Nodes import *
from solver.solver import *
import matplotlib.pyplot as plt
import logging as LOGGER
from random import randint
from math import inf
import time
class shelf_object_solver():
    """Top-level driver: builds the environment (from JSON or randomly),
    runs the solver and plots the result."""

    def __init__(self, shelf_size_x, shelf_size_y, precision, randomENV=False, verbose=True):
        self.dataParser = Data("script/graph/data/objects.json")
        self.x_boundary = shelf_size_x
        self.y_boundary = shelf_size_y
        self.graph = []
        self.getData(randomInit=randomENV, objectNumber=10)
        self.goal = self.__getGoal()
        self.solver = Solver(shelf_size_x, shelf_size_y,
                             precision, self.graph, self.goal)
        # The robot arm starts at the scanning pose, centred below the shelf.
        self.__grasper = RobotArm(shelf_size_x/2, shelf_size_y + 100)
        self.__verbose = verbose

    def __solve(self):
        # Run the configured search (A* with occurrence test) and time it.
        start_execution_time = time.time()
        objectToMove, iterations = self.solver.defineObjectToMove(
            self.__grasper, "A*", occurence_test=True)
        exec_time = time.time() - start_execution_time
        return objectToMove, iterations, exec_time

    def __getGoal(self):
        # First node flagged as the goal, or None when no goal exists.
        for obj in self.graph:
            if obj.isGoal():
                return obj
        return None

    def getData(self, randomInit=False, objectNumber=0):
        """Populate ``self.graph`` (placeholder for the future rosservice
        input): path points along the shelf edge plus either the parsed
        JSON objects or ``objectNumber`` random ones.
        """
        # Define, along the shelf edge, the points the robot could pass by.
        nb_interval = 10
        interval_dist = self.x_boundary / nb_interval
        for i in range(nb_interval+1):
            self.graph.append(
                Zone("RobotArm-Path-Point"+str(i), [i * interval_dist, self.y_boundary, 0], size=0))
        if not randomInit:
            self.graph += self.dataParser.parseFile()
        else:
            # One random goal near the back of the shelf...
            x = randint(0, self.x_boundary)
            y = randint(0, self.y_boundary/4)
            objSize = randint(50, 100)  # Can be modified
            newObj = Node(
                "ObjectGoal", [{'x': x, 'y': y, 'z': 0}], isGoal=True, size=objSize)
            self.graph.append(newObj)
            # ...plus (objectNumber - 1) random obstacles.
            for i in range(objectNumber - 1):
                x = randint(20, self.x_boundary - 20)
                y = randint(20, self.y_boundary - 20)
                objSize = randint(50, 100)  # Can be modified
                newObj = Node(
                    "Object"+str(i), [{'x': x, 'y': y, 'z': 0}], isGoal=False, size=objSize)
                self.graph.append(newObj)
        LOGGER.info("Data Imported")

    def sendData(self):
        """Run the solver and report the result (placeholder for the future
        rosservice output). Returns the ordered list of objects to move."""
        objectsToMove, iterations, exec_time = self.__solve()
        LOGGER.info("Data to send")
        nb_objects_move = 0
        for obj in objectsToMove:
            nb_objects_move += 1
            if self.__verbose:
                print(obj.__str__())
        if self.__verbose:
            print("Solve in ", iterations, "iterations")
            print(nb_objects_move, "have to be moved to reach goal Object")
            print("Exectution time : ", exec_time, " seconds")
        return objectsToMove

    def visualize(self, solution=None):
        """Display the graph built from the object poses/accessibility
        (top axis) and the solution path (bottom axis)."""
        x_graph = []
        y_graph = []
        s_graph = []
        x_solution = []
        y_solution = []
        s_solution = []
        fig, (ax1, ax2) = plt.subplots(2, 1, constrained_layout=True)
        fig.suptitle("Links between Nodes and robot arm")
        ax1.set_xlim([0, self.x_boundary])
        ax1.set_ylim([0, self.y_boundary])
        ax2.set_xlim([0, self.x_boundary])
        ax2.set_ylim([0, self.y_boundary])
        ax1.invert_yaxis()  # reverse y axis to match shelf coordinates
        ax2.invert_yaxis()
        ax1.set_title("Nodes connections")
        ax2.set_title("Solution")
        for node in self.graph:
            x_graph.append(node.x)
            y_graph.append(node.y)
            s_graph.append(node.size)
            ax1.annotate(node.name, (node.x, node.y))
            if node.getChild():
                # Draw an arrow from every child back to its parent node.
                for child in node.getChild():
                    ax1.arrow(child.x, child.y, node.x - child.x,
                              node.y - child.y, head_width=0.1, head_length=1, fc='b', ec='b')
        if solution:
            for node in solution:
                ax2.annotate(node.name, (node.x, node.y))
                x_solution.append(node.x)
                y_solution.append(node.y)
                s_solution.append(node.size)
                # Also plot the assigned drop zone of each moved object.
                if isinstance(node, Node) and node.freeZone:
                    x_solution.append(node.freeZone.x)
                    y_solution.append(node.freeZone.y)
                    s_solution.append(node.freeZone.size)
            for i in range(len(solution) - 1):
                ax2.arrow(solution[i].x, solution[i].y, solution[i+1].x - solution[i].x,
                          solution[i+1].y - solution[i].y, head_width=0.1, head_length=1, fc='b', ec='b')
        # NOTE(review): ``node`` leaks from the loops above, so
        # ``s=node.size*3`` scales by the *last* node only — confirm intent.
        ax1.scatter(x_graph, y_graph, color="k",
                    s=node.size*3, label="Objects")
        ax2.scatter(x_solution, y_solution, color="k",
                    s=node.size*3)
        fig.legend()
        plt.show()
if __name__ == "__main__":
    # Demo run: random 800x280 mm shelf, 20 mm grid step, verbose output.
    shelfdObjectSolver = shelf_object_solver(
        800, 280, 20, randomENV=True, verbose=True)
    solution = shelfdObjectSolver.sendData()
    shelfdObjectSolver.visualize(solution=solution)
| abae353e38e47a2be35aa0b9b04fa43426d9e069 | [
"Markdown",
"Python"
] | 16 | Python | totordudu/shelf_object_grasping | f8c4fd1c52760416a4fc42330868aecc59a10a9c | 25c3acf97090dbc1ba6dd3871b10ae156febdf4a |
refs/heads/master | <file_sep># 分かりやすい
class Solution:
    def romanToInt(self, s: str) -> int:
        """Convert a roman numeral to an integer by scanning adjacent
        symbol pairs: a symbol smaller than its successor is subtracted,
        otherwise it is added; the final symbol is always added."""
        values = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
        total = 0
        for current, following in zip(s, s[1:]):
            if values[current] < values[following]:
                total -= values[current]
            else:
                total += values[current]
        return total + values[s[-1]]
# This right-to-left variant is faster.
class Solution:
    def romanToInt(self, s):
        """Convert a roman numeral to an integer by scanning from the right:
        a symbol smaller than the previously seen one is subtracted,
        otherwise added.  (Fix: removed the leftover debug ``print(z, p)``
        that ran on every loop iteration.)"""
        d = {'M': 1000, 'D': 500, 'C': 100, 'L': 50, 'X': 10, 'V': 5, 'I': 1}
        z, p = 0, 'I'
        for c in s[::-1]:
            z, p = z - d[c] if d[c] < d[p] else z + d[c], c
return z<file_sep>/**
* @param {number} x
* @return {number}
*/
var reverse = function(x) {
    // Digits of |x| in reverse order, converted back to a number.
    const magnitude = Number(
        Math.abs(x).toString().split('').reverse().join('')
    );
    // Signed 32-bit clamp: negatives may reach 2^31, positives 2^31 - 1.
    if (x < 0) {
        return magnitude > 2**31 ? 0 : -magnitude;
    }
    return magnitude > 2**31 - 1 ? 0 : magnitude;
};
// This version reads more cleanly.
var reverse = function(x) {
    // Digits of |x| reversed; values past 2^31 overflow signed 32-bit -> 0.
    const reversedInt = parseInt(Math.abs(x).toString().split('').reverse().join(''));
    if (reversedInt > 2**31) return 0;
    // Math.sign restores the original sign (and maps 0 to 0).
    return reversedInt * Math.sign(x);
};
<file_sep>class Solution:
def longestCommonPrefix(self, strs: List[str]) -> str:
    """Return the longest common prefix of ``strs`` ("" when none).

    Fix: the original called ``strs.sort(key=len)``, mutating the caller's
    list as a side effect.  This version scans against the shortest string
    without sorting; the returned prefix is identical.
    """
    if not strs:
        return ""
    # The common prefix can never be longer than the shortest string.
    shortest = min(strs, key=len)
    for i, ch in enumerate(shortest):
        for s in strs:
            if s[i] != ch:
                return shortest[:i]
    return shortest
"JavaScript",
"Python"
] | 3 | Python | Tomoya113/leetcode | f5958fdc6e46f677f0907bbef192899b61171765 | 5ae362d86f5dc4864ebadc905b48044bd4c8c804 |
refs/heads/master | <file_sep># naval-warfare
A replication of a well-known board game involving ships that do battle.
<file_sep>#! /usr/bin/env python
class Player:
def __init__(self, name):
self.name = name
self.board = {}
self.ships = {
'LCS': {'length': 2, 'coords': []},
'DD': {'length': 3, 'coords': []},
'SSN': {'length': 3, 'coords': []},
'CGN': {'length': 4, 'coords': []},
'CVN': {'length': 5, 'coords': []},
}
def __str__(self):
return 'Captain {0} is a player.'.format(self.name)
def generate_board(self):
for x in range(0, 10):
for y in range(0, 10):
self.board[str(x) + str(y)] = '-'
def show_board(self):
print('\n')
print('Captain {}'.format(self.name))
print('\n 0 1 2 3 4 5 6 7 8 9')
for x in range(0, 10):
row = []
for y in range(0,10):
row.append(self.board[str(x) + str(y)])
print('{0} {1}'.format(str(x), ' '.join(row)))
print('\n')
def sonar(self, space):
shipspaces = []
for ship in self.ships:
for coord in self.ships[ship]['coords']:
shipspaces.append(coord)
if space in shipspaces:
return 'ship'
elif int(space) > 99 or int(space) < 0:
return 'out of range'
else:
return 'empty'
def place_ship(self, ship, startpoint, direction):
coords = [startpoint]
coord = int(startpoint)
interval = 0
if direction == 'n':
interval = -10
elif direction == 's':
interval = 10
elif direction == 'e':
interval = 1
elif direction == 'w':
interval = -1
for space in range(1, self.ships[ship]['length']):
coord -= 10
coords.append(str(coord))
valid_placement = True
for coord in coords:
if self.sonar(coord) == 'empty':
pass
else:
valid_placement = False
if valid_placement:
for coord in coords:
self.ships[ship]['coords'].append(coord)
else:
print("Error placing {0}. A ship already exists there, \
or placement would be off map".format(ship))
def attack(self, target):
if self.board[target] == '-':
if self.sonar(target) == 'empty':
self.board[target] = 'o'
elif self.sonar(target) == 'ship':
print("Good hit!")
self.board[target] = 'X'
elif self.sonar(target) == 'out of range':
print('Specified target location out of range.')
else:
print('You have already fired at that location!')
# MAIN
human = Player('Human')
cpu = Player('CPU')
human.generate_board()
cpu.generate_board()
human.place_ship('CVN', '44', 'n')
human.place_ship('DD', '74', 'n')
human.place_ship('LCS', '94', 'n')
human.place_ship('SSN', '99', 'n')
human.place_ship('CGN', '57', 'n')
#for ship in human.ships:
# print("{0}: {1}".format(ship, human.ships[ship]['coords']))
human.attack('34')
human.attack('35')
human.attack('25')
human.attack('72')
human.attack('99')
human.attack('79')
human.attack ('38')
human.show_board()
| 169bd98aa84481062345e24f507398b12923e7be | [
"Markdown",
"Python"
] | 2 | Markdown | starsparrow/naval-warfare | ad975280573bfffa27d8ab9c7ee7c0591eb17856 | c82bbd9fc094fbc8b09c76e6291dfe62b3c0f9b6 |
refs/heads/master | <repo_name>rohit2b/rtmAnalyze<file_sep>/atomEntry.py
class atomEntry:
    """Represents an entry in the atom feed from RTM.com.

    All fields default to None until populated by a parser.
    """
    def __init__(self):
        # BUG FIX: the original body only *read* these attributes (bare
        # "self.author" expressions), which raised AttributeError on every
        # instantiation; initialise them instead.
        self.author = None
        self.updatedTime = None
        self.completedTime = None
        self.dueTime = None
        self.priority = None
        self.timeEstimate = None
        self.tags = None
        self.location = None
        # NOTE(review): attribute name keeps the original's spelling
        # ("hasBeenpostponed") so existing callers are unaffected.
        self.hasBeenpostponed = None
        self.rtmList = None
<file_sep>/parse.py
import xml.etree.ElementTree as ET
def tryParse():
    """Build a minimal <html><head><title> document and return it.

    Returns an ElementTree whose root is <html> with a <head> child
    containing a <title> element whose text is "Page Title".
    """
    root = ET.Element("html")
    head = ET.SubElement(root, "head")
    # BUG FIX: the title was attached to <html> instead of <head>, leaving
    # the freshly created head element empty and unused.
    title = ET.SubElement(head, "title")
    title.text = "Page Title"
    return ET.ElementTree(root)
# Build the demo document and dump its XML to stdout.
tree = tryParse()
ET.dump(tree)
<file_sep>/rtmAnalyze.py
import xml.etree.ElementTree as ET
def loadXMLFile(fileName):
    """Parse the XML file at ``fileName`` and return its ElementTree.

    Prints the root element's tag as a quick sanity check.
    """
    tree = ET.parse(fileName)
    root = tree.getroot()
    # Parenthesised print: identical output under Python 2, valid Python 3.
    print(root.tag)
    return tree
def iterateTree(tree):
    """Walk ``tree`` printing a short diagnostic for its first elements.

    Prints every element's tag; for elements after the tenth also prints
    their text (when non-empty) and attribute keys (when present); stops
    entirely after the twentieth element.  Returns 0.
    """
    # Iterate directly; the original bound tree.iter() to a local named
    # "iter", shadowing the builtin.
    c = 0
    for elem in tree.iter():
        # Parenthesised prints: identical in Python 2, valid in Python 3.
        print("elem: " + elem.tag)
        c = c + 1
        if c > 10:
            if elem.text:
                print("text: " + elem.text)
            if elem.keys():
                print("has attribs: ")
                print(elem.keys())
        if c > 20:
            break
    return 0
def parseEntry(entryElem):
    """Convert an <entry> element into an atomEntry object.

    Not yet implemented.  The original returned the undefined name
    ``entry``, raising NameError on every call; fail loudly and
    explicitly instead until the mapping is written.
    """
    raise NotImplementedError("parseEntry is not implemented yet")
# Load the completed-tasks export and print a short diagnostic listing.
# NOTE(review): assumes "rtm-all-completed-tasks.xml" exists in the working
# directory; ET.parse raises an IOError otherwise.
tree = loadXMLFile("rtm-all-completed-tasks.xml")
iterateTree(tree)
<file_sep>/README.md
rtmAnalyze
==========
Store and analyze RTM tasks | e6bc2229564a50fb8656783764f6271d24bbc066 | [
"Markdown",
"Python"
] | 4 | Python | rohit2b/rtmAnalyze | aa6fd2faab5379b4ba661cc2c8dcc87b0b2d1fa0 | 0afc3dac409593d2e1364336e06bb15fd26dc71f |
refs/heads/master | <file_sep>import java.io.EOFException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import javax.swing.JOptionPane;
/**
* The Account Factory - that will be used remotely that will be Serializable
* and that will implement the Account Interface
* @author <NAME> - A00190040
*
*/
public class AccountFactory extends UnicastRemoteObject implements AccountInterface,Serializable{
/**
* Used 'serialVersionUID' because serialVersionUID is used to ensure that during deserialization
* the same class (that was used during serialize process) is loaded.
*/
private static final long serialVersionUID = -8479767892506957644L;
// Declaration
// Singleton instance - created lazily by getInstance().
private static AccountFactory theInstance = null;
// In-memory store of all accounts; persisted via onSave()/loaded by onStart().
private Vector<Account> theListOfAccounts = new Vector<Account>();
private DecimalFormat df = new DecimalFormat("\u20AC ##,###,##0.00"); // format the balance output
/**
 * Private constructor so that only one AccountFactory can ever be made
 * (singleton pattern).  The UnicastRemoteObject superclass constructor
 * exports this object for RMI; the saved accounts are then loaded from
 * the back-up file.
 *
 * @throws RemoteException if the RMI export fails
 */
private AccountFactory() throws RemoteException {
System.out.println("Constructor for factory has been called");
onStart(); // load previously saved accounts from the back-up file
}
/**
 * Returns the single shared AccountFactory, creating it on first use.
 * Declared synchronized so that concurrent RMI calls cannot race and
 * create two instances (the original lazy check was not thread-safe).
 *
 * @return the singleton AccountFactory
 * @throws RemoteException if the factory cannot be created/exported
 */
public static synchronized AccountFactory getInstance() throws RemoteException {
    if (theInstance == null) {
        theInstance = new AccountFactory();
    } else {
        System.out.println("The factory does already exist");
    }
    return theInstance;
}
/**
 * Loads the saved account information from the back-up file
 * ("bankOfRorkBackUp.ser") into theListOfAccounts.  Account objects are
 * read one at a time until EOFException signals the end of the stream.
 * Any other failure shows an error dialog and terminates the application.
 */
public void onStart() {
try {
FileInputStream aFileInStream = new FileInputStream("bankOfRorkBackUp.ser");
ObjectInputStream aObjectInStream = new ObjectInputStream(aFileInStream);
// Keep reading Account objects until the end of the file is reached.
while(true){
try{
Account savedAccounts = (Account) aObjectInStream.readObject();
theListOfAccounts.add(savedAccounts);
}catch(EOFException e){
// EOF is the normal termination condition here, not an error.
aObjectInStream.close();
break;
}
}
System.out.println("The object references has been read from the back-up file");
} catch (Exception e1) {
JOptionPane.showMessageDialog(null,
"There Was A Problem Reading The Data from File.\n"
+ "\nPlease call the IT Department", "System Error", JOptionPane.ERROR_MESSAGE);
e1.printStackTrace();
System.exit(0); // Exit the program
}
}
/**
 * Serialises every account in theListOfAccounts to the back-up file
 * ("bankOfRorkBackUp.ser" - the name matches the one onStart() reads).
 * Shows an error dialog if anything goes wrong.  The stream is closed in
 * a finally block so the file handle is released even when a write fails
 * part-way through (the original leaked it in that case).
 */
public void onSave() {
    ObjectOutputStream oos = null;
    try {
        oos = new ObjectOutputStream(
                new FileOutputStream("bankOfRorkBackUp.ser"));
        for (Object a : theListOfAccounts) {
            Account acc = (Account) a;
            oos.writeObject(acc);
        }
    } catch (Exception e1) {
        JOptionPane.showMessageDialog(null,"There Was A Problem backing up the accounts.\n" +
                "\nPlease call the IT Department","System Error",JOptionPane.ERROR_MESSAGE);
        e1.printStackTrace();
    } finally {
        if (oos != null) {
            try {
                oos.close();
            } catch (Exception ignored) {
                // Nothing sensible to do if close itself fails.
            }
        }
    }
}
/**
 * Creates a new bank account, adds it to the in-memory account list and
 * persists the updated list to disk.
 *
 * @param aANum - Account Number
 * @param aFName - First Name
 * @param aLName - Second Name
 * @param aAge - Age
 * @param aSName - Street Name
 * @param aTCity - Town/City Name
 * @param aRegion - Region Name
 * @param aCountry - Country Name
 * @param aBalance - Initial Balance
 */
public void createAccount(int aANum, String aFName, String aLName,int aAge, String aSName,
        String aTCity, String aRegion, String aCountry, double aBalance) {
    System.out.println("Creating an Account called " + aANum);
    Account created = new Account(aANum, aFName, aLName, aAge, aSName,
            aTCity, aRegion, aCountry, aBalance);
    theListOfAccounts.add(created);
    onSave(); // persist the updated list of accounts
}
/**
 * Flattens every account into a single list of strings, sorted by account
 * number.  Each account contributes nine consecutive entries: number,
 * first name, last name, age, street, town/city, region, country and the
 * Euro-formatted balance.
 *
 * @return ArrayList<String> - the flattened, sorted account data
 */
public ArrayList<String> getAllAccounts(){
    ArrayList<String> flattened = new ArrayList<String>();
    // Order the accounts by account number before flattening them.
    Collections.sort(theListOfAccounts);
    for (Account account : theListOfAccounts) {
        flattened.add(String.valueOf(account.getAccountNum()));
        flattened.add(account.getFirstName());
        flattened.add(account.getLastName());
        flattened.add(String.valueOf(account.getAge()));
        flattened.add(account.getStreetName());
        flattened.add(account.getTownCity());
        flattened.add(account.getRegion());
        flattened.add(account.getCountry());
        flattened.add(df.format(account.getBalance())); // formatted balance
    }
    return flattened;
}
/**
 * Looks up one account by its number and returns its details flattened
 * into nine strings (same field order as getAllAccounts).  The returned
 * list is empty when no account matches.
 *
 * @param - num - Account number to search for.
 * @return - ArrayList<String>
 */
public ArrayList<String> getAccountById(int num){
    ArrayList<String> details = new ArrayList<String>();
    for (Account account : theListOfAccounts) {
        if (account.getAccountNum() != num) {
            continue; // not the account we are after
        }
        details.add(String.valueOf(account.getAccountNum()));
        details.add(account.getFirstName());
        details.add(account.getLastName());
        details.add(String.valueOf(account.getAge()));
        details.add(account.getStreetName());
        details.add(account.getTownCity());
        details.add(account.getRegion());
        details.add(account.getCountry());
        details.add(df.format(account.getBalance())); // formatted balance
    }
    return details;
}
/**
 * Searches the accounts for a matching person.  Currently only the
 * "Last Name" field is supported; the comparison ignores case.  Every
 * matching account contributes nine strings (same order as
 * getAllAccounts).
 *
 * @param - value - text to search
 * @param - field - field to search
 * @return - ArrayList<String> - a list of accounts
 */
public ArrayList<String> getAccountByField(String value, String field){
    ArrayList<String> matches = new ArrayList<String>();
    for (Account account : theListOfAccounts) {
        // Same operand order as before: value comparison first.
        if (value.equalsIgnoreCase(account.getLastName()) && field.equals("Last Name")) {
            matches.add(String.valueOf(account.getAccountNum()));
            matches.add(account.getFirstName());
            matches.add(account.getLastName());
            matches.add(String.valueOf(account.getAge()));
            matches.add(account.getStreetName());
            matches.add(account.getTownCity());
            matches.add(account.getRegion());
            matches.add(account.getCountry());
            matches.add(df.format(account.getBalance())); // formatted balance
        }
    }
    return matches;
}
/**
 * Deletes the account with the given account number and persists the
 * updated account list.
 *
 * @param - num - the account number to be deleted
 * @return - Boolean - true(account deleted) or false(could not delete or not found)
 */
public boolean deleteAccount(int num){
    for (int i = 0; i < theListOfAccounts.size(); i++) {
        Account candidate = theListOfAccounts.get(i);
        if (candidate.getAccountNum() == num) {
            theListOfAccounts.remove(i);
            onSave(); // persist the updated list of accounts
            return true;
        }
    }
    return false; // no account with that number
}
/**
 * Deposits money into the account with the given number and persists the
 * updated account list.
 *
 * @param - accNum - The account number
 * @param - Amount to be deposited
 */
public void depositMoney(int accNum, double amount){
    for (Account account : theListOfAccounts) {
        if (account.getAccountNum() == accNum) {
            account.deposit(amount);
            onSave(); // persist the updated list of accounts
        }
    }
}
/**
 * Withdraws money from the account with the given number and persists the
 * updated account list on success.
 *
 * @param - accNum - The account number
 * @param - Amount to be deposited
 * @return - String - "success", "insufficient" or "noaccount"
 */
public String withdrawMoney(int accNum, double amount){
    for (Account account : theListOfAccounts) {
        if (account.getAccountNum() != accNum) {
            continue; // not the account we are after
        }
        if (!account.withdraw(amount)) {
            return "insufficient"; // not enough money in the account
        }
        onSave(); // persist the updated list of accounts
        return "success";
    }
    return "noaccount"; // no account with that number exists
}
/**
 * Checks whether an account number is free.  Used when creating and when
 * updating accounts.
 *
 * @param - accNum - The account number
 * @return - Boolean - true(Account number is not valid - present)
 *                     false(Account number is valid - not present)
 */
public boolean accountNumberCheck(int accNum){
    for (Account account : theListOfAccounts) {
        if (account.getAccountNum() == accNum) {
            return false; // number already in use
        }
    }
    return true; // number is free
}
/**
 * Updates every field of the account with the given number and persists
 * the updated account list.  Only the first matching account is changed.
 *
 * @param accountNum - Account Number
 * @param fName - First Name
 * @param lName - Second Name
 * @param age - Age
 * @param sName - Street Name
 * @param town - Town/City Name
 * @param region - Region Name
 * @param country - Country Name
 * @param balance - Initial Balance
 */
public void updateAccount(int accountNum, String fName, String lName, int age, String sName,
        String town, String region, String country, double balance){
    for (Account account : theListOfAccounts) {
        if (account.getAccountNum() != accountNum) {
            continue; // not the account we are after
        }
        account.setFirstName(fName);
        account.setLastName(lName);
        account.setAge(age);
        account.setStreetName(sName);
        account.setTownCity(town);
        account.setRegion(region);
        account.setCountry(country);
        account.setBalance(balance);
        onSave(); // persist the updated list of accounts
        return; // only the first match is updated (as before)
    }
}
/**
 * Returns the transaction history of the account with the given number,
 * or an empty vector when no account matches.  Used by the "show all"
 * feature.
 *
 * @param - num - The account number
 * @return - Vector<Transactions> - the account's transactions
 */
public Vector<Transactions> showAllTranactions(int num){
    Vector<Transactions> history = new Vector<Transactions>();
    for (Account account : theListOfAccounts) {
        if (account.getAccountNum() == num) {
            history = account.getTransactions();
        }
    }
    return history;
}
/**
 * Sums account balances grouped by one account field, for the statistics
 * charts.  When statA is "Region" and statB is "Balance" (ignoring case)
 * the grouping field is the region; any other combination groups by
 * town/city - exactly as before.  Key matching is case-insensitive and
 * the first spelling seen becomes the map key (original behaviour).
 *
 * @param - statA - The field
 * @param - statB - Balance
 * @return - Map<String,Double> - The data for the statistics
 */
public Map<String,Double> getStasAVStatB(String statA, String statB){
    boolean byRegion = statA.equalsIgnoreCase("Region")
            && statB.equalsIgnoreCase("Balance");
    // ConcurrentHashMap kept for compatibility with the previous return value.
    Map<String,Double> statMap = new ConcurrentHashMap<String,Double>();
    for (Object a : theListOfAccounts) {
        Account acc = (Account) a;
        String key = byRegion ? acc.getRegion() : acc.getTownCity();
        mergeBalanceIgnoreCase(statMap, key, acc.getBalance());
    }
    return statMap;
}

/**
 * Adds {@code balance} to the entry whose key equals {@code key} ignoring
 * case, or inserts a new entry when none matches.  The linear scan (rather
 * than containsKey) is deliberate: it preserves the original
 * case-insensitive merging semantics.
 */
private void mergeBalanceIgnoreCase(Map<String,Double> statMap, String key,
        double balance){
    for (Map.Entry<String,Double> entry : statMap.entrySet()) {
        if (entry.getKey().equalsIgnoreCase(key)) {
            entry.setValue(entry.getValue() + balance);
            return;
        }
    }
    statMap.put(key, balance);
}
/**
 * Counts how many clients there are of each age, for the statistics view.
 * Unlike getStasAVStatB the keys are Integers with well-defined equality,
 * so a direct map lookup replaces the original O(n) entry scan per
 * account; the resulting counts are identical.
 *
 * @return - Map<Integer,Integer> - age mapped to number of clients
 */
public Map<Integer,Integer> getAgeOfClients(){
    Map<Integer,Integer> statAgeMap = new ConcurrentHashMap<Integer,Integer>();
    for (Object a : theListOfAccounts) {
        Account acc = (Account) a;
        Integer age = Integer.valueOf(acc.getAge());
        Integer current = statAgeMap.get(age);
        if (current == null) {
            statAgeMap.put(age, Integer.valueOf(1)); // first client of this age
        } else {
            statAgeMap.put(age, Integer.valueOf(current.intValue() + 1));
        }
    }
    return statAgeMap;
}
/**
 * Computes overall account statistics in a single pass: total money,
 * average balance, largest balance, smallest balance, average age and the
 * number of accounts (in that order).
 *
 * When there are no accounts at all, an all-zero list is returned instead
 * of throwing NoSuchElementException from firstElement() (the original
 * behaviour on an empty list).
 *
 * @return - ArrayList<Double> - A Array List of Overall Account Statistics
 */
public ArrayList<Double> getAccountStatistics(){
    ArrayList<Double> accountStats = new ArrayList<Double>();
    if (theListOfAccounts.isEmpty()) {
        // No accounts: return all-zero statistics rather than crashing.
        for (int i = 0; i < 6; i++) {
            accountStats.add(0.0);
        }
        return accountStats;
    }
    double count = 0;
    double totalMoney = 0;
    double totalAge = 0;
    // Seed max/min with the first account's balance, as before.
    double maxAccount = theListOfAccounts.firstElement().getBalance();
    double minAccount = maxAccount;
    for (Object a : theListOfAccounts) {
        Account acc = (Account) a;
        count++;                          // count the accounts
        totalMoney += acc.getBalance();   // total of all balances
        totalAge += acc.getAge();         // total of all ages
        if (acc.getBalance() > maxAccount) {
            maxAccount = acc.getBalance();
        }
        if (acc.getBalance() < minAccount) {
            minAccount = acc.getBalance();
        }
    }
    double averMoney = totalMoney / count; // average money per account
    double averAge = totalAge / count;     // average customer age
    accountStats.add(totalMoney);
    accountStats.add(averMoney);
    accountStats.add(maxAccount);
    accountStats.add(minAccount);
    accountStats.add(averAge);
    accountStats.add(count);
    return accountStats;
}
}
<file_sep>import java.rmi.Naming;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
/**
 * The bank of rory server
 * @author <NAME> - A00190040
 */
public class BankOfRoryServer {

    /**
     * Entry point: creates the singleton AccountFactory and binds it in the
     * local RMI registry under the well-known name "BankOfRoryServer".
     *
     * @param args unused
     */
    public static void main(String args[]){
        try{
            System.out.println("Bank Of Rory Server Starting ....");
            // Get an instance of the Account Factory
            AccountFactory aFactory = AccountFactory.getInstance();
            // Bind it to the string 'BankOfRoryServer' in the RMI Registry
            Naming.rebind("BankOfRoryServer", aFactory);
            System.out.println("RMI Server ready....");
            System.out.println("Waiting for Request...");
        }
        catch(Exception e){
            // Fixed message: previously read "Count not start ...".
            System.out.println("Could not start the Bank Of Rory Server ....");
            e.printStackTrace();
        }
    }
}
<file_sep>#![alt text][logo] Bank of Rory RMI Assignment
[logo]: https://raw.github.com/rorynee/bankOfRoryRMIAssignment/master/bin/images/borlogo.png "Bank Of Rory Logo"
[Click here to view demo video]( https://www.youtube.com/watch?v=fAkrr7ldYlQ "Implementing a 3 Tier Object Oriented System")
## Assignment Outline
Implementing a 3 Tier Object Oriented System
Objective: This project will create a 3 tier system in Java:
**Tier 1** – Will consist of a GUI system implemented in Swing. The GUI should consist of a table on which the user can implement CRUD – Create, Read, Update and Delete actions.
**Tier 2** – Will consist of a Java server. The Java server will communicate with the client using RMI. The Java server will implement the Factory design pattern.
**Tier 3** – Will consist of Java Objects stored persistently. Do not implement the persistence tier using a DB.
##Assignment concept
I choose to generate the data set around bank customers and account as I felt it lent itself to C.R.U.D.L. operations.
Create – Make a new account
Read – Search, Read transactions, read the statistics
Update – Update account information, deposit money and withdraw money
Delete – Delete an account
List – Show all accounts
I created a bank account system that can be used by bank teller to Create, Read, Update, Delete and List the accounts. As part of the bank accounts I also wanted to keep track of a customer’s transactions (withdraw and deposit) just like a real bank would do.
Also I wanted to be able to generate dynamic statistics in relation to that data so I could generate 3d bar charts, line graph and other data in relation the account held in a bank.
***
## Installation and Usage
**1.** Download the files in to Eclipse
**2.** Open 3 command prompts and navigate to the bin directory
In one command shell, in the bin directory **Run** rmic.exe program. This Generates the Stub classes
*In my case with the project bin folder selected > “C:\Program Files\Java\jdk1.7.0_15\bin\rmic.exe” AccountFactory*
In another of the command shells - start the RMI registry using the command “rmiregistry”
*In my case with the project bin folder selected > “C:\Program Files\Java\jdk1.7.0_15\bin\ rmiregistry.exe”*
In another command shell – start the server “java ProductServer”
*In my case with the project bin folder selected > java BankOfRoryServer*
**3.** Open Eclipse and run the file MainGuiClass
***
##System Architecture
I set up the project as described in the “Implementing a 3 Tier Object Oriented System” outline. My ‘AccountFactory’ class implements the ‘AccountInterface’ class. The ‘Account’ class is aggregated to the ‘AccountFactory’ class and the ‘transaction’ class is aggregated to the ‘Account’ class. The Account factory, through the Account interface, uses RMI’s mechanism of the RMI stubs and the RMI registry to connect with the server. The server in turn can be connected to by the ‘MainGuiClass’, which uses an identification (URL) to find the target machine where the RMI registry and remote objects are located. Then the ‘MainGuiClass’ requests the RMI registry on the target machine to return an object reference that corresponds to the well-known name. Then, using the ‘AccountInterface’, the two sides can pass objects back and forth to each other.
## Class Descriptions
See classdiagram.png for more information.
###BankOfRoryServer
The Bank Of Rory Server binds the well-known (“BankOfRoryServer”) name to the object of the account factory and stores that in the Rmi registry.
###Account.Java
The Account class outlines all the information involved in a bank account. The Account class also uses a vector of type ‘Transactions’ to hold all the transactions that are made by an account. The types of transactions that are take note of is when the account was created and the deposit and withdraw money. I have also implemented the comparable interface so I will be able to compare one account to another account using the account number. This is used when sorting the account numbers in the table.
###Transactions
The Transaction class outlines the information that is used to make up a transaction. Here the time stamp is generated using a simple date format and the balance is also noted at the time the transaction is made. There are no setter methods here, as I did not want somebody to be able to change the transactions once they were made.
###AccountInterface
The account interface acts as an interface for the account factory. The RMI stubs are created using the interface.
###AccountFactory
The account factory holds all the methods for the CRUDL actions.
Create – createAccount()
Read – getAccountById(), getAccountByField(), showALLTranactions(), getAgeOfClients, getStasAVStatB(), getAccountStatistics()
Update – updateAccount(),depositMoney(), withdrawMoney()
Delete – deleteAccount()
List – getAllAccounts()
The method called getInstance() makes only one instantiation of the AccountFactory and give back a reference to the object of the account factory if another class requests to make another object of the account factory.
There is also a method to check whether an account number is in the system. I also have an onStart() and an onSave() method to serialize and deserialize the data to and from file.
###MainGuiClass
The MainGuiClass sets up the Gui that the user sees. As you can see there is a lot of attributes declared in the MinGuiClass. In the code I have reused ‘private JLabel’ a few times as there is a lot of label and did not want to have them untidy.
A few thing to note, I have created an ArrayList of Jpanels to be used by a method to hide all panels and to make visible the panel passed to it (showHide() method).
The table was set up using the defaultTableModel with a 2d array called ‘table’ and two arrays for the heading called ‘listSetHeader’ and ‘transHeader’.
There are two separate decimal format mentioned here to accommodate the information in the table and for the input of new values (new account, update account).
The variable called ‘buttonCheck’ is used to see what radiobutton is checked so I can clear it.
Also here you see the inclusion of theURL and the AccountInterface used by RMI. The Account Factory will be referred to as ‘theFactory’ when it is being called in the GUI.
###Methods
I have employed the use of multiple helper methods here so as not to clutter up the code.
setUpMenu(); // set up the drop down menu
setUpPanals(); // set up the panels to be used
setUpLogo(); // Set up to Logo and the header label
setUpMiddlePanel(); // set up the middle panel
setUpRadioPanel(); // set up the panel with the radio buttons
setUpOutputPanel(); // set up the main display output area
The actionPerformed method handles all the events form the user.
The other methods of note here are showHide() (mentioned earlier) and the pair checkIntFields() and CheckDoubleFields(). The latter two functions test whether a value is a number and not text.
I also have two different functions that print the data to the table, because I show the data differently on the show history page (printAccounts() and showTransactions()).
##Additional Information
Please view the Assignment3_WriteUp.pdf for further information about this assignment.
<file_sep>import java.io.Serializable;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
/**
 * The Transaction class holds all the details of a Transaction of an account.
 * The Transaction class implements Serializable so the information can be saved to file.
 * Instances are immutable: every field is final and there are no setters.
 * @author <NAME> - A00190040
 */
public class Transactions implements Serializable {

    /**
     * Used 'serialVersionUID' because serialVersionUID is used to ensure that during deserialization
     * the same class (that was used during serialize process) is loaded.
     */
    private static final long serialVersionUID = -3798520143258848268L;

    // Declarations - private and final so a transaction can never be altered
    // once it has been made.  Field names are unchanged to stay compatible
    // with previously serialized data.
    private final String timeStamp;
    private final String trans;
    private final String CBalance;
    // Formats the balance as a Euro amount, e.g. "€ 1,234.50".
    private final DecimalFormat df = new DecimalFormat("\u20AC ##,###,##0.00");

    /**
     * Transaction Constructor.  Captures the current date/time as the
     * transaction's time stamp.
     *
     * @param transaction - The transaction made
     * @param bal - Current Balance of the Account at time of transaction
     */
    public Transactions(String transaction, double bal){
        timeStamp = new SimpleDateFormat("dd/MM/yyyy - HH:mm").format(Calendar.getInstance().getTime());
        trans = transaction;
        CBalance = df.format(bal);
    }

    /**
     * A method to get the time stamp of the transaction
     * @return - String - Time Stamp in "dd/MM/yyyy - HH:mm" form
     */
    public String getTimeStamp() {
        return timeStamp;
    }

    /**
     * A method to get the transaction
     * @return - String - Transaction
     */
    public String getTrans() {
        return trans;
    }

    /**
     * A method to get the current balance at time of transaction
     * @return - String - the Euro-formatted balance
     */
    public String getCBalance() {
        return CBalance;
    }
}
| b527b7dae30c2851fa1e8173e716e000f1a5a619 | [
"Markdown",
"Java"
] | 4 | Java | rorynee/bankOfRoryRMIAssignment | 27a96e5d475b41aaf2c8bb9d5aa08ea11dc03b4b | a91992d3a925647d0924e6af28ef9179d02e4f57 |
refs/heads/master | <file_sep><?php
/**
* Basic concept: Interface to the Processes MySQL table
*
* Uses:
*
*/
require_once( 'MySqlObject.php' );
require_once( 'servers.php' );
/**
 * Interface to the PROCESSES table.  Tracks every server's running
 * processes and rebalances CPU allocation whenever a server's process
 * list changes within a request.
 */
class Processes extends MySQLObject
{
    // Server IDs whose processes have already been rebalanced during this
    // request; prevents redistributing the same server's CPU twice.
    private $modifiedServers = array();
    // Wall-clock time of the first redistribution in this request (0 = none yet).
    private $lastUpdateTime = 0;

    /** Returns the time of the first CPU redistribution this request (0 if none). */
    function getLastUpdateTime()
    {
        return $this->lastUpdateTime;
    }

    /** Returns the IDs of servers already rebalanced during this request. */
    function getModifiedServers()
    {
        return $this->modifiedServers;
    }

    /**
     * Marks $serverid as modified.  On the first modification of a given
     * server in this request, its CPU allocation is redistributed first.
     */
    function mergeModifiedServer( $serverid )
    {
        if( !in_array( $serverid, $this->modifiedServers ) )
        {
            $serverObj = new Servers();
            $serverInfo = $serverObj->getServerByID( $serverid );
            $this->redistributeCPU( $serverInfo );
            $this->modifiedServers[] = $serverid;
        }
    }

    /** Column list of the PROCESSES table (required by MySQLObject). */
    function getColumns( )
    {
        return array( 'ID', 'TARGET_PROGRAM', 'OWNING_SERVER', 'CPU_USAGE',
                      'RAM_USAGE', 'BW_USAGE', 'OPERATION', 'LINKED_ID',
                      'CYCLES_COMPLETED', 'CYCLES_REMAINING' );
    }

    /** Table name (required by MySQLObject). */
    function getTableName( )
    {
        return 'PROCESSES';
    }

    /**
     * Inserts a new local process (cycles stored scaled by 1000) and
     * rebalances the owning server's CPU.  Returns the new row's ID.
     */
    function addProcess( $target, $owningServer, $cpu, $ram, $bw, $operation,
                         $remainingCycles )
    {
        $this->mergeModifiedServer( $owningServer );
        return $this->insert( array( 'NULL', $target, $owningServer, $cpu,
                                     $ram, $bw, $operation, 0, 0,
                                     $remainingCycles * 1000 ) );
    }

    /**
     * Creates a linked pair of processes - one on the owner server, one on
     * the target server - cross-linked through LINKED_ID, and rebalances
     * both servers.  Returns array( ownerRowId, targetRowId ).
     * NOTE(review): only the target-side row scales $remainingCycles by
     * 1000 while the owner-side row does not (addProcess always scales) --
     * confirm whether the owner-side row should be scaled as well.
     */
    function addRemoteProcess( $target, $ownerServer, $targetServer, $ownerCPU,
                               $targetCPU, $ownerRAM, $targetRAM, $bw,
                               $operation, $remainingCycles )
    {
        $this->mergeModifiedServer( $ownerServer );
        $this->mergeModifiedServer( $targetServer );
        $id1 = $this->insert( array( 'NULL', $target, $ownerServer, $ownerCPU,
                                     $ownerRAM, $bw, $operation, 0, 0,
                                     $remainingCycles ) );
        $id2 = $this->insert( array( 'NULL', $target, $targetServer, $targetCPU,
                                     $targetRAM, $bw, $operation, $id1, 0,
                                     $remainingCycles * 1000 ) );
        $this->update( array( 'LINKED_ID' => $id2), array( 'ID' => $id1 ) );
        return array( $id1, $id2 );
    }

    /** Returns the single process row with the given ID. */
    function getProcessByID( $id )
    {
        return $this->getSingle( $id );
    }

    /** Returns all processes owned by the given server, ordered by ID. */
    function getProcessesByServer( $serverid )
    {
        return $this->get( array( 'OWNING_SERVER' => $serverid ),
                           array( 'ID' => 'ASC' ) );
    }

    /** Returns all processes targeting the given program, ordered by ID. */
    function getProcessesByProgram( $programid )
    {
        return $this->get( array( 'TARGET_PROGRAM' => $programid ),
                           array( 'ID' => 'ASC' ) );
    }

    /**
     * Returns the server's total resource consumption as one row with the
     * aliases USED_RAM and TOTAL_CPU.
     */
    function getConsumptionByServer( $serverid )
    {
        $ret = $this->get( array( 'OWNING_SERVER' => $serverid ), null, 1,
                           array( 'SUM(RAM_USAGE) AS USED_RAM',
                                  'SUM(CPU_USAGE) AS TOTAL_CPU' ) );
        return $ret[ 0 ];
    }

    /** Returns every process in the table, ordered by ID. */
    function getAllProcesses( )
    {
        return $this->get( NULL, array( 'ID' => 'ASC' ) );
    }

    /**
     * Returns the processes on $serverid whose program type consumes hard
     * disk space (the type list comes from getHDDConsumingOperations()).
     * NOTE(review): $serverid is interpolated into raw SQL -- assumed to
     * be a trusted integer; confirm that callers sanitise it.
     */
    function getHDDConsumersByServer( $serverid )
    {
        return $this->getCustom( 'SELECT P.SIZE, P.TYPE, R.* FROM ' .
                                 'PROCESSES AS R INNER JOIN PROGRAMS AS P ON ' .
                                 'P.ID=R.TARGET_PROGRAM WHERE ' .
                                 "R.OWNING_SERVER=$serverid AND " .
                                 'P.TYPE IN(' .
                                 implode( ',', getHDDConsumingOperations() ) .
                                 ') ORDER BY R.ID' );
    }

    /** Deletes process $id and rebalances CPU on its owning server. */
    function deleteProcess( $id, $serverid )
    {
        $this->mergeModifiedServer( $serverid );
        return $this->delete( array( 'ID' => $id ) );
    }

    /**
     * Computes the server's operating ratio: available CPU divided by the
     * total CPU requested by its processes (0 when it has no processes),
     * rounded to four decimal places.
     */
    function calculateServerRatio( $serverid, $servercpu )
    {
        $procs = $this->getProcessesByServer( $serverid );
        if( count( $procs ) == 0 )
        {
            return array( 'LAST_UPDATE_TIME' => 0,
                          'OPERATING_RATIO' => 0 );
        }
        $cpuTotal = 0;
        foreach( $procs as $proc )
        {
            $cpuTotal += $proc[ 'CPU_USAGE' ];
        }
        // Ratio > 1 means spare CPU; < 1 means the processes are contended.
        $ratio = round( ( $servercpu / $cpuTotal ), 4 );
        return array( 'LAST_UPDATE_TIME' => $this->lastUpdateTime,
                      'OPERATING_RATIO' => $ratio );
    }

    /**
     * Advances CYCLES_COMPLETED / CYCLES_REMAINING for every process on the
     * given server, based on the time elapsed since the server's last
     * update and its operating ratio, then persists the new values.
     * NOTE(review): echoes "updateProcessProgress(...)" JavaScript calls
     * straight to the output stream -- presumably consumed by the browser;
     * verify against the calling page.
     */
    function redistributeCPU( $serverInfo )
    {
        $serverid = $serverInfo[ 'ID' ];
        $serverratio = $serverInfo[ 'OPERATING_RATIO' ];
        $serverupdate = $serverInfo[ 'LAST_UPDATE_TIME' ];
        $procs = $this->getProcessesByServer( $serverid );
        // Update last update time
        if( $this->lastUpdateTime == 0 )
        {
            $this->lastUpdateTime = time();
        }
        // If there aren't any processes, no need to redistribute
        if( count( $procs ) == 0 )
        {
            return;
        }
        // NOTE(review): $cpuTotal is computed but never used below --
        // possibly a leftover from an earlier version.
        $cpuTotal = 0;
        foreach( $procs as $proc )
        {
            $cpuTotal += $proc[ 'CPU_USAGE' ];
        }
        $nowtime = $this->lastUpdateTime;
        if( $serverratio != 0 )
        {
            foreach( $procs as $proc )
            {
                $previousConsumed = $proc[ 'CYCLES_COMPLETED' ];
                $perSecondRatio = $proc[ 'CPU_USAGE' ] * $serverratio;
                $elapsedTime = $nowtime - $serverupdate;
                $completedCycles = $perSecondRatio * $elapsedTime;
                $newCompleted = $previousConsumed + $completedCycles;
                $newRemaining = $proc[ 'CYCLES_REMAINING' ] - $completedCycles;
                if( $newRemaining < 0 )
                {
                    $newRemaining = 0;
                }
                $this->update( array( 'CYCLES_COMPLETED' => $newCompleted,
                                      'CYCLES_REMAINING' => $newRemaining ),
                               array( 'ID' => $proc[ 'ID' ] ) );
                echo( "updateProcessProgress({$proc['ID']},$newCompleted," .
                      "$newRemaining);" );
            }
        }
    }
}
?><file_sep>/**
* @file gui.js
*
* @todo On window resize, ensure popup height is less than max
* @todo Sort start menu buttons as added
*/
/**
 * Builds the pre-login landing page: a three-pane layout (fixed header,
 * fixed news column, main area), a welcome message, the site logos and a
 * placeholder news blurb, then re-applies tooltips.
 */
function indexSetup()
{
    // Setup the basic layout
    $("body").empty()
        .css( "padding", "0px" )
        .append(
            $("<div id='layout-container'>")
                .append("<div id='news' class='ui-layout-east'></div>")
                .append("<div id='header' class='ui-layout-north'></div>")
                .append("<div id='main' class='ui-layout-center'></div>")
        );
    // Make the layout into a useable one: header and news panes are fixed
    // (not closable or resizable, no splitter spacing).
    $("#layout-container").layout({
        north: {
            closable: false
            , resizable: false
            , spacing_open: 0
        }
        , east: {
            closable: false
            , resizable: false
            , spacing_open: 0
        }
    }).sizePane("north", 79);
    $("#main")
        .append("<div id='main-center'>Welcome to Macro Web Security.</div>");
    // Show the login form (defined elsewhere in this file).
    doLogin();
    $("#header")
        .append("<div id='logofull'></div><div id='logo'></div>");
    // Placeholder news text appended into the login form area.
    // NOTE(review): "some" + "swoopy" concatenates without a space, so the
    // rendered text reads "someswoopy".
    $("<div>")
        .append("<br>News RSS<br><br>The News will go here and we can do some" +
        "swoopy stuff to it once we start updating the site to normal users. " +
        "To include a full blown function just to pull it from the DB.")
        .appendTo("#loginform");
    resetqtip();
}
/**
* Call to reset all qtip's using default settings
*/
function resetqtip()
{
    // Re-attach a default-configured qtip to every element with a title
    var titled = $("[title]");
    titled.qtip();
}
/**
* Called upon successful login. Sets up everything about the GUI and prepares
* the client for user input. Creates the layout, adds the taskbar to the
* layout. Also adds a center section for performing the main views. Creates
* several windows that are used for distinct views. Finally sets up the start
* menu so that each button is associated with a specific event.
*
* @param id Unused really...
*/
function validLogin( id )
{
    // Setup the basic layout: wipe the page and build the three panes
    // (taskbar at the bottom, chat to the east, main content in the center)
    $("body").html("")
        .css( "padding", "0px" )
        .append(
            $("<div id='layout-container'>")
            .append("<div id='taskbar' class='ui-layout-south'></div>")
            .append("<div id='east' class='ui-layout-east'>Chat(closeable)</div>")
            .append("<div id='center' class='ui-layout-center'></div>")
        );
    // Make the layout into a useable one; the taskbar is fixed, the chat
    // pane starts closed
    $("#layout-container").layout({
        south: {
            closable: false
            , resizable: false
            , spacing_open: 0
        }
        , east: {
            initClosed: true
            , resizable: false
        }}).sizePane("south", 44);
    // Add the start menu
    $("#taskbar")
        .addClass("slide")
        .append("<div id='menu' class='inner ui-corner-tr'>LAD Task Menu</div>")
        .append("<div id='start' class='start-menu-button'></div>");
    // Set up the taskbar to identify with the popup class
    $("#taskbar")
        .jTaskBar({'winClass': '.popup', 'attach': 'bottom'});
    // Start the start menu hidden
    $("#menu").css({"display" : "none"});
    // Open the start menu on click.
    // NOTE(review): .live() is deprecated/removed in modern jQuery; this
    // relies on the old jQuery version shipped with the app -- confirm
    // before upgrading jQuery.
    $('#start').live("click",function(){
        if($(this).hasClass('active'))
        {
            // Menu is open: close it and give the south pane its
            // overflow behavior back
            $(this).removeClass('active');
            $("#menu").slideToggle('slow',function(){
                $("#layout-container").layout().resetOverflow('south');
            });
        }
        else
        {
            // Menu is closed: let it overflow the south pane and slide open
            $(this).addClass('active');
            $("#layout-container").layout().allowOverflow('south');
            $("#menu").slideToggle('slow',function(){
                $(this).css('height', $("#menu").height() + "px");
            });
        }
    });
    // Add the logout button to the start menu
    addMenuButton( "Logout", "ui-icon-power", function(){
        window.location = '';
        doLogin();
    });
    // Ensure that each popup is sized properly whenever the window resizes
    $(window).resize(function() {
        $('div.popup')
            .css( "max-height", $("#center").height() )
            .css( "max-width", $("#center").width() );
        // 22px accounts for the popup header bar
        $('div.popup_body')
            .css( "max-height", $("#center").height() - 22 )
            .css( "max-width", $("#center").width() );
        resizeHeight($('div.popup_body'));
        resizeWidth($('div.popup_body'));
    });
    // Setup the start menu to hide if something other than it is clicked and
    // it is open
    $('#center, #east, #taskbar :not(#start,#menu)').click(function(){
        var start = $('#start');
        if( start.hasClass( 'active' ) )
        {
            start.removeClass('active');
            $("#menu").slideToggle('slow',function(){
                $("#layout-container").layout().resetOverflow('south');
            });
        }
    });
    // Initialize the options window
    initOptions();
}
/**
 * Adds a button to the start menu, keeping the listing alphabetized with
 * "Logout" pinned to the bottom. Clicking the button closes the start menu
 * and either invokes the view function or restores the existing window for
 * this entry.
 *
 * @param name Display text; whitespace becomes '_' to form the element ID
 *             (only the FIRST whitespace is replaced -- no /g flag; this
 *             matches the same first-only replacement in getPopupContext)
 * @param icon Optional jQuery UI icon class for the button
 * @param fn   Optional function that builds/refreshes the view; when omitted
 *             the button just alerts "<name> INW"
 */
function addMenuButton( name, icon, fn )
{
    var id = name.replace( /\s/, '_' );
    var buttonlist = $("#menu button");
    var button = $( "<button id='" + id + "'>" + name + "</button>" );
    // First button (or Logout itself) goes straight to the end; otherwise
    // insert before the first alphabetically-later button or before Logout
    if( buttonlist.length == 0 || name == "Logout" )
    {
        $("#menu").append( button );
    }
    else
    {
        $("#menu button").each(function(){
            var tobj = $(this);
            var text = tobj.text();
            if( text.localeCompare( name ) > 0 || text == "Logout" )
            {
                button.insertBefore( tobj );
                return false;
            }
            return true;
        });
    }
    var menuobj = $("button#" + id);
    if( icon != undefined )
    {
        menuobj.button({icons: {primary: icon}});
    }
    var enclosedfn;
    if( fn == undefined )
    {
        // No view function supplied: placeholder alert ("INW")
        enclosedfn = function(){
            alert( name + " INW" );
        };
    }
    else
    {
        // Note: this parameter shadows the outer `id`; the click handler
        // below always passes the outer value in.
        enclosedfn = function( id ){
            var obj = $('div#' + id);
            // Call function if there is no window
            if( obj.length == 0 )
            {
                fn();
                return;
            }
            // Make sure it's a popup
            if( !obj.hasClass( "popup" ) )
            {
                obj.addClass('popup');
            }
            // Show the window
            if( obj.css('display') == 'none' )
            {
                // Load offsets persisted when the window was last moved/closed
                var x = getPermCache( "win-" + id + "-x" );
                var y = getPermCache( "win-" + id + "-y" );
                var w = getPermCache( "win-" + id + "-width" );
                var h = getPermCache( "win-" + id + "-height" );
                obj.css({
                    'left': x,
                    'top': y,
                    'width': w,
                    'height': h
                });
                var pu = getPopupContext( id );
                // NOTE(review): /[p-x]/g strips ALL letters p..x (a character
                // class), not the literal "px"; it works for "NNNpx" values
                // but is fragile -- presumably /px/ was intended. Confirm
                // before changing. The -22 removes the header bar height.
                pu.css({
                    'width': w,
                    'height': (toNumber( h.replace( /[p-x]/g, '') ) ) - 22
                });
                obj.fadeIn().queue(function(){
                    $(this).updatejTaskBar();
                    obj.trigger( 'mousedown' );
                    $(this).dequeue();
                });
                // Clear stale contents, then let the view rebuild them
                getPopupContext( id ).empty();
                fn();
            }
            // Fade in the taskbar entry
            if( $('#jTaskBar').find('div#'+id).hasClass('jTask-hidden') )
            {
                $('#jTaskBar').find('div#'+id).removeClass('jTask-hidden');
                obj.fadeIn().queue(function(){
                    $(this).updatejTaskBar();
                    $(this).dequeue();
                });
            }
        }
    }
    // Clicking a menu entry first closes the start menu, then runs the action
    menuobj.click(function(){
        $('#start').click();
        enclosedfn( id );
    });
}
/**
 * Creates a (hidden) popup window shell for the given view name. The window
 * gets a header with refresh/minimize/maximize/close controls and an empty
 * body DIV with id "<id>pu" that views populate later. The window is
 * draggable/resizable within #center and integrates with the jTaskBar
 * plugin; position/size changes are persisted via permCache.
 *
 * @param name        Display name; whitespace becomes '_' for the element ID
 *                    (first whitespace only -- matches getPopupContext)
 * @param resizeProps Optional jQuery UI resizable() options; containment,
 *                    alsoResize and handles are defaulted when absent
 */
function createWindow( name, resizeProps )
{
    var id = name.replace( /\s/, '_' );
    // Setup resize options
    if( resizeProps == undefined )
    {
        resizeProps = {};
    }
    if( !resizeProps.containment )
    {
        resizeProps.containment = '#center';
    }
    if( !resizeProps.alsoResize )
    {
        resizeProps.alsoResize = "#" + id + "pu";
    }
    if( !resizeProps.handles )
    {
        resizeProps.handles = "n, e, s, w, ne, nw, se, sw";
    }
    // Build the window DOM; every handler below closes over `name`/`id`
    $($("<div class='popup' id='" + id + "'></div>"))
    .append($("<div class='popup_header' title='" + name + "'>")
        .append("<div class='popup_title'>" +
            "<span class='ui-icon ui-icon-image popup_image' " +
            "style='float:left'></span>" + name + "</div>"
        )
        // Refresh control: re-requests this view from the server
        .append($("<div class='refresh_popup' title='Refresh'>" +
            "<span class='ui-icon ui-icon-arrowrefresh-1-s'>" +
            "</span></div>")
            .click( function() {
                refreshCurrent( name );
            })
        )
        // Minimize control: hide the window, keep its taskbar entry
        .append($("<div class='min_popup' title='Minimize'><span class='ui-icon " +
            "ui-icon-minus'></span></div>")
            .click( function() {
                var popup = $(this).parents('.popup');
                popup.fadeOut('fast').queue(function(){
                    $(this).updatejTaskBar();
                    $(this).dequeue();
                });
            })
        )
        // Maximize/restore control; toggles between the two states
        .append($("<div class='max_popup' title='Maximize'><span>\u25a1</span></div>")
            .click( function() {
                var div = $(this).parents('.popup');
                var offset = div.offset();
                if( !div.hasClass('popup_max') )
                {
                    // Maximizing: drop drag/resize, stash the current
                    // geometry in the temp cache so restore can undo it
                    div.draggable( "destroy" );
                    div.resizable( "destroy" );
                    div.find('.popup_header').css( "cursor", "default" );
                    tempCache ( "putop" + div.attr("id"), offset.top );
                    tempCache ( "puleft" + div.attr("id"), offset.left );
                    tempCache ( "puheight" + div.attr("id"), div.height() );
                    tempCache ( "pubheight" +
                        div.find('.popup_body').attr("id"),
                        div.find('.popup_body').height() );
                    tempCache ( "puwidth" + div.attr("id"), div.width() );
                    tempCache ( "pubwidth" +
                        div.find('.popup_body').attr("id"),
                        div.find('.popup_body').width() );
                    // Fill the whole center pane, above all other popups
                    div.addClass('popup_max')
                        .removeAttr('style')
                        .css("z-index", "10010")
                        .css( "height", $("#center").height() )
                        .css( "width", $("#center").width() );
                    div.find('.popup_body')
                        .addClass('popup_body_max')
                        .removeAttr('style')
                        .css( "height", $("#center").height() - 20 )
                        .css( "width", $("#center").width() - 2 );
                    // Swap the button into its "Restore" form
                    div.find('.max_popup').attr('title', 'Restore')
                        .addClass('restore_popup')
                        .removeClass('max_popup')
                        .html("<span class='ui-icon ui-icon-newwin'></span>");
                }
                else
                {
                    // Restoring: re-enable drag/resize and bring back the
                    // geometry stashed when the window was maximized
                    div.draggable({
                        'opacity': '0.7',
                        'cancel': '.popup_body',
                        'cursor': 'move',
                        'containment': '#center'
                    });
                    div.resizable({
                        'alsoResize': "#" + id + "pu",
                        'containment': '#center'
                    });
                    div.find('.popup_header').css( "cursor", "move" );
                    div.removeClass('popup_max')
                        .removeAttr('style')
                        .css("z-index", "10009")
                        .css( "max-height", $("#center").height() )
                        .css( "max-width", $("#center").width() );
                    div.find('.popup_body')
                        .removeClass('popup_body_max')
                        .removeAttr('style')
                        .css( "height", getTempCache( "pubheight" +
                            div.find('.popup_body').attr("id")) )
                        .css( "width", getTempCache( "pubwidth" +
                            div.find('.popup_body').attr("id")) )
                        .css( "max-height", $("#center").height() - 20 )
                        .css( "max-width", $("#center").width() );
                    div.find('.restore_popup').attr('title', 'Maximize')
                        .addClass('max_popup')
                        .removeClass('restore_popup')
                        .html("<span>\u25a1</span>");
                }
            })
        )
        // Close control: persist size, hide the window, fire the
        // "windowclose" callback and clear the URL hash
        .append($("<div class='close_popup' title='Close'>" +
            "<span class='ui-icon ui-icon-close'></span></div></div>")
            .click( function() {
                var div = $(this).parents('.popup');
                if( div.length != 0 && div.css( 'width' ) != "" )
                {
                    permCache( "win-" + id + "-width", div.css( 'width' ) );
                    permCache( "win-" + id + "-height", div.css( 'height' ) );
                }
                $(this).parents('.popup').fadeOut('fast').queue(function(){
                    $(this).removeClass('popup');
                    $(this).updatejTaskBar();
                    updateCache( name );
                    // NOTE(review): `window.prototype.cbs` only works if
                    // `window` is shadowed by a project object here -- the
                    // browser global has no `prototype`. Confirm.
                    var cb = window.prototype.cbs[ "windowclose" ];
                    if( cb != undefined )
                    {
                        cb( name );
                    }
                    $(this).dequeue();
                });
                window.location.hash = '';
            })
        )
        .css( "cursor", "move" )
    )
    // The empty body that views fill in later; id is "<id>pu"
    .append(
        $("<div id='" + id + "pu' class='popup_body'></div>")
        .css( {
            "max-height": $("#center").height() - 20,
            "max-width": $("#center").width()
        })
    )
    .toggle(function() {
        $(this).removeClass('popup');
    })
    // Windows start hidden; a menu click fades them in
    .css({
        'display': 'none',
        'position': 'absolute',
        'max-height': $("#center").height(),
        'max-width': $("#center").width()
    })
    .resizable( resizeProps )
    .draggable({
        'opacity': '0.7',
        'cancel': '.popup_body',
        'cursor': 'move',
        'containment': '#center',
        'stack': '.popup',
        // Highlight this window's taskbar entry while dragging
        start: function(event,ui){
            $('#jTaskBar').find('.jTask').removeClass('jTask-current');
            $('#jTaskBar').find('.jTask#' + id).addClass('jTask-current');
        },
        // Persist the final position
        stop: function(event,ui){
            var div = $(this);
            permCache( "win-" + id + "-x", div.css( 'left' ) );
            permCache( "win-" + id + "-y", div.css( 'top' ) );
        }
    })
    // Any click focuses the window: raise it and mark its taskbar entry
    .mousedown(function(){
        $('#jTaskBar').find('.jTask').removeClass('jTask-current');
        $('#jTaskBar').find('.jTask#' + id).addClass('jTask-current');
        $('.popup').css( 'z-index', 1000 );
        $(this).css( 'z-index', 1001 );
        $(this).trigger( 'dragstart' ).trigger( 'drag' ).trigger( 'dragstop' );
    })
    .appendTo($('#center'));
}
// Grow/shift the named popup so its body content fits on screen
function resizePopup( name )
{
    var body = getPopupContext( name );
    resizeHeight( body );
    resizeWidth( body );
}
// Expand a popup body (and its parent window) vertically to fit its scroll
// height, capped at the center pane, and pull the window up if its bottom
// edge would overflow the pane.
function resizeHeight( element )
{
    var scrollH = element.get(0).scrollHeight;
    var currentH = element.height();
    var topEdge = element.offset().top;
    var paneH = $('#center').height();
    var paneTop = $('#center').offset().top;
    // Maximized bodies and oversized content get clamped to the pane height
    // less the 22px header bar; otherwise fit the content exactly
    var targetH = ( element.hasClass('popup_body_max') || scrollH > paneH )
        ? paneH - 22
        : scrollH;
    // Only ever grow, never shrink; parent carries the 22px header
    if( targetH > currentH )
    {
        element.parent().css('height', targetH + 22);
        element.css('height', targetH);
    }
    // Keep the bottom edge inside the center pane
    if( topEdge + targetH > paneH + paneTop )
    {
        element.parent().css('top', paneH + paneTop - targetH - 22);
    }
}
/**
 * Expand a popup body (and its parent window) horizontally to fit its
 * scroll width, capped at the center pane, and keep the window's right
 * edge inside the pane.
 *
 * @param element jQuery popup_body element to resize
 */
function resizeWidth( element )
{
    var elemsw = element.get(0).scrollWidth;
    var elemw = element.width();
    var elemleft = element.offset().left;
    var centerw = $('#center').width();
    var centerleft = $('#center').offset().left;
    var newWidth;
    // Maximized bodies and oversized content are clamped to the pane width
    if(element.hasClass('popup_body_max') || elemsw > centerw)
    {
        newWidth = centerw;
    }
    else
    {
        newWidth = elemsw;
    }
    // Only grow, never shrink
    if(newWidth > elemw)
    {
        element.parent().css('width', newWidth);
        element.css('width', newWidth);
    }
    // Bug fix: the overflow check must use the width the popup actually has
    // AFTER the resize above (it only grows, so that is max(newWidth, elemw)),
    // not the stale pre-resize width -- mirrors resizeHeight(), which uses
    // the new height. Previously a just-widened popup could be left hanging
    // past the right edge of #center.
    var finalWidth = newWidth > elemw ? newWidth : elemw;
    if( elemleft + finalWidth > centerw + centerleft )
    {
        element.parent().css('left', centerw + centerleft - finalWidth);
    }
}
// Look up the popup body DIV for a window name ("Foo Bar" -> #Foo_Barpu).
// Only the first space is replaced, matching the ID scheme used by
// createWindow/addMenuButton.
function getPopupContext( name )
{
    var domId = name.toString().replace( " ", "_" );
    return $( "#" + domId + "pu" );
}
// Re-request the named view from the server, repopulating its popup
function refreshCurrent( name )
{
    doAjax( undefined, undefined, name );
}
// Re-enable every input and button inside the form with the given id
function restoreForm(frm)
{
    var controls = $("#" + frm + " input,#" + frm + " button");
    controls
        .button( "enable" )
        .button( "refresh" )
        .attr( "disabled", false )
        .attr( "readonly", false );
}
/**
* @param headers 1-D array of headers
* @param values 2-D array of values
* @param cacheprefix Prefix for cache entries, must be unique
* @param postsortfunc Function(jQuery_table) to call after being sorted
* @param clearRegion Region for clearing temp cache entries
*/
function makeSortableTable( headers, values, cacheprefix, postsortfunc,
    clearRegion )
{
    var table = $("<table id='" + cacheprefix + "tbl'></table>");
    var headerrow = $("<tr class='primaryRow'></tr>");
    var i, j;
    // Renders all data rows (dropping any existing ones past the header),
    // caches the values for later re-sorts and runs the post-sort hook
    var printCells = function(values, table){
        table.find( "tr:gt(0)" ).remove();
        for( var i = 0; i < values.length; i++ )
        {
            var row = $("<tr></tr>");
            // Alternate row classes for striping
            if( ( i - 1 ) % 2 == 0 )
            {
                row.addClass( "alternateRow" );
            }
            else
            {
                row.addClass( "primaryRow" );
            }
            for( j = 0; j < headers.length; j++ )
            {
                row.append( "<td>" + values[ i ][ j ] + "</td>" );
            }
            table.append( row );
        }
        tempCache( cacheprefix + "-values", stringify( values ), clearRegion );
        if( postsortfunc != undefined )
        {
            postsortfunc( table );
        }
    };
    // Build the header row; sorting is only wired up when there is more
    // than one data row to sort
    for( i = 0; i < headers.length; i++ )
    {
        var cell = $( "<th class='sorttblhead'>" + headers[ i ] + "</th>" );
        if( values.length > 1 )
        {
            cell.prepend($('<span></span>').addClass('ui-icon').
                addClass('ui-icon-arrowthick-2-n-s').
                css( 'float', 'left' ));
            cell.click(function(){
                // Reset sibling headers to the neutral sort indicator and
                // highlight the clicked one
                var sibth = $(this).siblings("th");
                var sibicons = sibth.children(".ui-icon");
                var thisicon = $(this).children(".ui-icon");
                sibth.removeClass( "ui-state-hover" );
                $(this).addClass( "ui-state-hover" );
                sibicons.removeClass( 'ui-icon-arrowthick-1-s').
                    removeClass( 'ui-icon-arrowthick-1-n').
                    addClass( 'ui-icon-arrowthick-2-n-s');
                thisicon.
                    removeClass( 'ui-icon-arrowthick-2-n-s').
                    removeClass( 'ui-icon-arrowthick-1-s').
                    removeClass( 'ui-icon-arrowthick-1-n');
                // Column index = number of preceding header cells
                var index = $(this).prevAll("th").length;
                var valuestring = getTempCache( cacheprefix + "-values" );
                // NOTE(review): eval on a cached string -- only safe as long
                // as tempCache contents cannot be influenced by untrusted
                // data; a JSON parse of stringify()'s output would be safer.
                eval( "values = " + valuestring );
                var lastSort = getTempCache( cacheprefix + "-lastsort" );
                var newSort = index;
                // Numeric-aware comparator: numbers sort before strings,
                // strings fall back to localeCompare
                var customsort = function(a,b){
                    var ca = Number(a);
                    var cb = Number(b);
                    if( !isNaN( ca ) )
                    {
                        if( !isNaN( cb ) )
                        {
                            return ca - cb;
                        }
                        return -1;
                    }
                    else if( !isNaN( cb ) || a == undefined )
                    {
                        return 1;
                    }
                    return a.localeCompare(b);
                };
                // First click on a column sorts ascending; clicking the same
                // column again sorts descending (lastSort == index)
                if( lastSort != index )
                {
                    values.sort(function(a,b){
                        return customsort(a[index],b[index]);
                    });
                    thisicon.addClass( 'ui-icon-arrowthick-1-s');
                }
                else
                {
                    values.sort(function(a,b){
                        return customsort(b[index],a[index]);
                    });
                    thisicon.addClass( 'ui-icon-arrowthick-1-n');
                    // -1 so the next click on this column sorts ascending
                    newSort = -1;
                }
                tempCache( cacheprefix + "-lastsort", newSort, clearRegion );
                printCells( values, $("#" + cacheprefix + "tbl") );
            });
        }
        headerrow.append( cell );
    }
    table.append( headerrow );
    printCells( values, table );
    return table;
}
/**
* Shows a generic error message with an okay dialog
* @param title The title
* @param msg The message to show
* @param cb The function to call after okay is pressed
*/
function genericErrorDialog( title, msg, cb )
{
    // Single "Okay" button: run the optional callback, then tear the
    // dialog down completely
    var okayHandler = function(){
        if( cb )
        {
            cb();
        }
        $(this).dialog( "close" ).remove();
    };
    genericDialog( title, msg, { "Okay": okayHandler } );
}
/**
* Shows a generic message with customizable buttons.
*
* The array of buttons should be a key/index array where the keys are the
* text to show and the values are the corresponding functions to run when
* the button has been clicked.
*
* @param title Title of the dialog
* @param msg Message to show the user
* @param buttons Array of buttons to show
*/
function genericDialog( title, msg, buttons )
{
    // NOTE(review): title and msg are interpolated into HTML unescaped;
    // callers must not pass untrusted text here (XSS risk).
    $("body").append( '<div id="dialog-generic" ' + 'title="' + title +
            '"><p>' + msg + '</p></div>');
    // Fixed-size modal; the button map comes straight from the caller
    $( "#dialog-generic" ).dialog({
        resizable: false,
        height:165,
        width:360,
        modal: true,
        "buttons": buttons
    });
}<file_sep>/**
* Updates the CPU server consumption. The CPU line is displayed
* significantly differently from the other three lines and thus has its own
* handler. This function performs all of the calculations and pipes the
* values into @see applyModificationToServerStat so that it may be updated.
* The temp cache "servercpuconsumption" is taken as an input for the used
* amount whereas the temp cache "servercpu" is the total. The temp cache
* "servercpuratio" is written to provide for access later.
*/
function updateServerConsumptionCPU( )
{
    /**
     * New CPU consumption
     */
    var cpuSum = getServerDetailSum( "cpu" );
    /**
     * New CPU total available
     */
    var total = toNumber( getTempCache( "servercpu" ) );
    /**
     * The ratio is 1:1 so let's have 2 decimal places
     * (total cycles per unit of process CPU usage)
     */
    var ratio = Math.round( total / cpuSum * 100 ) / 100;
    /**
     * Calculate the old sum
     */
    var oldSum = toNumber( getTempCache( "servercpuconsumption" ) );
    /**
     * For CPU it is good to have less consumed, true/false
     */
    var isGood = cpuSum < oldSum;
    // Make sure we're not in an awkward rendering situation
    // NOTE(review): this compares the TOTAL against the old consumption,
    // while updateServerConsumption() compares new sum vs old sum; confirm
    // the asymmetry is intentional for CPU.
    if( total == oldSum && $('#servercpuconsumption').html() != "" )
    {
        return;
    }
    // In case there aren't any processes running, set the ratio to 0
    // (cpuSum of 0 makes the division yield Infinity)
    if( ratio == Number.POSITIVE_INFINITY )
    {
        ratio = 0;
    }
    // Apply the modification; the cell renders as "<process count> @ <ratio>"
    applyModificationToServerStat(
        "servercpuconsumption",
        cpuSum,
        isGood,
        cpuSum - oldSum,
        function(elem,value){
            $(elem).html(getProcessCount() + " @ " + ratio);
        }
    );
    // Update the cache so process-progress math can reuse the ratio
    tempCache( "servercpuratio", ratio, "Server-View" );
    runTimeUpdater( undefined, undefined, undefined, undefined, true );
}
/**
* Updates a server consumption detail row. Uses first parameter to determine
* what type of consumption is being updated. Second parameter is used when
* a change to the total available occurs. Because CPU is handled specially,
* it is passed to @see updateServerConsumptionCPU.
*
* @param type One of "cpu", "ram", "hdd", "bw"
* @param newtotal New total value for the consumption row
*/
function updateServerConsumption( type, newtotal )
{
    // CPU renders differently and has its own handler
    if( type == "cpu" )
    {
        updateServerConsumptionCPU();
        return;
    }
    // Freshly computed consumption for this resource
    var sum = getServerDetailSum( type );
    // Total capacity: an explicit parameter wins, else use the temp cache
    var total = ( newtotal == undefined )
        ? toNumber( getTempCache( "server" + type ) )
        : newtotal;
    // Percentage used, rounded to two decimal places
    var ratio = Math.round( sum / total * 10000 ) / 100;
    // Previously displayed consumption
    var oldsum = toNumber( getTempCache( "server" + type + "consumption" ) );
    // Shrinking consumption is the "good" direction for the animation
    var isGood = sum < oldsum;
    // Nothing changed and the cell is already rendered -- nothing to do
    if( sum == oldsum && $('#server' + type + 'consumption').html() != "" )
    {
        return;
    }
    // A zero total makes the division yield Infinity; display 0% instead
    if( ratio == Number.POSITIVE_INFINITY )
    {
        ratio = 0;
    }
    // Render as "<sum> (<ratio>%)" with the delta animation
    applyModificationToServerStat(
        "server" + type + "consumption",
        sum,
        isGood,
        sum - oldsum,
        function(elem,value){
            $(elem).html(sum + " (" + ratio + "%)");
        }
    );
}
/**
* Updates server consumptions for "hdd", "ram", "bw" and "cpu"
*/
function updateAllServerConsumptions( )
{
    // cpu/ram/bw are covered by the process-consumption refresh; hdd is
    // the only row not tied to running processes
    updateProcessConsumptions();
    updateServerConsumption( "hdd" );
}
/**
* Updates server consumptions for "cpu", "ram" and "bw"
*/
function updateProcessConsumptions( )
{
    // CPU first (special-cased renderer), then the two plain rows
    updateServerConsumptionCPU();
    updateServerConsumption( "ram" );
    updateServerConsumption( "bw" );
}
/**
* Called when a server detail has been updated/created. If oldvalue is not
* defined then the detail is simply written, otherwise a modification animation
* is performed.
*
* @param type One of "hdd", "ram", "bw" and "cpu"
* @param value New value for the server detail
* @param oldvalue Old value for the server detail
*/
function updateServerDetail( type, value, oldvalue )
{
    var elemid = "server" + type;
    if( oldvalue == undefined )
    {
        // First render: write the value straight into the cell
        $("#" + elemid).html( value );
    }
    else
    {
        // Subsequent updates animate the delta; growth counts as "good"
        applyModificationToServerStat( elemid, value, value > oldvalue,
            value - oldvalue );
    }
    // Consumption ratios depend on the total, so recompute that row too
    updateServerConsumption( type, value );
}
/**
* Sets the last time the server was updated in the temp cache and forces the
* CPU detail to recompute.
*
* @param lastTime Last time the server was updated (in seconds)
*/
function lastServerUpdateTime( lastTime )
{
    // Remember the timestamp for elapsed-time math, then force the CPU
    // row to recompute against it
    tempCache( "lastServerUpdateTime", lastTime, "Server-View" );
    updateServerConsumptionCPU();
}
/**
* Generates the DOM object for a server detail row
*
* @param type Type of detail that is being generated (used in IDs)
* @param title Title for the entire row
*
* @return Resulting DOM object
*/
function generateServerDetailRow( type, title )
{
    type = type.toString();
    // CPU shows "count @ ratio = total"; the other rows show "used / total"
    var delimiter = ( type == "cpu" ) ? "=" : "/";
    var row = $('<tr></tr>').attr('title', title);
    // Row header: the resource name, upper-cased via CSS
    var headCell = $('<td></td>').css('text-transform', 'uppercase')
        .html( type );
    // Live consumption cell
    var usedCell = $('<td></td>').append(
        $('<span></span>').attr('id', 'server' + type + 'consumption') );
    // Visual separator between consumption and capacity
    var sepCell = $('<td></td>').html( delimiter );
    // Capacity cell
    var totalCell = $('<td></td>').append(
        $('<span></span>').attr('id', 'server' + type) );
    row.append( headCell )
        .append( usedCell )
        .append( sepCell )
        .append( totalCell );
    // Center align all the cells
    row.children("td").css( "text-align", "center" );
    return row;
}
/**
* Update the temp cache for a server's name
*
* @param id ID of the server
* @param name The new name of the server
*/
function changedServerName( id, name )
{
    // An empty name falls back to the generic "Server #<id>" label
    var label = ( name == '' ) ? "Server #" + id : name;
    tempCache( "server-" + id + "-customname", label, "Server-View", true );
}
/**
* Update the temp cache for a program's name
*
* @param id ID of the program
* @param name The new name of the program
*/
function changedProgramName( id, name )
{
    // An empty name falls back to "<program type> #<id>"
    var label = name;
    if( label == '' )
    {
        label = intToProgramType( getTempCache( "program-" + id + "-type" ) ) +
            " #" + id;
    }
    tempCache( "program-" + id + "-customname", label, "Server-View", true );
}
/**
* Starts a server view. @see endServerView must also be called when all
* programs and processes have been added. Programs are added through @see
* serverPrograms or @see noServerPrograms. Processes are added through @see
* serverProcesses or @see noServerProcesses. This function will create the
* needed layout for everything to work together. It will create the detail
* table along with setting all of the temp cache values for the server. It
* will also create a customizable name field that is the same as the one on the
* server overview form.
*
* @param id Unique ID of the server
* @param owner ID of the owner (typically the current user)
* @param ip IP of the server (int format)
* @param customname Custom name of the server
* @param cpu Total CPU the server has
* @param ram Total RAM the server has
* @param hdd Total HDD the server has
* @param bw Total bandwidth the server has
* @param lastUpdate Last time the server was updated (in secs)
*/
function beginServerView( id, owner, ip, customname, cpu, ram, hdd, bw,
    lastUpdate )
{
    /**
     * Cache region for the temp cache
     */
    var cache = "Server-View";
    // If the custom name is not set, set it.
    customname = verifyServerName( id, customname );
    // Set up the server view table: editable name + IP header row, a column
    // header row, then one detail row per resource (with tooltip text)
    var context = getPopupContext( "Servers" );
    context.html( "" );
    context.append( $("<table style='width:100%'></table>")
        .append( $( "<tr></tr>" )
            .append( $( "<th colspan=3></th>" )
                .append( createUpdateableInput( "server-" + id + "-customname",
                    customname, "changeservername", "SERVER_ID", id ) ) )
            .append( $( "<th></th>" )
                .append( " IP: <span id='serverip'></span>" ) ) )
        .append( "<tr><th>Region</th><th>Current</th><th></th><th>Total</th></tr>" )
        .append( generateServerDetailRow( "cpu", "Distributed to each " +
            "running program. Determines the rate at which processes " +
            "complete." ) )
        .append( generateServerDetailRow( "ram", "Required to run programs. " +
            "Cannot be exceeded." ) )
        .append( generateServerDetailRow( "hdd", "Required for programs to " +
            "be stored/researched. May not be exceeded." ) )
        .append( generateServerDetailRow( "bw", "Determines rate at which " +
            "files are downloaded from external servers." ) )
    );
    // Add the two divs for programs and processes; serverPrograms /
    // serverProcesses (or their no-op variants) fill these in afterwards
    context.append("<div id='programdiv'></div>");
    context.append("<div id='processdiv'></div>");
    // Set all the temp cache values; the serverip entry renders through a
    // formatter so the int IP is displayed dotted
    tempCache( "currentserver", id, cache );
    tempCache( "serverowner", owner, cache );
    tempCache( "serverip", ip, cache, function(elem, val) {
        $(elem).html( intToIP( val ) );
    });
    tempCache( "server-" + id + "-customname", customname, cache );
    tempCache( "servercpu", cpu, cache );
    tempCache( "serverram", ram, cache );
    tempCache( "serverhdd", hdd, cache );
    tempCache( "serverbw", bw, cache );
    // Single-arg calls appear to reset the process/program lists before
    // they are repopulated -- NOTE(review): confirm tempCache semantics
    tempCache( "processes" );
    tempCache( "programs" );
    tempCache( "lastServerUpdateTime", lastUpdate, cache );
}
/**
* Finishes a server view. Ensures the popup is properly visible, updates all
* of the server detail rows and clears all other cache regions for Servers.
*/
function endServerView()
{
    // Make sure the popup fits the freshly built contents
    resizePopup( "Servers" );
    // Refresh dropdown operation availability before rendering detail rows
    updateProgramOperations();
    // Repaint every detail row from the cached totals (same order as before)
    var details = [ "ram", "hdd", "bw", "cpu" ];
    for( var i = 0; i < details.length; i++ )
    {
        updateServerDetail( details[ i ],
            getTempCache( "server" + details[ i ] ) );
    }
    // Clear stale Servers cache entries outside the Server-View region
    updateCache( "Servers", "Server-View" );
}
/**
* No server programs are on the server, calls @see enableFreePrograms.
*/
function noServerPrograms()
{
    // Replace the program list with a notice and offer the free starter set
    $('#programdiv').html( "This server has no programs!" );
    enableFreePrograms();
}
/**
* Creates a table for the programs to reside in. Each program is added to the
* table via @see addServerProgram
*/
function serverPrograms( list )
{
    // Build the (initially header-only) program table
    $('#programdiv').html( "<table id='programtable' style='width:100%'>" +
                           "<thead><th>Program Type" +
                           "</th><th>Size (MB)</th><th>Version</th><th>" +
                           "Operation</th></thead></table>" );
    // One row per program: [id, serverid, customname, type, size, version]
    for( var i = 0; i < list.length; i++ )
    {
        var entry = list[ i ];
        addServerProgram( entry[ 0 ], entry[ 1 ], entry[ 2 ], entry[ 3 ],
            entry[ 4 ], entry[ 5 ] );
    }
    resizePopup( "Servers" );
}
/**
* Checks if free programs should be enabled. This will call @see
* enableFreePrograms if the server is missing one of FW/PW Breaker/Bypasser.
*/
function checkFreePrograms()
{
    var programstring = getTempCache( "programs" ).toString();
    // No programs at all -> definitely missing the basics
    if( programstring == "" )
    {
        enableFreePrograms();
        return;
    }
    var programs = programstring.split( "," );
    // Track which of the four starter program types are present:
    // 1 = FW Defender, 2 = FW Breaker, 3 = PW Defender, 4 = PW Breaker.
    // If any is missing the user may claim a free L1 copy of each.
    var present = { 1: false, 2: false, 3: false, 4: false };
    for( var i = 0; i < programs.length; i++ )
    {
        var progid = programs[ i ];
        var type = toNumber( getTempCache( "program-" + progid + "-type" ) );
        if( present[ type ] != undefined )
        {
            present[ type ] = true;
        }
    }
    if( present[ 1 ] && present[ 2 ] && present[ 3 ] && present[ 4 ] )
    {
        // All basics owned -- the free-program offer no longer applies
        $('#freeprogramdiv').remove();
    }
    else
    {
        enableFreePrograms();
    }
}
/**
* Enables getting free programs
*/
function enableFreePrograms()
{
    // Drop any existing offer so it is never shown twice
    $('#freeprogramdiv').remove();
    var offer = "<div id='freeprogramdiv'>You are missing " +
        "critical programs that may be loaded from CD. <a href='#' " +
        "id='loadfreeprogram'>Load Now</a></div>";
    $('#programdiv').prepend( offer );
    // Clicking the link requests the free starter programs for this server
    $('#loadfreeprogram').click(function( evt ){
        doAjax( "freeprograms", {
            SERVER_ID: getTempCache('currentserver')
        });
    });
}
/**
* Adds a program to the current server. Starts by adding the necessary HTML
* to the program table. Adds the correct callbacks for the drop down actions.
* Set up the temp cache vars. Finally, check if free programs can be updated.
*
* @param id Unique ID of the program
* @param serverid ID of the server the program belongs to
* @param customname Custom name of the program (defaults to Type #ID)
* @param type Type of programs, text from @see intToProgramType
* @param size Size of the program, calculated from server defs
* @param version Version of the program
*/
function addServerProgram( id, serverid, customname, type, size, version )
{
    // Set a good custom name if it is empty
    if( customname == '' )
    {
        customname = intToProgramType( type ) + " #" + id;
    }
    var cache = "Server-View";
    // DOM Object to add to the programs table: name | size | version |
    // operation dropdown. Size/version cells are filled in by the
    // tempCache render hooks below.
    $("<tr></tr>").attr( "id", "program-" + id + "-row" ).append(
        // Program name/type
        $("<td></td>").append(
            createUpdateableInput( "program-" + id + "-customname",
                customname, "changeprogramname", "PROGRAM_ID", id )
            .attr("name", "type"))
    ).append(
        $("<td></td>").attr({
            id: "program-" + id + "-size",
            name: "size"
        })
    ).append(
        $("<td></td>").attr({
            id: "program-" + id + "-version",
            name: "version"
        })
    ).append(
        $("<td></td>").append(
            $("<select></select>").attr( "id", "program-" + id + "-select")
            .append( "<option>Select one...</option>" +
                "<option id='research-" + id + "'>Research</option>" +
                "<option id='delete-" + id + "'>Delete</option>" +
                "<option id='exchange-" + id + "'>Exchange</option>" +
                "<option id='execute-" + id + "'>Execute</option>" )
            .change(function(evt){
                var value = $(this).val();
                // Runs the callback only when this option was selected AND
                // the option is currently marked doable (class set by
                // updateProgramOperations)
                var checker = function(name,callback) {
                    name = name.toString();
                    if( value == name && $("#" + name.toLowerCase() + "-" + id)
                        .hasClass( "doableOperation" ) )
                    {
                        callback();
                    }
                };
                checker( "Research", function(){
                    doAjax( "startresearch", {
                        PROGRAM_ID: id
                    });
                });
                checker( "Delete", function(){
                    doAjax( "startdelete", {
                        PROGRAM_ID: id
                    });
                });
                checker( "Exchange", function(){
                    startExchangeProgram( id );
                });
                checker( "Execute", function(){
                    doAjax( "executeprogram", {
                        PROGRAM_ID: id
                    });
                });
                // NOTE(review): no "Halt" option exists in the markup above;
                // presumably it is swapped in elsewhere for running programs.
                checker( "Halt", function(){
                    doAjax( "haltprogram", {
                        PROGRAM_ID: id
                    });
                });
                // Snap the dropdown back to its placeholder entry
                if( value != "Select one..." )
                {
                    $(this).val( "Select one..." );
                }
            })
        )
    ).appendTo( $( "#programtable" ) );
    // Register this program's attributes in the temp cache; the type entry
    // renders through intToProgramType for display
    tempCache( "program-" + id + "-server", serverid, cache );
    tempCache( "program-" + id + "-type", type, cache, function(elem,val){
        $(elem).html( intToProgramType( val ) );
    });
    tempCache( "program-" + id + "-customname", customname, cache );
    tempCache( "program-" + id + "-size", size, cache, true );
    tempCache( "program-" + id + "-version", version, cache, true );
    addTempCacheList( "programs", id, cache );
    // The free-program offer may no longer apply
    checkFreePrograms();
}
/**
* Removes a server program. Hides the table row, updates the cache values and
* calls the optional callbacks. The first callback is called immediately after
* the row has hidden and before the temp cache values are updated. The second
* callback is called after the temp cache values are updated.
*
* @param id ID of the program to remove
* @param callback Pre-temp cache update callback to call (optional)
* @param postcallback Post-temp cache update callback to call (optional)
*/
function removeServerProgram( id, callback, postcallback )
{
    $( "#program-" + id + "-row" ).hide(1000, function(){
        // Pre-removal hook, fired while the row still exists in the DOM
        if( callback != undefined )
        {
            callback( id );
        }
        $(this).remove();
        // Clear every temp cache entry that belonged to this program
        tempCache( "program-" + id + "-server" );
        tempCache( "program-" + id + "-type" );
        tempCache( "program-" + id + "-size" );
        tempCache( "program-" + id + "-version" );
        removeTempCacheList( "programs", id, "Server-View" );
        // Post-removal hook, fired after the cache is consistent again
        if( postcallback != undefined )
        {
            postcallback( id );
        }
        // Last program gone -> show the empty-server notice
        if( getTempCache( "programs" ) == "" )
        {
            noServerPrograms();
        }
        checkFreePrograms();
    });
}
/**
* Called when the server has been given the free program(s). If any of the IDs
* are set to 0 then the program was not added.
*
* @param fwdid Firewall defender ID
* @param fwbid Firewall breaker ID
* @param pwdid Password defender ID
* @param pwbid Password breaker ID
*/
function grantedFreePrograms( fwdid, fwbid, pwdid, pwbid )
{
    // The offer no longer applies once the grant comes back
    $('#freeprogramdiv').replaceWith( "" );
    /**
     * jQuery object of the program table
     */
    var programtable = $('#programtable');
    // Create the program table if this is the server's first program
    if( programtable.length == 0 )
    {
        $('#programdiv').html( "<table id='programtable'></table>" );
        programtable = $('#programtable');
    }
    var serverid = getTempCache('currentserver');
    // Granted IDs in program-type order: 1=FW Defender, 2=FW Breaker,
    // 3=PW Defender, 4=PW Breaker. An ID of 0 means "not granted"; each
    // granted program is added as a level-1 copy with its default size.
    var granted = [ fwdid, fwbid, pwdid, pwbid ];
    for( var type = 1; type <= 4; type++ )
    {
        var progid = granted[ type - 1 ];
        if( progid != 0 )
        {
            addServerProgram( progid, serverid, "", type,
                getProgramSize( type, 1 ), 1 );
        }
    }
    // Update available program operations and all detail rows.
    updateProgramOperations();
    updateAllServerConsumptions();
}
/**
* Updates which operations are able to be performed by each program. Each
* dropdown option will be enabled/disabled based on its criteria. If the
* option is disabled it will have a title set for why it is disabled.
*/
function updateProgramOperations( )
{
    // Get the current program listing as an array. If there are no programs,
    // simply return.
    var programstring = getTempCache( "programs" ).toString();
    var programs = new Array();
    if( programstring != "" )
    {
        programs = programstring.split( "," );
    }
    else
    {
        return;
    }
    // Get the current process listing as an array.
    var processstring = getTempCache( "processes" ).toString();
    var processes = new Array();
    if( processstring != "" )
    {
        processes = processstring.split( "," );
    }
    // Set up some vars.
    var i, program;
    // Arrays of program ids that can NOT perform each operation.
    var cantResearch = new Array();
    var cantDelete = new Array();
    var cantExchange = new Array();
    // Walk the running processes: a program targeted by a Delete process
    // can't be researched, and any targeted program can't be deleted or
    // exchanged again while its process runs.
    for( i = 0; i < processes.length; i++ )
    {
        var processid = processes[ i ];
        var operation = getTempCache( "process-" + processid + "-operation" );
        var opstring = intToProcessOperation( operation );
        program = getTempCache( "process-" + processid + "-target" );
        if( opstring == "Delete" )
        {
            cantResearch.push( program );
        }
        // Can't delete/exchange if already doing something.
        cantDelete.push( program );
        cantExchange.push( program );
    }
    var freehdd = getServerDetailAvailable( "hdd" );
    var freeram = getServerDetailAvailable( "ram" );
    for( i = 0; i < programs.length; i++ )
    {
        var programid = programs[ i ];
        var programtype = getTempCache( "program-" + programid + "-type" );
        // NOTE(review): strict '<' means an exactly-fitting program/process
        // is treated as NOT fitting — confirm '<=' wasn't intended.
        var hddavail = getProgramSize( programtype, 1 ) < freehdd;
        var ramavail = getDefault( "RESEARCH_RAM" ) < freeram;
        var researchobj = $('#research-' + programid);
        var deleteobj = $('#delete-' + programid);
        var exchangeobj = $('#exchange-' + programid);
        // Accumulates tooltip text explaining every disabled option.
        var errorstring = "";
        // Update the research button accordingly.
        if( cantResearch.indexOf( programid ) != -1 )
        {
            setOperationEnabled( researchobj );
            errorstring = "Can't research because program is already being " +
                "deleted. ";
        }
        else if( !hddavail )
        {
            setOperationEnabled( researchobj );
            errorstring = "Can't research because there is not enough HDD " +
                "space available. "
        }
        else if( !ramavail )
        {
            setOperationEnabled( researchobj );
            errorstring = "Can't research because there is not enough RAM " +
                "to run a research process. ";
        }
        else
        {
            setOperationEnabled( researchobj, true );
        }
        // And the delete one.
        if( cantDelete.indexOf( programid ) != -1 )
        {
            setOperationEnabled( deleteobj );
            errorstring += "Can't delete because another operation is " +
                "already being performed. ";
        }
        else
        {
            setOperationEnabled( deleteobj, true );
        }
        // And also the exchange one. Version-1 programs can never be
        // exchanged.
        if( cantExchange.indexOf( programid ) != -1 )
        {
            setOperationEnabled( exchangeobj );
            errorstring += "Can't exchange because another operation is " +
                "already being performed. ";
        }
        else if( getTempCache( "program-" + programid + "-version" ) == "1" )
        {
            setOperationEnabled( exchangeobj );
            errorstring += "Can't exchange because this program is only " +
                "version 1. ";
        }
        else
        {
            setOperationEnabled( exchangeobj, true );
        }
        // Surface the reasons as the select's tooltip.
        $('#program-' + programid + '-select').attr( 'title', errorstring );
    }
}
/**
* Sets an operation to enabled/disabled. Enabling will add the doableOperation
* class and remove the disabled attribute. Disabling will add the
* disabledOperation class and add the disabled attribute.
*
* @param obj jQuery object to change
* @param enabled Whether to enable/disable
*/
function setOperationEnabled( obj, enabled )
{
    // Any non-true value (including the common call with one argument)
    // disables the operation.
    if( enabled != true )
    {
        obj.addClass( 'disabledOperation' )
            .removeClass( 'doableOperation' )
            .attr( "disabled", "disabled" );
    }
    else
    {
        obj.addClass( 'doableOperation' )
            .removeClass( 'disabledOperation' )
            .removeAttr( "disabled" );
    }
}<file_sep>createWindow( "Tower D", {minWidth: 550} );
// Register the Tower D launcher in the main menu.
addMenuButton( "Tower D", "ui-icon-image", function(){runTowerD();} );
/**
 * Serializes an array as "[a,b,c]" so values can round-trip through the
 * perm/temp caches; elements are stringified with their own toString()
 * (so nested arrays and baddy objects serialize recursively).
 */
Array.prototype.toString = function()
{
    var pieces = "";
    for( var idx = 0; idx < this.length; idx++ )
    {
        if( idx > 0 )
        {
            pieces += ",";
        }
        pieces += this[ idx ];
    }
    return "[" + pieces + "]";
};
// Tower D tuning constants.
var td = {
    // HP added by the first baddy-HP upgrade (each further upgrade adds one
    // more multiple of this step).
    baddyhpstep: 5,
    // Distance (units) at which baddies spawn from the tower.
    baddybasespawndistance: 300,
    // Baddy movement speed before upgrades and type bonuses.
    baddybasespeed: 5,
    // Speed added per baddy-speed upgrade.
    baddyspeedstep: 1,
    // Tower starting damage.
    towerbaseatk: 5,
    // Tower starting attack range.
    towerbaserange: 250,
    // Gold cost step for tower range upgrades.
    towerrangestepcost: 5,
    // Range gained per tower range upgrade.
    towerrangestep: 10,
    // Tower starting maximum HP.
    towerbasemaxhp: 100,
    // Max HP gained per HP upgrade.
    towerhpstep: 10,
    // Gold cost of a max-HP upgrade.
    towerhpstepcost: 50,
    // Archery Range expansion tuning.
    archerbaserange: 150,
    archerrangestep: 5,
    archerbasedamage: 1,
    archerdamagestep: 1,
    // Baddy type definitions; the array index is the type id referenced by
    // the complexity waves below.
    baddytypes: [
        {
            hp: 75,
            name: "Bunny",
            speed: 0,
            multiplier: 0,
            image: "icon-bunny"
        },
        {
            hp: 100,
            name: "Chihuahua",
            speed: 0,
            multiplier: 0.04,
            image: "icon-chihuahua"
        },
        {
            hp: 80,
            name: "Rat",
            speed: 3,
            multiplier: 0.1
        },
        {
            hp: 100,
            name: "<NAME>",
            speed: 0,
            multiplier: 0.1
        },
        {
            hp: 200,
            name: "Snake",
            speed: 1,
            multiplier: 0.15
        }
    ],
    // Spawn waves per complexity level: each inner array lists the baddy
    // type spawned in each slot of that level's wave.
    complexities: [
        [ 0, 0, 0, 0, 0 ],
        [ 1, 0, 0, 0, 0 ]
    ]
};
// Module-disable hook: tears down all Tower D UI elements.
function disableModuleTOWERD()
{
    deleteAllElementsById( "Tower D" );
}
/**
 * Reads a numeric value from the permanent cache, falling back to the
 * supplied default (0 when omitted) if the key is missing or the stored
 * value does not parse as a number.
 */
function getTowerInitial( key, def )
{
    if( def == undefined )
    {
        def = 0;
    }
    var raw = getPermCache( key );
    // An empty cache entry means "never stored" — use the default.
    var parsed = toNumber( ( raw == "" ) ? def : raw );
    return isNaN( parsed ) ? def : parsed;
}
/**
 * Builds the markup for a labelled value span followed by an upgrade
 * button and a clearing div.
 */
function createAlterableButton( txt, spanid, buttonid )
{
    var markup = txt + ": ";
    markup += "<span id='" + spanid + "'></span>";
    markup += "<button id='" + buttonid + "'></button>";
    markup += "<div class='clear'></div>";
    return markup;
}
/**
 * Entry point for the Tower D mini-game: restores all persisted state from
 * the permanent cache, builds the whole window/tab UI, wires every upgrade
 * button, seeds the beastiary, rebuilds purchased expansions and starts the
 * 500ms game loop.
 */
function runTowerD()
{
    // Quick var instantiation
    var i;
    // Cosmetics
    // NOTE(review): this selector's result is unused — looks like a leftover
    // no-op; confirm before removing.
    $("#center #Tower_D");
    // Unpack baddies persisted as "[[hp,mult,speed,dist,name],...]".
    // NOTE(review): eval of cached strings — trusted local cache only.
    var baddies = [], baddiesstr = getPermCache( "Baddies" );
    if( baddiesstr != "" )
    {
        eval( "baddies=" + baddiesstr );
    }
    document.baddies = new Array();
    // Older saves may omit trailing fields, so default each one.
    for( i = 0; i < baddies.length; i++ )
    {
        var currbaddy = baddies[ i ];
        var hp = currbaddy[ 0 ], multiplier = 0, speed = td.baddybasespeed,
            distance = td.baddybasespawndistance, name = td.baddytypes[ 0 ].name;
        if( currbaddy.length >= 2 )
        {
            multiplier = currbaddy[ 1 ];
        }
        if( currbaddy.length >= 3 )
        {
            speed = currbaddy[ 2 ];
        }
        if( currbaddy.length >= 4 )
        {
            distance = currbaddy[ 3 ];
        }
        if( currbaddy.length >= 5 )
        {
            name = currbaddy[ 4 ];
        }
        document.baddies.push( createBaddy( hp, multiplier, speed, distance, name ) );
    }
    // Unpack expansions
    var expansions = [], expansionstr = getPermCache( "Expansions" );
    if( expansionstr != "" )
    {
        eval( "expansions=" + expansionstr );
    }
    document.expansions = new Array();
    // Unpack totals
    document.gold = getTowerInitial( "Gold" );
    document.totalgold = getTowerInitial( "TotalGold" );
    document.totalbaddykills = getTowerInitial( "TotalBaddyKills" );
    // Unpack Tower Upgrades
    document.baseatkupgrades = getTowerInitial( "BaseAttackUpgrades" );
    document.baserangeupgrades = getTowerInitial( "BaseRangeUpgrades" );
    document.basemaxhpupgrades = getTowerInitial( "BaseHPUpgrades" );
    document.basehp = getTowerInitial( "BaseHP", td.towerbasemaxhp );
    // Unpack Baddy Upgrades
    document.baddyhpup = getTowerInitial( "BaddyHPUp" );
    document.baddyspeedup = getTowerInitial( "BaddySpeedUp" );
    // Unpack Baddy Level
    document.baddycomplexity = getTowerInitial( "BaddyComplexity", 1 );
    document.seenbaddies = [];
    // Setup window: header stat bar plus the main tab set.
    var w = getPopupContext( "Tower D" );
    w.html( "<table border=0 style='width:100%' id='tdHdrTbl'><tr><td>Gold: <span id='Gold'></span></td>" +
        "<td>Baddies: <span id='BaddyCount'></span></td>" +
        "<td>HP: <span id='BaseHP'></span>/<span id='BaseMaxHP'></span></td></tr></table>" );
    w.append("<div id='TowerTabs'>" +
        "<ul>" +
        "<li><a href='#TabTower'>Tower</a></li>" +
        "<li><a href='#TabBaddies'>Mobs</a></li>" +
        "<li><a href='#TabExpansions'>Expansions</a></li>" +
        "<li><a href='#TabTStats'>Stats</a></li>" +
        "<li><a href='#TabBeastiary'>Beastiary</a></li>" +
        "<li><a href='#TabTowerView'>View Tower</a></li>" +
        "</ul>" +
        "<div id='TabTower'></div>" +
        "<div id='TabBaddies'></div>" +
        "<div id='TabExpansions'></div>" +
        "<div id='TabTStats'></div>" +
        "<div id='TabBeastiary'></div>" +
        "<div id='TabTowerView'></div>" +
        "</div>" );
    $("#TowerTabs li a").css( "padding", "0.2em" );
    $("#TowerTabs").tabs({idPrefix: 'Tab'});
    // Tower tab: attack/range/max-HP upgrades plus HP restore buttons.
    $("#TabTower")
        .append( createAlterableButton( "Base Attack", "BaseAttack", "tdIncreaseBaseAtk" ) )
        .append( createAlterableButton( "Base Range", "BaseRange", "tdIncreaseBaseRange" ) )
        .append( createAlterableButton( "Base Max HP", "BaseMaxHPCopy", "tdIncreaseBaseHP" ) )
        .append( "Base HP: <span id='BaseHPCopy'></span><button id='tdRestoreBaseHP'>Restore 1 HP</button>")
        .append("<button id='tdRestoreBaseHPAll'></button><div class='clear'></div>");
    // Baddies Tab
    $("#TabBaddies")
        .append( createAlterableButton( "Initial HP", "InitialBaddyHP", "tdIncreaseBaddyHP" ) )
        .append( createAlterableButton( "Initial Speed", "InitialBaddySpeed", "tdIncreaseBaddySpeed" ) )
        .append( createAlterableButton( "Complexity", "BaddyComplexity", "tdIncreaseBaddyComplexity" ) )
        .append( "Initial Spawn Distance: " + td.baddybasespawndistance );
    $("#TabExpansions")
        .append( "<button id='tdIncreaseExpansion'></button><div class='clear'></div>" )
        .append( "<div id='ExpansionTabs'>No expansions purchased.</div>" );
    $("#TabTStats")
        .append( "Total Kills: <span id='TotalBaddyKills'>" + document.totalbaddykills + "</span><div class='clear'></div>")
        .append( "Total Gold: <span id='TotalGold'>" + document.totalgold + "</span><div class='clear'></div>" );
    $("#TabBeastiary")
        .append( "<div id='BeastiaryAccordion'></div>" );
    // Tower upgrade buttons: each checks affordability, charges, then
    // applies the upgrade.
    $("#tdIncreaseBaseAtk").button().click(function(){
        var cost = getIncreaseBaseAtkCost();
        if( document.gold < cost )
        {
            return;
        }
        adjustGold( -cost );
        increaseBaseAttack();
    });
    $("#tdIncreaseBaseRange").button().click(function(){
        var cost = getIncreaseBaseRangeCost();
        if( document.gold < cost )
        {
            return;
        }
        adjustGold( -cost );
        increaseBaseRange();
    });
    $("#tdIncreaseBaseHP").button().click(function(){
        var cost = getIncreaseBaseHPCost();
        if( document.gold < cost )
        {
            return;
        }
        adjustGold( -cost );
        increaseBaseHP();
    });
    // Healing costs 1 gold per HP.
    $("#tdRestoreBaseHP").button().click(function(){
        if( document.gold < 1 || document.basehp == getMaxHP() )
        {
            return;
        }
        adjustGold( -1 );
        document.basehp++;
        updateHP();
    });
    $("#tdRestoreBaseHPAll").button().click(function(){
        var cost = getMaxHP() - document.basehp;
        if( document.gold < cost || cost == 0 )
        {
            return;
        }
        adjustGold( -cost );
        document.basehp += cost;
        updateHP();
    });
    // Baddy upgrades are free (they raise earned gold instead).
    $("#tdIncreaseBaddyHP").button().click(function(){
        increaseBaddyHP();
    });
    $("#tdIncreaseBaddySpeed").button().click(function(){
        increaseBaddySpeed();
    });
    $("#tdIncreaseExpansion").button().click(function(){
        var cost = getIncreaseExpansionCost();
        if( document.gold < cost )
        {
            return;
        }
        adjustGold( -cost );
        increaseExpansion();
    });
    $("#tdIncreaseBaddyComplexity").button().click(function(){
        increaseBaddyComplexity();
    });
    // Populate all the labels/prices created above.
    updateIncreaseBaseAtkButton();
    updateIncreaseBaseRangeButton();
    updateIncreaseBaseHPButton();
    updateIncreaseBaddyHPButton();
    updateIncreaseBaddySpeedButton();
    updateIncreaseBaddyComplexity();
    updateIncreaseExpansionButton();
    permCache( "Gold", document.gold, true );
    permCache( "BaddyCount", document.baddies.length, true );
    permCache( "TotalGold", document.totalgold, true );
    permCache( "TotalBaddyKills", document.totalbaddykills, true );
    // Check the beastiary: register every baddy type appearing in any wave
    // up to the current complexity.
    // NOTE(review): addBeastiary() already pushes onto document.seenbaddies,
    // so the push below stores each type twice; harmless given the indexOf
    // guard, but confirm before relying on seenbaddies' contents.
    for( i = 0; i < document.baddycomplexity; i++ )
    {
        var wave = td.complexities[ i ];
        for( var j = 0; j < wave.length; j++ )
        {
            var baddy = wave[ j ];
            if( document.seenbaddies.indexOf( baddy ) == -1 )
            {
                addBeastiary( baddy );
                document.seenbaddies.push( baddy );
            }
        }
    }
    // Setup expansions: recreate each tab, then rebuild its building from
    // the persisted [type, ...params] record.
    for( i = 0; i < expansions.length; i++ )
    {
        increaseExpansion();
        var currdata = expansions[ i ];
        var type = currdata.shift();
        switch( type )
        {
            case 1:
                buildArcheryRange( currdata );
                break;
            case 2:
                buildBarracks( currdata );
                break;
            case 3:
                buildBlacksmith( currdata );
                break;
        }
    }
    updateIncreaseExpansionButton();
    // Expansions force focus back to them, sooo...reset focus back to first
    $("#TowerTabs").tabs( "select", 0 );
    // Start game loop
    document.towerdinterval = setInterval( "towerDLoop();", 500 );
}
/**
 * Adjusts the player's gold by the given (possibly negative) amount,
 * persists it, and refreshes the on-screen value. Positive adjustments
 * also accumulate into the lifetime gold statistic.
 *
 * @param amt Amount of gold to add (negative to spend)
 */
function adjustGold( amt )
{
    // Keep gold rounded to 2 decimal places to limit float drift.
    document.gold = Math.round( ( document.gold + amt ) * 100 ) / 100;
    permCache( "Gold", document.gold );
    if( amt > 0 )
    {
        document.totalgold = Math.round( ( document.totalgold + amt ) * 100 ) / 100;
        permCache( "TotalGold", document.totalgold, true );
    }
    // Always display exactly two decimals ("5" -> "5.00", "5.5" -> "5.50");
    // toFixed replaces the previous hand-rolled padding logic.
    $("#Gold").html( document.gold.toFixed( 2 ) );
}
// Expansions
/**
 * Adds a new (empty) expansion slot: persists it, creates its tab with the
 * three build buttons, disables buttons for building types that already
 * exist, and refreshes the tab widgets.
 */
function increaseExpansion()
{
    // Type 0 = empty/unbuilt expansion.
    document.expansions.push( [0] );
    permCache( "Expansions", document.expansions );
    var exp = document.expansions.length;
    // First expansion: replace the "No expansions purchased." text with a
    // tab list.
    if( $("#ExpansionTabs").children( "ul" ).size() == 0 )
    {
        $("#ExpansionTabs").html( "" ).append("<ul></ul>");
    }
    $("#ExpansionTabs ul").append( "<li><a href='#TabExpansion" + exp + "'>Expansion " + exp + "</a></li>" );
    $("#ExpansionTabs").append( "<div id='TabExpansion" + exp + "'></div>" );
    var tab = $("#TabExpansion" + exp );
    // Hidden 1-based tab number; build/upgrade click handlers read it via
    // $(this).siblings(".id").
    tab.append( "<div style='display:none' class='id'>" + exp + "</div>" );
    // Create/add buttons to build the expansions
    var archeryRange = $("<button>Build Archery Range</button>")
        .appendTo( tab ).button().click(function(){
            buildArcheryRange();
        });
    tab.append( "<div class='clear'></div>" );
    var barracks = $("<button>Build Barracks</button>")
        .appendTo( tab ).button().click(function(){
            buildBarracks();
        });
    tab.append( "<div class='clear'></div>" );
    var blacksmith = $("<button>Build Blacksmith</button>")
        .appendTo( tab ).button().click(function(){
            buildBlacksmith();
        });
    tab.append( "<div class='clear'></div>" );
    // Only allow one of each the unique buildings
    for( var i = 0; i < document.expansions.length; i++ )
    {
        switch( document.expansions[ i ][ 0 ] )
        {
            case 1:
                disableExpansionButton( archeryRange, "archery range" );
                break;
            case 2:
                disableExpansionButton( barracks, "barracks" );
                break;
            case 3:
                disableExpansionButton( blacksmith, "blacksmith" );
                break;
        }
    }
    // Refresh the tabs (jQuery UI tabs must be re-initialized after DOM
    // changes) and focus the Expansions tab.
    $("#ExpansionTabs").tabs( "destroy" ).tabs({idPrefix: 'Tab'});
    $("#TowerTabs").tabs( "destroy" ).tabs({idPrefix: 'Tab'}).tabs( "select", 2 );
    updateIncreaseExpansionButton();
}
/**
 * Greys out an expansion-build button and relabels it to explain why
 * it is unavailable.
 */
function disableExpansionButton( button, type )
{
    var label = "Only one " + type + " is allowed.";
    button.button( "disable" );
    button.button( "option", "label", label );
}
// Archery range param details:
// Count, Range, Damage, Type
/**
 * Builds (or restores from persisted params) the Archery Range expansion
 * UI. After getExpansionData() prepends the type id, the stored record is
 * [ type, archerCount, rangeUpgrades, damageUpgrades, ... ].
 *
 * @param params Persisted parameter array, or undefined when built fresh
 */
function buildArcheryRange( params )
{
    var data = getExpansionData( [ 1, 0, 0, 0 ], params, 1 );
    data.o.append( "<h2>Archery Range</h2>" )
        .append( "Number:<span id='tdExpArcherCount'>1</span>" )
        .append( $("<button class='tdExpArcherCount'></button>").button().click(function(){
            var tab = $(this).siblings(".id").text();
            var currCount = document.expansions[ tab - 1 ][ 1 ];
            var cost = getExpArcherIncreaseCost( currCount );
            if( document.gold < cost )
            {
                return;
            }
            // Fixed: charge for the purchase (the affordability check was
            // present but the gold was never deducted).
            adjustGold( -cost );
            currCount++;
            document.expansions[ tab - 1 ][ 1 ] = currCount;
            permCache( "Expansions", document.expansions );
            updateExpArcherCount( tab, currCount );
        })).append( "<div class='clear'></div>" )
        .append( "Range:<span id='tdExpArcherRange'>" + td.archerbaserange + "</span>" )
        .append( $("<button class='tdExpArcherRange'></button>").button().click(function(){
            var tab = $(this).siblings(".id").text();
            var currRangeUpgrade = document.expansions[ tab - 1 ][ 2 ];
            var cost = getExpArcherRangeIncreaseCost( currRangeUpgrade );
            if( document.gold < cost )
            {
                return;
            }
            adjustGold( -cost );
            currRangeUpgrade++;
            document.expansions[ tab - 1 ][ 2 ] = currRangeUpgrade;
            permCache( "Expansions", document.expansions );
            updateExpArcherRange( tab, currRangeUpgrade );
        })).append( "<div class='clear'></div>" )
        .append( "Damage:<span id='tdExpArcherDamage'>" + td.archerbasedamage + "</span>" )
        // Fixed: the closing tag was malformed ("</button") in the markup.
        .append( $("<button class='tdExpArcherDamage'></button>").button().click(function(){
            var tab = $(this).siblings(".id").text();
            var currDamageUpgrade = document.expansions[ tab - 1 ][ 3 ];
            // Fixed: cost was computed from currRangeUpgrade, which is not
            // declared in this handler (ReferenceError on click).
            var cost = getExpArcherDamageIncreaseCost( currDamageUpgrade );
            if( document.gold < cost )
            {
                return;
            }
            adjustGold( -cost );
            currDamageUpgrade++;
            document.expansions[ tab - 1 ][ 3 ] = currDamageUpgrade;
            permCache( "Expansions", document.expansions );
            updateExpArcherDamage( tab, currDamageUpgrade );
        })).append( "<div class='clear'></div>" );
    updateExpArcherCount( data.t, data.p[ 1 ] );
    updateExpArcherRange( data.t, data.p[ 2 ] );
    updateExpArcherDamage( data.t, data.p[ 3 ] );
}
/**
 * Gold cost of the next archer: 1000 for the second archer, rising by
 * 250 for each archer already owned.
 */
function getExpArcherIncreaseCost( currCount )
{
    return 750 + ( 250 * currCount );
}
/**
 * Shows the current archer count and reprices the "+1 Archer" button.
 */
function updateExpArcherCount( tab, currCount )
{
    $("#tdExpArcherCount").html( currCount );
    var label = "+1 Archer, " + getExpArcherIncreaseCost( currCount ) + " gold";
    $(".tdExpArcherCount").button( "option", "label", label );
}
/**
 * Finds the tab number of the archery-range expansion (type id 1).
 *
 * Returns the 1-based tab number — matching the hidden ".id" markers in the
 * expansion DOM — because every consumer indexes document.expansions with
 * [ tab - 1 ]. (Previously this returned the 0-based array index, making
 * those lookups off by one.)
 *
 * @return 1-based tab number, or -1 when no archery range exists
 */
function getExpArcherTab( )
{
    for( var i = 0; i < document.expansions.length; i++ )
    {
        if( document.expansions[ i ][ 0 ] == 1 )
        {
            return i + 1;
        }
    }
    return -1;
}
/**
 * Gold cost of the next archer range upgrade: 500 for the first, rising
 * by 500 per upgrade already purchased.
 */
function getExpArcherRangeIncreaseCost( currRange )
{
    return currRange * 500;
}
// Effective archer range: base range plus the step times purchased upgrades.
// NOTE(review): getExpArcherTab() historically returned a 0-based array
// index, which makes the [ tab - 1 ] lookup below off by one — verify the
// returned value is 1-based before relying on this.
function getExpArcherActualRange( )
{
    var tab = getExpArcherTab();
    return td.archerbaserange + ( td.archerrangestep * document.expansions[ tab - 1 ][ 2 ] );
}
/**
 * Displays the effective archer range and reprices the upgrade button.
 */
function updateExpArcherRange( tab, currRange )
{
    $("#tdExpArcherRange").html( getExpArcherActualRange() );
    var label = "+" + td.archerrangestep + " range, " +
        getExpArcherRangeIncreaseCost( currRange ) + " gold";
    $(".tdExpArcherRange").button( "option", "label", label );
}
/**
 * Gold cost of the next archer damage upgrade: 2000 for the first,
 * rising by 1000 per upgrade already purchased.
 */
function getExpArcherDamageIncreaseCost( currDamage )
{
    return 1000 * ( currDamage + 1 );
}
// Effective archer damage: base damage plus the step times purchased
// upgrades. NOTE(review): same 1-based/0-based tab concern as
// getExpArcherActualRange() — verify getExpArcherTab() returns a 1-based
// value so [ tab - 1 ] is the right expansion.
function getExpArcherActualDamage( )
{
    var tab = getExpArcherTab();
    return td.archerbasedamage + ( td.archerdamagestep * document.expansions[ tab - 1 ][ 3 ] );
}
/**
 * Displays the effective archer damage and reprices the upgrade button.
 */
function updateExpArcherDamage( tab, currDamage )
{
    $("#tdExpArcherDamage").html( getExpArcherActualDamage() );
    var label = "+" + td.archerdamagestep + " damage, " +
        getExpArcherDamageIncreaseCost( currDamage ) + " gold";
    $(".tdExpArcherDamage").button( "option", "label", label );
}
// Barracks param details:
// Count, Health, Damage, Type
// Placeholder: only renders the heading — the Barracks UI is not yet
// implemented.
function buildBarracks( params )
{
    var data = getExpansionData( [ 1, 0, 0, 0 ], params, 2 );
    data.o.append( "<h2>Barracks</h2>" );
}
// Blacksmith param details:
// Armor, Ranged Weapons, Melee Weapons
// Placeholder: only renders the heading — the Blacksmith UI is not yet
// implemented.
function buildBlacksmith( params )
{
    var data = getExpansionData( [ 0, 0, 0 ], params, 3 );
    data.o.append( "<h2>Blacksmith</h2>" );
}
// Shared setup for building an expansion. Returns object with:
//   p: parameter data (with the type id prepended at index 0)
//   t: tab number (1-based)
//   o: jQuery object of the tab to put content in
// NOTE(review): in the no-input branch `$(this)` is evaluated inside a
// plain function call, so `this` is not the clicked button — verify that
// path actually resolves the intended ".id" sibling.
function getExpansionData( def, input, type )
{
    var params, tab;
    if( input == undefined )
    {
        // Built fresh from a click: use the defaults.
        params = def;
        tab = $(this).siblings(".id").text();
    }
    else
    {
        // Restored from the cache: pad older records to the expected
        // parameter count.
        while( input.length < def.length )
        {
            input.push( 0 );
        }
        tab = document.expansions.length;
        params = input;
    }
    var object = $("#ExpansionTabs div#TabExpansion" + tab);
    // Clear the tab except for the hidden ".id" marker.
    object.children().not("[class='id']").remove();
    // NOTE(review): paramscopy aliases params (no actual copy), so the
    // unshift below also mutates the caller's array / the returned `p` —
    // confirm callers expect the type id at index 0.
    var paramscopy = params;
    paramscopy.unshift( type );
    document.expansions[ tab - 1 ] = paramscopy;
    permCache( "Expansions", document.expansions );
    return {
        p: params,
        t: tab,
        o: object
    };
}
/**
 * Cost of the next expansion, growing exponentially:
 * round( 1000 ^ (1 + 0.05 * expansions owned) ).
 */
function getIncreaseExpansionCost()
{
    var exponent = 1 + ( 0.05 * getExpansion() );
    return Math.round( Math.pow( 1000, exponent ) );
}
/**
 * Refreshes the "Add Expansion" button label with the current cost.
 * (Removed an unused local that fetched the expansion count.)
 */
function updateIncreaseExpansionButton()
{
    var cost = getIncreaseExpansionCost();
    $("#tdIncreaseExpansion").button( "option", "label", "Add Expansion(+1 layer, " + cost + " gold)" );
}
// Number of expansions the player currently owns.
function getExpansion()
{
    return document.expansions.length;
}
// Base Attack
function increaseBaseAttack()
{
document.baseatkupgrades++;
permCache( "BaseAttackUpgrades", document.baseatkupgrades );
updateIncreaseBaseAtkButton();
}
/**
 * Gold cost of the next attack upgrade: 5 for the first, +5 for each
 * upgrade already bought.
 */
function getIncreaseBaseAtkCost()
{
    return ( document.baseatkupgrades + 1 ) * 5;
}
/**
 * Refreshes the displayed attack value and the upgrade button price.
 */
function updateIncreaseBaseAtkButton()
{
    $("#BaseAttack").html( getAttack() );
    var label = "Increase Base Atk(+5 dmg, " + getIncreaseBaseAtkCost() + " gold)";
    $("#tdIncreaseBaseAtk").button( "option", "label", label );
}
/**
 * Tower damage per shot: 5 per upgrade on top of the base attack.
 */
function getAttack()
{
    return ( document.baseatkupgrades * 5 ) + td.towerbaseatk;
}
// Base Range
function increaseBaseRange()
{
document.baserangeupgrades++;
permCache( "BaseRangeUpgrades", document.baserangeupgrades );
updateIncreaseBaseRangeButton();
}
/**
 * Gold cost of the next range upgrade: one cost step per upgrade bought,
 * plus one.
 */
function getIncreaseBaseRangeCost()
{
    return td.towerrangestepcost * ( document.baserangeupgrades + 1 );
}
/**
 * Refreshes the displayed range value and the upgrade button price.
 */
function updateIncreaseBaseRangeButton()
{
    $("#BaseRange").html( getRange() );
    var label = "Increase Base Range(+" + td.towerrangestep + " range, " +
        getIncreaseBaseRangeCost() + " gold)";
    $("#tdIncreaseBaseRange").button( "option", "label", label );
}
/**
 * Tower attack range: base range plus one range step per upgrade.
 */
function getRange()
{
    return ( document.baserangeupgrades * td.towerrangestep ) + td.towerbaserange;
}
// HP
/**
 * Applies one max-HP upgrade; the tower is also healed by the same amount.
 * The new current HP is persisted via the updateHP() call chain.
 */
function increaseBaseHP()
{
    document.basehp += td.towerhpstep;
    document.basemaxhpupgrades += 1;
    permCache( "BaseHPUpgrades", document.basemaxhpupgrades );
    updateIncreaseBaseHPButton();
}
/**
 * Gold cost of a max-HP upgrade (flat, does not scale with upgrades).
 */
function getIncreaseBaseHPCost()
{
    return td.towerhpstepcost;
}
/**
 * Refreshes both max-HP displays, the upgrade button price, and (via
 * updateHP) the current-HP displays and restore buttons.
 */
function updateIncreaseBaseHPButton()
{
    $("#BaseMaxHP,#BaseMaxHPCopy").html( getMaxHP() );
    var label = "Increase Base Max HP(+" + td.towerhpstep + ", " +
        getIncreaseBaseHPCost() + " gold)";
    $("#tdIncreaseBaseHP").button( "option", "label", label );
    updateHP();
}
/**
 * Tower maximum HP: base max plus one HP step per upgrade.
 */
function getMaxHP()
{
    return ( document.basemaxhpupgrades * td.towerhpstep ) + td.towerbasemaxhp;
}
/**
 * Persists and redisplays current HP. Missing HP doubles as the
 * full-restore price (1 gold per HP); both restore buttons are disabled
 * when the tower is already at full health.
 */
function updateHP()
{
    var missing = getMaxHP() - document.basehp;
    permCache( "BaseHP", document.basehp );
    $("#BaseHP,#BaseHPCopy").html( document.basehp );
    $("#tdRestoreBaseHPAll").button( "option", "label", "Restore All HP(" + missing + " gold)" );
    $("#tdRestoreBaseHPAll,#tdRestoreBaseHP").button( "option", "disabled", missing == 0 );
}
// Baddy HP
function increaseBaddyHP()
{
document.baddyhpup++;
permCache( "BaddyHPUp", document.baddyhpup );
updateIncreaseBaddyHPButton();
}
/**
 * Spawn HP for a baddy type: the type's base HP plus the global
 * HP-upgrade bonus.
 */
function getBaddyHP( type )
{
    return getBaddyHPAddAll() + td.baddytypes[ type ].hp;
}
/**
 * Total bonus HP every baddy gains from HP upgrades. The i-th upgrade
 * adds i * td.baddyhpstep, so for n upgrades the total is the arithmetic
 * series step * n * (n + 1) / 2 (closed form replaces the former O(n)
 * accumulation loop; n*(n+1) is always even, so the result stays integral).
 */
function getBaddyHPAddAll()
{
    var n = document.baddyhpup;
    return td.baddyhpstep * n * ( n + 1 ) / 2;
}
/**
 * Shows the cumulative baddy-HP bonus (and its gold bonus) and reprices
 * the upgrade button with the next step's size.
 */
function updateIncreaseBaddyHPButton()
{
    var step = ( document.baddyhpup + 1 ) * td.baddyhpstep;
    var goldbonus = document.baddyhpup * 2;
    $("#InitialBaddyHP").html( "+" + getBaddyHPAddAll() + ",+" + goldbonus + "% gold" );
    $("#tdIncreaseBaddyHP").button( "option", "label", "Increase Baddy HP(+" + step + " HP,+2% gold)" );
}
// Baddy Speed
function increaseBaddySpeed()
{
document.baddyspeedup++;
permCache( "BaddySpeedUp", document.baddyspeedup );
updateIncreaseBaddySpeedButton();
}
/**
 * Spawn speed for a baddy type: base speed plus upgrade bonus plus the
 * type's own speed modifier.
 */
function getBaddySpeed( type )
{
    var upgraded = td.baddyspeedstep * document.baddyspeedup;
    return td.baddybasespeed + upgraded + td.baddytypes[ type ].speed;
}
/**
 * Shows the cumulative baddy-speed bonus (and its gold bonus) and
 * reprices the upgrade button.
 */
function updateIncreaseBaddySpeedButton()
{
    var bonus = td.baddyspeedstep * document.baddyspeedup;
    var goldbonus = document.baddyspeedup * 2;
    $("#InitialBaddySpeed").html( bonus + ",+" + goldbonus + "% gold" );
    $("#tdIncreaseBaddySpeed").button( "option", "label", "Increase Baddy Speed(+" + td.baddyspeedstep + " Speed,+2% gold)" );
}
// Baddy Complexity
/**
 * Raises the baddy wave complexity, persists it, and registers any baddy
 * types in the new wave that haven't been seen before.
 */
function increaseBaddyComplexity()
{
    document.baddycomplexity++;
    permCache( "BaddyComplexity", document.baddycomplexity );
    updateIncreaseBaddyComplexity();
    var wave = getComplexityWave();
    for( var i = 0; i < wave.length; i++ )
    {
        var baddytype = wave[ i ];
        // addBeastiary() records the type in document.seenbaddies itself.
        if( document.seenbaddies.indexOf( baddytype ) == -1 )
        {
            addBeastiary( baddytype );
        }
    }
}
// Current baddy-wave complexity level (1-based).
function getBaddyComplexity()
{
    return document.baddycomplexity;
}
// Spawn wave (array of baddy type ids) for the current complexity level.
function getComplexityWave()
{
    return td.complexities[ getBaddyComplexity() - 1 ];
}
/**
 * Shows the current complexity; disables the increase button (with a
 * "Max Complexity" label) once the last defined wave is reached.
 */
function updateIncreaseBaddyComplexity()
{
    var complexity = getBaddyComplexity();
    $("#BaddyComplexity").html( complexity );
    if( td.complexities.length == complexity )
    {
        $("#tdIncreaseBaddyComplexity")
            .button( "option", "disabled", true )
            .button( "option", "label", "Max Complexity" );
    }
    else
    {
        $("#tdIncreaseBaddyComplexity").button( "option", "label", "Increase Baddy Complexity" );
    }
}
/**
 * Adds a baddy type's entry to the Beastiary accordion and marks it as
 * seen. Note: this pushes onto document.seenbaddies itself, so callers
 * should not push the level again.
 */
function addBeastiary( level )
{
    document.seenbaddies.push( level );
    var baddyinfo = td.baddytypes[ level ];
    $("#BeastiaryAccordion")
        .append( "<h3><a href='#'>" + baddyinfo.name + "</a></h3>" )
        .append( $("<div></div>")
            // TODO: Insert image here...bunnies!
            .append( "<div id='" + baddyinfo.image + "' style='width:128px;height:128px;display:inline;float:left;'></div>" )
            // Stats table: base value, upgrade bonus (yellow), and total.
            .append( "<table style='color:white'><tr><td>Name</td><td colspan=3 style='text-align:center'>" +
                baddyinfo.name + "</td></tr><tr><td>Health</td><td>" +
                baddyinfo.hp + "</td><td style='color:#FFFF00'>+" + getBaddyHPAddAll() + "</td><td>=" +
                getBaddyHP( level ) + "</td></tr><tr><td>Speed</td><td>" +
                ( td.baddybasespeed + baddyinfo.speed ) + "</td><td style='color:#FFFF00'>+" +
                ( td.baddyspeedstep * document.baddyspeedup ) + "</td><td>=" +
                getBaddySpeed( level ) + "</td></tr></table>" )
        // The accordion must be re-initialized after appending entries.
        ).accordion( "destroy" ).accordion({
            active: false,
            collapsible: true,
            clearStyle: true
        });
}
// Baddy Spawning
/**
 * Creates a baddy record. Its toString() serializes in the bracketed form
 * the perm cache round-trips, e.g. [100,0.1,5,300,"Rat"].
 */
function createBaddy( i_hp, i_multiplier, i_speed, i_distance, i_name )
{
    var baddy = {
        hp: i_hp,
        multiplier: i_multiplier,
        speed: i_speed,
        distance: i_distance,
        name: i_name
    };
    baddy.toString = function(){
        return "[" + this.hp + "," + this.multiplier + "," + this.speed +
            "," + this.distance + ",\"" + this.name + "\"]";
    };
    return baddy;
}
/**
 * Number of baddies spawned per wave: one per slot in the current
 * complexity wave.
 */
function getBaddySpawnCount()
{
    return getComplexityWave().length;
}
/**
 * Spawns one full wave of baddies for the current complexity level and
 * persists the updated baddy list and count. (Removed an unused local
 * that fetched the complexity level.)
 */
function spawnBaddyWave()
{
    var count = getBaddySpawnCount();
    var wave = getComplexityWave();
    for( var i = 0; i < count; i++ )
    {
        var baddylevel = wave[ i ];
        // Gold multiplier: 2 points per player-bought baddy upgrade plus
        // the baddy type's own bonus (1 point = +1% gold on kill).
        var multiplier = ( document.baddyhpup + document.baddyspeedup ) * 2;
        multiplier += td.baddytypes[ baddylevel ].multiplier;
        document.baddies.push( createBaddy( getBaddyHP( baddylevel ), multiplier,
            getBaddySpeed( baddylevel ), td.baddybasespawndistance,
            td.baddytypes[ baddylevel ].name ) );
    }
    permCache( "Baddies", document.baddies );
    permCache( "BaddyCount", document.baddies.length, true );
}
// Baddy Damage
/**
 * Applies damage to the baddies at the given indexes, removes any that
 * die, awards gold per kill (1 gold plus 1% per multiplier point) and
 * persists the updated kill/count totals.
 *
 * @param damage  Damage dealt to each targeted baddy
 * @param indexes Array of indexes into document.baddies to hit
 */
function damageBaddies( damage, indexes )
{
    // Apply damage and collect the indexes of all baddies that died.
    var deadIndexes = [], i, index, j = 0, goldearned = 0;
    for( i = 0; i < indexes.length; i++ )
    {
        index = indexes[ i ];
        document.baddies[ index ].hp -= damage;
        if( document.baddies[ index ].hp <= 0 )
        {
            deadIndexes.push( index );
        }
    }
    if( deadIndexes.length == 0 )
    {
        return;
    }
    // Sort ascending so the splice-offset bookkeeping below is valid.
    function sortByNumber( a, b )
    {
        return a - b;
    }
    deadIndexes = deadIndexes.sort( sortByNumber );
    // Delete each one. Every earlier splice shifts later elements down by
    // one, so subtract the number already removed (j) from each stored index.
    while( deadIndexes.length > 0 )
    {
        document.totalbaddykills++;
        index = deadIndexes.shift() - j;
        goldearned += 1 + ( 0.01 * document.baddies[ index ].multiplier );
        document.baddies.splice( index, 1 );
        j++;
    }
    adjustGold( goldearned );
    permCache( "TotalBaddyKills", document.totalbaddykills, true );
    permCache( "BaddyCount", document.baddies.length, true );
}
/**
 * Main 500ms game tick: stops itself when the window closes, spawns a wave
 * when the field is empty, advances every baddy, applies base damage,
 * fires the tower at the closest baddy in range, and persists the baddies.
 */
function towerDLoop()
{
    // Stop the loop if the window is hidden.
    var w = $( "div#Tower_D" );
    if( !w.hasClass( "popup" ) )
    {
        clearInterval( document.towerdinterval );
        return;
    }
    // Spawn a wave if there are no baddies.
    if( document.baddies.length == 0 )
    {
        spawnBaddyWave();
    }
    // Move each baddy closer and find the closest.
    var closestbaddy = -1;
    var closestdistance = Number.MAX_VALUE;
    var damagedbase = false;
    for( var i = 0; i < document.baddies.length; i++ )
    {
        var speed = document.baddies[ i ].speed;
        document.baddies[ i ].distance -= speed;
        // NOTE(review): a baddy that reaches the base is pinned at
        // distance 0 and drains 1 HP every tick until killed, and there is
        // no defeat check when basehp reaches 0 — confirm intended.
        if( document.baddies[ i ].distance <= 0 )
        {
            document.baddies[ i ].distance = 0;
            document.basehp--;
            damagedbase = true;
        }
        if( document.baddies[ i ].distance < closestdistance )
        {
            closestbaddy = i;
            closestdistance = document.baddies[ i ].distance;
        }
    }
    // Update Base HP if damaged.
    if( damagedbase )
    {
        updateHP();
    }
    // Only attack if the closest baddy is in range.
    var range = getRange();
    if( closestdistance <= range )
    {
        damageBaddies( getAttack(), [closestbaddy] );
    }
    // Respawn immediately if the attack cleared the field.
    if( document.baddies.length == 0 )
    {
        spawnBaddyWave();
    }
    // Add graphical view stuff here
    // Use #TabTowerView as your container, though you'll probably want a sub
    // container so it doesn't mess up the layout
    // document.baddies:
    //  Array of all the baddies
    //  Includes: hp, name, multiplier, speed and distance
    //  Useful for you would be distance mostly, maybe the others
    //  Use document.baddies.length to find out how many
    //  From there use it like document.baddies[ 0 ].distance, etc.
    //  GL
    permCache( "Baddies", document.baddies );
}<file_sep><?php
require_once( 'MySQLObject.php' );
class UserDisabledModules extends MySQLObject
{
    /**
     * Gets the list of columns this MySQL table contains
     *
     * @return array Array of [USER_ID,MODULE_NAME,DISABLE_TIME]
     */
    protected function getColumns()
    {
        return array( 'USER_ID', 'MODULE_NAME', 'DISABLE_TIME' );
    }
    /**
     * Gets the name of the table
     *
     * @return string Name of the table (USER_DISABLED_MODULES)
     */
    protected function getTableName()
    {
        return 'USER_DISABLED_MODULES';
    }
    /**
     * Gets all of the disabled modules for a user
     *
     * @param int $userid User ID that is being queried for
     * @return array 2D array of [MODULE_NAME,DISABLE_TIME]
     */
    public function getDisabledModules( $userid )
    {
        return $this->get( array( 'USER_ID' => $userid ), NULL, 0,
                           array( 'MODULE_NAME', 'DISABLE_TIME' ) );
    }
    /**
     * Disables a list of modules for a specified user.
     *
     * Module names are stored upper-case and pre-quoted for the SQL layer.
     *
     * @param int $userid User ID that wants to disable the modules
     * @param array $modules Array of modules to disable
     * @return int Number of modules disabled
     */
    public function disableModules( $userid, $modules )
    {
        // Insert each value as upper case
        $values = array();
        foreach( $modules as $module )
        {
            array_push( $values, array( $userid, '"' . strtoupper( $module ) .
                '"', time()));
        }
        // NOTE(review): the second argument presumably refreshes
        // DISABLE_TIME when the row already exists (ON DUPLICATE KEY) —
        // confirm against MySQLObject::insert().
        $this->insert( $values, array( 'DISABLE_TIME' => time() ) );
        return count( $modules );
    }
    /**
     * Enables a list of modules for a specified user
     *
     * @param int $userid User ID that wants to enable the modules
     * @param array $modules Array of modules to enable
     * @return int Number of modules enabled
     */
    public function enableModules( $userid, $modules )
    {
        // Convert every value to upper case (by reference, matching the
        // quoting used when the rows were inserted).
        foreach( $modules as $key => &$value )
        {
            $value = '"' . strtoupper( $value ) . '"';
        }
        // Then delete the values
        return $this->delete( array( 'USER_ID' => $userid,
            'MODULE_NAME' => $modules ) );
    }
}
?><file_sep><?php
/**
* Basic concept: Interface to the Servers MySQL table
*
* Uses:
*
*/
require_once( 'MySQLObject.php' );
class Servers extends MySQLObject
{
function generateAICustomName()
{
$min = 8;
$max = 12;
$len = rand( $min, $max );
for( $i = 0; $i < $len; $i++ )
{
$which = rand( 1, 3 );
if( $which == 1 )
{
$string .= chr( rand( 48, 57 ) );
}
elseif( $which == 2 )
{
$string .= chr( rand( 65, 90 ) );
}
else
{
$string .= chr( rand( 97, 122 ) );
}
}
return $string;
}
function getColumns( )
{
return array( 'ID', 'OWNER_ID', 'IP', 'CUSTOM_NAME', 'CPU', 'RAM',
'HDD', 'BANDWIDTH', 'LAST_UPDATE_TIME',
'OPERATING_RATIO' );
}
function getTableName( )
{
return 'SERVERS';
}
function getCPUInfoForServers( $arr )
{
return $this->get( array( 'ID' => $arr ), null, 0,
array( 'CPU', 'LAST_UPDATE_TIME',
'OPERATING_RATIO', 'ID' ) );
}
function getAvailableIP( )
{
// Get what IPs are already taken
$takenIPs = $this->getOnlyColumn( 'IP' );
// Generate a new random IP
$randomIP = rand( 1, 4294967296 ); // 256 ^ 4
// While the IP is already taken, generate a new one
while( in_array( $randomIP, $takenIPs ) )
{
$randomIP = rand( 1, 4294967296 );
}
// Return the new/available one
return $randomIP;
}
function addServer( $ownerid )
{
$randomIP = $this->getAvailableIP();
return $this->insert( array( 'NULL', $ownerid, $randomIP, "''",
DEFAULT_CPU, DEFAULT_RAM, DEFAULT_HDD,
DEFAULT_BW, 'NOW()', '1.0' ) );
}
function updateName( $serverid, $newname )
{
$newname = '"' . mysql_real_escape_string( $newname ) . '"';
return $this->update( array( 'CUSTOM_NAME' => $newname ),
array( 'ID' => $serverid ) );
}
function getServersByOwner( $ownerid )
{
return $this->get( array( 'OWNER_ID' => $ownerid ),
array( 'ID' => 'ASC' ) );
}
function getAllServers( )
{
return $this->get( NULL, array( 'ID' => 'ASC' ) );
}
function getServerByIP( $ip )
{
return $this->get( array( 'IP' => $ip ), NULL, 1 );
}
function getServerByID( $id )
{
return $this->getSingle( $id );
}
function getServerIDByIP( $ip )
{
$row = $this->getServerByIP( $ip );
if( is_array( $row ) )
{
return $row[ 'ID' ];
}
return false;
}
/** Adds $amount (may be negative) to the server's CPU column. */
function adjustCPU( $server, $amount )
{
return $this->adjustSingleByID( $server, 'CPU', $amount );
}
/** Adds $amount (may be negative) to the server's RAM column. */
function adjustRAM( $server, $amount )
{
return $this->adjustSingleByID( $server, 'RAM', $amount );
}
/** Adds $amount (may be negative) to the server's HDD column. */
function adjustHDD( $server, $amount )
{
return $this->adjustSingleByID( $server, 'HDD', $amount );
}
/** Adds $amount (may be negative) to the server's BANDWIDTH column. */
function adjustBW( $server, $amount )
{
return $this->adjustSingleByID( $server, 'BANDWIDTH', $amount );
}
/**
 * Assigns a fresh random IP to a server.
 *
 * NOTE(review): this routes through adjustSingleByID, which — judging by
 * adjustAllStats building "CPU+$cpu" expressions — appears to ADD its
 * amount to the column rather than replace it. If so, the IP becomes
 * old IP + $randomIP instead of $randomIP; confirm whether update()
 * should be used here instead.
 */
function randomizeServerIP( $server )
{
$randomIP = $this->getAvailableIP();
return $this->adjustSingleByID( $server, 'IP', $randomIP );
}
/**
 * Applies relative deltas to a server's four hardware stats in a single
 * UPDATE. Zero deltas are skipped; if every delta is zero nothing is
 * written and 0 is returned.
 *
 * @param int $server Server row ID
 * @param int $cpu    CPU delta
 * @param int $ram    RAM delta
 * @param int $hdd    HDD delta
 * @param int $bw     Bandwidth delta
 * @return mixed Result of the update, or 0 when no change was requested
 */
function adjustAllStats( $server, $cpu, $ram, $hdd, $bw )
{
    // Values are SQL expressions ("CPU+5") so the update is applied
    // relative to the value currently stored in each column.
    $deltas = array( 'CPU' => $cpu, 'RAM' => $ram,
                     'HDD' => $hdd, 'BANDWIDTH' => $bw );
    $changes = array();
    foreach( $deltas as $column => $delta )
    {
        if( $delta != 0 )
        {
            $changes[ $column ] = $column . '+' . $delta;
        }
    }
    if( empty( $changes ) )
    {
        return 0;
    }
    return $this->update( $changes, array( 'ID' => $server ) );
}
/**
 * Writes an arbitrary column => value map onto the given server row;
 * used by the CPU/load refresh path.
 */
function updateCPUInfo( $serverid, $array )
{
return $this->update( $array, array( 'ID' => $serverid ) );
}
}
?><file_sep>/******************************************
Filter key strokes on inputs - v0.1
https://bitbucket.org/sacah/filterkeystrokes/src
******************************************/
(function($) {
$.fn.filterKeys = function() {
    // Attach a namespaced keypress handler to every .filterkeys input
    // inside this element. Each input's data-filterkeys attribute holds a
    // regex pattern that every typed character must match.
    $('.filterkeys', this).unbind('keypress.filterkeys').bind('keypress.filterkeys', function(e) {
        var code = e.which;
        // Always allow "no key" and backspace (8) through.
        if (!code || code == 8) {
            return true;
        }
        var pattern = $(this).attr('data-filterkeys');
        if (pattern) {
            var filterRegexp;
            try {
                filterRegexp = new RegExp(pattern);
            } catch (err) {
                // Broken pattern: let the keystroke through rather than
                // silently blocking all input.
                return true;
            }
            // Cancel the keypress when the character fails the filter.
            var typed = String.fromCharCode(code);
            if (!filterRegexp.test(typed)) {
                return false;
            }
        }
    });
    return this;
};
}(jQuery));<file_sep><?php
require_once( 'MySQLObject.php' );
/**
 * MySQL-backed log of runtime errors, capturing the request and session
 * state at the moment the error occurred.
 */
class Errors extends MySQLObject
{
    /** @return array Column names of the ERRORS table. */
    function getColumns()
    {
        return array( 'ERROR_TIME', 'DESCRIPTION', 'POST_DATA', 'SESSION_DATA',
                      'IP' );
    }
    /** @return string Name of the backing table. */
    function getTableName()
    {
        return 'ERRORS';
    }
    /** @return mixed All logged error rows. */
    function getAllErrors()
    {
        return $this->get();
    }
    /**
     * Records an error, along with escaped snapshots of $_REQUEST and
     * $_SESSION and the caller's IP.
     *
     * @param string $description Human-readable error description
     * @return mixed Result of the insert
     */
    function addError( $description )
    {
        $post = cleanupRowForJS( $_REQUEST );
        $post = mysql_real_escape_string( $post );
        $len = count( $_SESSION );
        $sessKeys = array_keys( $_SESSION );
        // Build a readable [ "key" => "value", ... ] dump of the session.
        // Fix: entries were previously concatenated with no delimiter,
        // which made multi-entry sessions unreadable in the log.
        $pairs = array();
        for( $i = 0; $i < $len; $i++ )
        {
            $pairs[] = '"' . $sessKeys[ $i ] . '" => "'
                     . $_SESSION[ $sessKeys[ $i ] ] . '"';
        }
        $sess = '[' . implode( ', ', $pairs ) . ']';
        $sess = mysql_real_escape_string( $sess );
        $description = mysql_real_escape_string( $description );
        return $this->insert(
                array( 'NOW()',
                       "\"$description\"",
                       "\"$post\"",
                       "\"$sess\"",
                       "\"{$_SERVER['REMOTE_ADDR']}\"" ) );
    }
}
?>
<file_sep><?php
// Application bootstrap: minifier, local overrides, session and RNG setup.
require_once( 'jsmin.php' );
require_once( 'inc.php' );
session_start();
srand();
date_default_timezone_set('America/Los_Angeles');
// Versions of the bundled client-side libraries and the asset base names
// derived from them.
define('JQUERY_VERSION', '1.7.1');
define('JQUERY_UI_VERSION', '1.8.16');
define('JQUERY_LAYOUT_VERSION', '1.3.0');
define('JTASKBAR_VERSION', '0.2');
define('JQUERY_JS', 'jquery-' . JQUERY_VERSION . '.min');
define('JQUERY_UI_CSS', 'jquery-ui-' . JQUERY_UI_VERSION . '.custom');
define('JQUERY_UI_JS', 'jquery-ui-' . JQUERY_UI_VERSION . '.custom.min');
define('JQUERY_LAYOUT_JS', 'jquery-layout-' . JQUERY_LAYOUT_VERSION . '.min');
define('JQUERY_LAYOUT_CSS', 'jquery-layout-' . JQUERY_LAYOUT_VERSION);
define('JQUERY_JTASKBAR', 'jquery-jtaskbar-' . JTASKBAR_VERSION);
define('JQUERY_QTIP', 'jquery.qtip.min');
// Already-minified JS bundles that clientfile_cache() must not re-minify.
$GLOBALS['JQUERY_FILES'] = array( JQUERY_JS, JQUERY_UI_JS, JQUERY_LAYOUT_JS,
                                  JQUERY_JTASKBAR, JQUERY_QTIP );
// Database credentials.
define('DB_NAME', 'admin_lad');
define('DB_USERNAME', 'admin_lad');
define('DB_PASS', '<PASSWORD>');
// Some default hardware for a computer
define('DEFAULT_CPU', 350); // Expressed in MHz
define('DEFAULT_RAM', 32); // Expressed in MB
define('DEFAULT_HDD', 250); // Expressed in MB
define('DEFAULT_BW', 3); // Expressed in KB
// How much every research point will grant in each category
define('STEP_CPU', 50);
define('STEP_RAM', 4);
define('STEP_HDD', 10);
define('STEP_BW', 1);
// Some research related stuff
define('DEFAULT_RESEARCH_CPU', 100);
define('DEFAULT_RESEARCH_RAM', 10);
define('DEFAULT_RESEARCH_TIME', 300);
// Some deletion related stuff
define('DEFAULT_DELETION_CPU', 25);
define('DEFAULT_DELETION_RAM', 5);
// Basic operations that processes can perform
define('PROCESS_OP_TRANSFER', 1);
define('PROCESS_OP_RESEARCH', 2);
define('PROCESS_OP_ENCRYPT', 3);
define('PROCESS_OP_DECRYPT', 4);
define('PROCESS_OP_DELETE', 5);
define('PROCESS_OP_COPY', 6);
define('PROCESS_OP_INSTALL', 7);
define('PROCESS_OP_UNINSTALL', 8);
// NOTE(review): the next four operation codes are all defined as 9 —
// this looks like a copy-paste slip (expected 9, 10, 11, 12?). Any code
// switching on these values cannot tell the operations apart. Left
// unchanged because persisted process rows may already use 9; confirm
// before renumbering.
define('PROCESS_OP_FIREWALL', 9);
define('PROCESS_OP_FIREWALLBREAKER', 9);
define('PROCESS_OP_PASSWORD', 9);
define('PROCESS_OP_PASSWORDBREAKER', 9);
// Basic program types
define('PROGRAM_TYPE_FIREWALL', 1);
define('PROGRAM_TYPE_FIREWALLBREAKER', 2);
define('PROGRAM_TYPE_PASSWORD', 3);
define('PROGRAM_TYPE_PASSWORDBREAKER', 4);
define('PROGRAM_TYPE_ENCRYPTER', 5);
define('PROGRAM_TYPE_DECRYPTER', 6);
define('PROGRAM_TYPE_MALWARE', 7);
// Some sizes for level 1 programs
define('FIREWALL_SIZE', 5);
define('FIREWALLBREAKER_SIZE', 10);
define('PASSWORD_SIZE', 2);
define('PASSWORDBREAKER_SIZE', 4);
define('ENCRYPTOR_SIZE', 40);
define('DECRYPTOR_SIZE', 40);
define('MALWARE_SIZE', 25);
// Difficulty levels for math problems
define('MATH_DIFF_ADD', 1);
define('MATH_DIFF_SUB', 2);
define('MATH_DIFF_MULT', 3);
define('MATH_DIFF_DIV', 4);
define('MATH_DIFF_ROOT', 5);
// Minimum crafting find chance
define('MINIMUM_CRAFT_FIND', 20);
// Forces an array to be 2D
/**
 * Normalizes a value into a 2-D array: scalars are wrapped twice, 1-D
 * rows once, and genuine 2-D arrays pass through untouched.
 *
 * @param mixed $val Scalar, 1-D row, or 2-D array
 * @return array A 2-D array
 */
function force2DArray( $val )
{
    if( !is_array( $val ) )
    {
        // Scalar: promote to a single-cell row.
        $val = array( $val );
    }
    if( isset( $val[ 0 ] ) && !is_array( $val[ 0 ] ) )
    {
        // 1-D row: promote to a one-row table.
        $val = array( $val );
    }
    return $val;
}
// Gets all the operations that increase the capacity of a server
/**
 * Lists the process operations whose results occupy HDD space on the
 * target server (their output must fit before the process may start).
 *
 * @return array PROCESS_OP_* constants
 */
function getHDDConsumingOperations( )
{
    $consumers = array( PROCESS_OP_COPY, PROCESS_OP_RESEARCH, PROCESS_OP_TRANSFER );
    return $consumers;
}
// Gets all variables that are client side
/**
 * Collects every server-side constant that the client JS needs; the map
 * is baked into the generated getDefault() helper by clientfile_cache().
 *
 * @return array Map of client-side name => constant value
 */
function getClientSideDefines()
{
return array(
    'RESEARCH_CPU' => DEFAULT_RESEARCH_CPU,
    'RESEARCH_RAM' => DEFAULT_RESEARCH_RAM,
    'RESEARCH_TIME' => DEFAULT_RESEARCH_TIME,
    'DELETE_CPU' => DEFAULT_DELETION_CPU,
    'DELETE_RAM' => DEFAULT_DELETION_RAM,
    'OP_TRANSFER' => PROCESS_OP_TRANSFER,
    'OP_RESEARCH' => PROCESS_OP_RESEARCH,
    'OP_ENCRYPT' => PROCESS_OP_ENCRYPT,
    'OP_DECRYPT' => PROCESS_OP_DECRYPT,
    'OP_DELETE' => PROCESS_OP_DELETE,
    'OP_COPY' => PROCESS_OP_COPY,
    'OP_INSTALL' => PROCESS_OP_INSTALL,
    'OP_UNINSTALL' => PROCESS_OP_UNINSTALL,
    'OP_FIREWALL' => PROCESS_OP_FIREWALL,
    'OP_FIREWALLBREAKER' => PROCESS_OP_FIREWALLBREAKER,
    'OP_PASSWORD' => PROCESS_OP_PASSWORD,
    'OP_PASSWORDBREAKER' => PROCESS_OP_PASSWORDBREAKER,
    'STEP_CPU' => STEP_CPU,
    'STEP_RAM' => STEP_RAM,
    'STEP_HDD' => STEP_HDD,
    'STEP_BW' => STEP_BW,
    'MATH_DIFF_ADD' => MATH_DIFF_ADD,
    'MATH_DIFF_SUB' => MATH_DIFF_SUB,
    'MATH_DIFF_MULT' => MATH_DIFF_MULT,
    'MATH_DIFF_DIV' => MATH_DIFF_DIV,
    'MATH_DIFF_ROOT' => MATH_DIFF_ROOT,
    'ALL_MODULES' => implode(',', array_keys( opt_getValidModules() ) )
    );
}
// Gets the size of a program based on its type
/**
 * Maps a program type to the disk size of its level-1 version.
 *
 * @param int $type One of the PROGRAM_TYPE_* constants
 * @return int|null Size, or null for an unknown type
 */
function getProgramSize( $type )
{
    $sizes = array(
        PROGRAM_TYPE_FIREWALL        => FIREWALL_SIZE,
        PROGRAM_TYPE_FIREWALLBREAKER => FIREWALLBREAKER_SIZE,
        PROGRAM_TYPE_PASSWORD        => PASSWORD_SIZE,
        PROGRAM_TYPE_PASSWORDBREAKER => PASSWORDBREAKER_SIZE,
        PROGRAM_TYPE_ENCRYPTER       => ENCRYPTOR_SIZE,
        PROGRAM_TYPE_DECRYPTER       => DECRYPTOR_SIZE,
        PROGRAM_TYPE_MALWARE         => MALWARE_SIZE
    );
    // Unknown types fall through to null, matching the old switch that
    // simply ran off the end without returning.
    return isset( $sizes[ $type ] ) ? $sizes[ $type ] : null;
}
/**
* Returns an array with all of the current valid modules.
* @return array Array of [SERVERS,MATH,CRAFTING,TOWERD]
*/
/**
 * Returns the registry of valid client modules. Keys are module names;
 * each value lists the extra stylesheets that module needs.
 *
 * @return array Map of module name => array of stylesheet base names
 */
function opt_getValidModules()
{
    $modules = array(
        'SERVERS'  => array(),
        'MATH'     => array(),
        'CRAFTING' => array(),
        'TOWERD'   => array( 'towerd' ),
        'JAVABE'   => array()
    );
    return $modules;
}
/**
* @param array $row 1-D row to cleanup for viewing
* @return string Similar to implode except that strings are properly handled
*/
/**
 * Serializes a 1-D row into a comma-joined string for embedding in JS.
 * Unlike implode(), string values are escaped and wrapped in quotes.
 *
 * @param array $row 1-D row to clean up for viewing
 * @return string Comma-separated representation of the row
 */
function cleanupRowForJS( $row )
{
    $parts = array();
    foreach( $row as $item )
    {
        if( is_string( $item ) )
        {
            // Quote + escape strings so they are valid JS literals.
            $item = "\"" . mysql_real_escape_string( $item ) . "\"";
        }
        $parts[] = $item;
    }
    return implode( ',', $parts );
}
// Tells JS to handle a 2D array based on its existence
/**
 * Emits a JS call describing a 2-D result set: $validfunc([...rows...])
 * when the array has rows, $invalidfunc() when it is empty.
 *
 * @param string $validfunc   JS function to call with the row data
 * @param string $invalidfunc JS function to call when there is no data
 * @param array  $arr         2-D array of rows
 */
function echo2DArray( $validfunc, $invalidfunc, $arr )
{
    if( count( $arr ) == 0 )
    {
        echo "$invalidfunc();";
        return;
    }
    // Serialize every row, then join them into one JS array literal.
    $rows = array();
    foreach( $arr as $row )
    {
        $rows[] = '[' . cleanupRowForJS( $row ) . ']';
    }
    echo "$validfunc([" . implode( ',', $rows ) . ']);';
}
// Only works with CSS or JS files
/**
 * Builds the on-disk path of the cached copy of a client asset. CSS and
 * JS cached copies live in a cache/ subfolder of their asset directory.
 * Only works with CSS or JS files.
 *
 * @param string $type One-letter asset type code (see clientfile_getType)
 * @param string $base Asset base name without extension
 * @return string Path to the cached file
 */
function clientfile_getCacheName( $type, $base )
{
    $folder = clientfile_getFolder( $type );
    if( $type == 'J' || $type == 'C' )
    {
        $folder .= '/cache';
    }
    $extension = clientfile_getExtension( $type );
    return $folder . '/' . $base . '.' . $extension;
}
/**
 * Maps a file extension to its one-letter asset type code.
 *
 * @param string $extension Lowercase file extension
 * @return string|null Type code, or null for an unknown extension
 */
function clientfile_getType( $extension )
{
    static $types = array( 'js' => 'J', 'css' => 'C', 'jpg' => 'P',
                           'jpeg' => 'E', 'png' => 'N', 'svg' => 'S',
                           'gif' => 'G' );
    return isset( $types[ $extension ] ) ? $types[ $extension ] : null;
}
/**
 * Maps a one-letter asset type code back to its file extension; the
 * inverse of clientfile_getType().
 *
 * @param string $type Type code
 * @return string|null Extension, or null for an unknown code
 */
function clientfile_getExtension( $type )
{
    static $extensions = array( 'J' => 'js', 'C' => 'css', 'P' => 'jpg',
                                'E' => 'jpeg', 'N' => 'png', 'S' => 'svg',
                                'G' => 'gif' );
    return isset( $extensions[ $type ] ) ? $extensions[ $type ] : null;
}
/**
 * Returns the asset directory for a type code: js/, css/, or img/ for
 * every image type.
 *
 * @param string $type Type code
 * @return string Folder name (no trailing slash)
 */
function clientfile_getFolder( $type )
{
    switch( $type )
    {
        case 'J':
            return 'js';
        case 'C':
            return 'css';
        default:
            // All remaining type codes are images.
            return 'img';
    }
}
/**
 * Resolves the real source path of a client asset. A JS "file" may
 * actually be a directory of fragments (compiled later by
 * clientfile_cache), in which case the directory path is returned.
 *
 * @param string $type One-letter asset type code
 * @param string $base Asset base name without extension
 * @return string|null Path, or null (fall-through) for an unknown type
 */
function clientfile_getName( $type, $base )
{
$extension = clientfile_getExtension( $type );
switch( $type )
{
case 'J':
// A directory of JS fragments takes precedence over a single file.
if( is_dir( "js/$base" ) )
{
return "js/$base";
}
return "js/$base.$extension";
case 'C':
return "css/$base.$extension";
case 'P':
case 'E':
case 'N':
case 'S':
case 'G':
return "img/$base.$extension";
}
}
/**
 * Maps an asset type code to the MIME type used when serving it.
 *
 * @param string $type Type code
 * @return string|null MIME type, or null for an unknown code
 */
function clientfile_getApplicationType( $type )
{
    // 'P' (jpg) and 'E' (jpeg) share the same MIME type.
    static $mime = array( 'J' => 'text/javascript',
                          'C' => 'text/css',
                          'P' => 'image/jpeg',
                          'E' => 'image/jpeg',
                          'N' => 'image/png',
                          'S' => 'image/svg+xml',
                          'G' => 'image/gif' );
    return isset( $mime[ $type ] ) ? $mime[ $type ] : null;
}
/**
 * Builds the get.php URL for a client asset, regenerating the CSS/JS
 * cache first when the source is newer than the cached copy. The mtime
 * is embedded in the URL so browsers re-fetch when the asset changes.
 *
 * @param string $type One-letter asset type code
 * @param string $base Asset base name without extension
 * @return string get.php request URL for the asset
 */
function clientfile_buildRequest( $type, $base )
{
$cacheName = clientfile_getCacheName( $type, $base );
$actualFile = clientfile_getName( $type, $base );
if( $type == 'C' || $type == 'J' )
{
if( !is_dir( $actualFile ) )
{
$mtime = filemtime( $actualFile );
}
else
{
// Directory of JS fragments: the effective mtime is the newest
// fragment's mtime, and the directory's own mtime is bumped to
// match so later comparisons are cheap.
$maxTime = 0;
foreach( scandir( $actualFile ) as $subFile )
{
if( $subFile == '.' || $subFile == '..' )
{
continue;
}
$subTime = filemtime( "$actualFile/$subFile" );
if( $subTime > $maxTime )
{
$maxTime = $subTime;
}
}
chmod( $actualFile, 0777 );
touch( $actualFile, $maxTime );
$mtime = $maxTime;
}
// Rebuild the cached copy when missing or stale.
if( !is_file( $cacheName ) || $mtime > filemtime( $cacheName ) )
{
clientfile_cache( $type, $base );
}
}
else
{
// Images are served as-is; no cache step.
$mtime = filemtime( $actualFile );
}
return "get.php?t=$type&f=$base&m=$mtime";
}
/**
 * (Re)builds the cached copy of a CSS/JS asset: concatenates JS fragment
 * directories, rewrites url(...) references to go through get.php,
 * appends the getDefault() constant table to main.js, and minifies JS
 * that is not an already-minified vendor bundle.
 *
 * @param string $type One-letter asset type code ('J' or 'C')
 * @param string $base Asset base name without extension
 */
function clientfile_cache( $type, $base )
{
$cacheFileName = clientfile_getCacheName( $type, $base );
$actualFileName = clientfile_getName( $type, $base );
if( !file_exists( 'js/cache' ) )
{
mkdir( 'js/cache' );
}
if( !file_exists( 'css/cache' ) )
{
mkdir( 'css/cache' );
}
// Rebuild cache
// Read each line one at a time
if( is_file( $actualFileName ) )
{
$lineArray = file( $actualFileName );
}
else
{
// File is actually a folder that needs to be compiled
$folder = "js/$base";
$longString = '';
foreach( scandir( $folder ) as $subFile )
{
$subFilePath = "$folder/$subFile";
// Ignore . and ..
if( $subFile == '.' || $subFile == '..' )
{
continue;
}
// Add each line to the long string
$longString .= file_get_contents( $subFilePath );
}
$lineArray = preg_split( "/[\\r\\n]+/", $longString );
}
$outBuffer = '';
foreach( $lineArray as $line )
{
// URL's are not correct so...let's fix em
// We need to extract the image name out of the url()
// and then replace it with a string that we build
$urlIndex = strpos( $line, 'url(' );
if( $urlIndex === false )
{
// No URL, just echo out
$outBuffer .= "$line\n";
}
else
{
// Alright, there is a URL, so figure out exactly where it is
// NOTE(review): 'url(' is only 4 chars, yet the name is read from
// offset +11 and the replace length is name length + 7 — this
// only works if every url() in the sources carries a fixed
// 7-character prefix inside the parentheses (e.g. a path like
// "images/"). Confirm against the actual stylesheets.
$fileIndex = $urlIndex + 11;
$otherparenIndex = strpos( $line, ')', $urlIndex );
$fullFileName = substr( $line, $fileIndex,
                        $otherparenIndex - $fileIndex );
$dotIndex = strrpos( $fullFileName, '.' );
$extension = substr( $fullFileName, $dotIndex + 1 );
$fileName = substr( $fullFileName, 0, $dotIndex );
$extensionType = clientfile_getType( $extension );
$replacement = clientfile_buildRequest( $extensionType, $fileName );
// Now just replace it and echo out
$outBuffer .= substr_replace( $line, $replacement, $urlIndex + 4,
                              strlen( $fullFileName ) + 7 ) . "\n";
}
}
// Add defaults to main
if( $type == 'J' && $base == 'main' )
{
// Bake the server-side constants into a JS getDefault() helper.
$outBuffer .= "function getDefault(val){";
$csd = getClientSideDefines();
foreach( $csd as $index => $value )
{
if( is_string( $value ) )
{
$value = "\"$value\"";
}
$outBuffer .= "if(val==\"$index\"){return $value;}";
}
$outBuffer .= "return 0;}";
}
if( $type == 'J' && !in_array( $base, $GLOBALS['JQUERY_FILES'] ) )
{
// Vendor bundles in JQUERY_FILES are already minified; skip them.
$outBuffer = JSMin::minify( $outBuffer );
$outBuffer = str_replace( "\n", '', $outBuffer );
}
file_put_contents( $cacheFileName, $outBuffer );
}
/**
 * Selects one of four 8-bit windows of $number (window $bit shifts the
 * value right by (4 - $bit) * 2 bits) and returns the 1-based position of
 * the lowest set bit within the low byte of the result, or 0 when none
 * of those eight bits are set. Out-of-range $bit values fall back to 1.
 *
 * @param int $number Value to inspect
 * @param int $bit    Window selector, 1..4
 * @return int Position 1-8 of the lowest set bit, or 0
 */
function getHighestBit( $number, $bit )
{
    if( $bit > 4 || $bit < 1 )
    {
        $bit = 1;
    }
    $number = $number >> ( ( 4 - $bit ) * 2 );
    // Scan the low eight bits from least to most significant.
    for( $pos = 0; $pos < 8; $pos++ )
    {
        if( $number & ( 1 << $pos ) )
        {
            return $pos + 1;
        }
    }
    return 0;
}
/*************** END OF FUNCTIONS - BEGIN INIT ********************************/
// Connect to MySQL
$sqlConnection = mysql_pconnect( 'localhost', DB_USERNAME, DB_PASS );
if( !$sqlConnection )
{
die( 'Failed to connect to MySQL.' . mysql_error() );
}
// Select Database
$dbSelection = mysql_select_db( DB_NAME );
if( !$dbSelection )
{
die( 'Failed to select DB in MySQL.' . mysql_error() );
}
// Validate admin variables
// If inc.php did not define ADMIN_DISABLED_MODULES, write a default
// (empty) definition back into the private include so later requests
// have it. NOTE(review): the generated snippet uses the short open tag
// '<?', which requires short_open_tag to be enabled — confirm the
// deployment config.
if( !@constant( 'ADMIN_DISABLED_MODULES' ) )
{
$text = <<<EOT
<?
define('ADMIN_DISABLED_MODULES','');
?>
EOT;
file_put_contents( $_SERVER['DOCUMENT_ROOT'] . '/LAD/private/inc.php', $text );
}
?><file_sep><?php
/**
* @file ah_login.php
*
* Basic concept: Handle login related ajax commands
*
* Handled $action values:
* newuser1
* newuser2
* login
*
* Session vars:
* ID = Sets the ID into session to help control authorization
* username = Sets the Username into session to send to newuser2 during
* creation of account.
* password = Sets the Password into session to send to newuser2 during
* creation of account.
*
* 1. If Login was selected checks if user/pass combo is valid. Does a sanity
* check on the user/pass for length else die.
* 1a. If Login was invalid echo back to main.js invalidLoginCombo().
* 1b. If Login was valid sets session for ID and NICK. Then echo back to
* main.js validLogin() with ID of the user.
* 2. If New User was selected it requests username/password from main.js login
* form. Does a sanity check on the user/pass for length else die.
* 2a. Checks to see if Username is already taken. If unavailable echo back to
* main.js usernameTaken().
* 2b. If Username is available puts username/password into session vars and
* echo back to main.js usernameAvailable().
* 3. Once user inputs the retyped password and email newuser2 checks if the
* password matches and if the email is already in use. It pulls in the
* session vars from step 1b2 and checks if they are valid else die.
* 3a. If the Email is already in the database echo back to main.js emailTaken()
* 3b. If the Email isn't used places username, password, and email into the
* database. Gets the ID for the new user, and places that into session
* var and echo back to main.js acountCreated() with their new ID.
*/
require_once( 'private/users.php' );
/**
 * Validates an email address against a strict pattern: dotted local part,
 * then either a hostname with a 2-6 letter TLD or a dotted-quad IP, with
 * an optional port suffix.
 *
 * @param string $email Address to validate
 * @return bool True when the address matches the pattern
 */
function isValidEmail($email){
    $pattern = "/^[-_a-z0-9\'+*$^&%=~!?{}]++(?:\.[-_a-z0-9\'+*$^&%=~!?{}]+)*"
    . "+@(?:(?![-.])[-a-z0-9.]+(?<![-.])\.[a-z]{2,6}|\d{1,3}(?:\.\d{1,3}){3})"
    . "(?::\d++)?$/iD";
    return preg_match($pattern, $email) === 1;
}
/**
 * Finalizes a successful login: seeds the session, reports validLogin()
 * to the client, and emits addScriptElement()/addStylesheet() calls for
 * the admin bundle (if applicable) and every module the user has not
 * disabled and the system has not disabled.
 *
 * @param Users $user   Users data object
 * @param array $result Row returned by Users::checkCombo for this login
 */
function loadApplicableModules( $user, $result )
{
require_once( 'userdisabledmodules.php' );
// Set up session vars
$_SESSION[ 'ID' ] = $result[ 'ID' ];
$_SESSION[ 'username' ] = $result[ 'NICK' ];
$_SESSION[ 'GATHERING_POINTS' ] = $result[ 'GATHERING_POINTS' ];
$id = $result[ 'ID' ];
$gpoints = $result[ 'GATHERING_POINTS' ];
// Check admin
$isAdmin = $_SESSION['isAdmin'] = $user->isUserDataAdmin( $result );
echo "validLogin($id,$gpoints);";
// Add admin script/stylesheet if admin
if( $isAdmin )
{
echo 'addScriptElement("' .
clientfile_buildRequest( 'J', 'admin' ) . '");';
echo 'addStylesheet("' .
clientfile_buildRequest( 'C', 'admin' ) . '");';
}
// Walk over modules and add script elements for enabled ones
$validModules = opt_getValidModules();
$userdisabledmodules = new UserDisabledModules();
$disabledModules = $userdisabledmodules->getDisabledModules( $id );
// Callback for array_walk: $key is the module name, $value its extra
// stylesheets, $modules the user's disabled-module rows.
function moduleWalker( $value, $key, $modules )
{
// Check if the user has disabled this specific module
$varupper = strtoupper( $key );
foreach( $modules as $module )
{
if( $varupper == $module[ 'MODULE_NAME' ] )
{
return;
}
}
// Check if this module has been disabled by the system
if( strpos( ADMIN_DISABLED_MODULES, $varupper ) !== false )
{
return;
}
$request = clientfile_buildRequest( 'J', strtolower( $key ) );
echo "addScriptElement('$request');";
if( !empty( $value ) )
{
foreach( $value as $csssheet )
{
$cssrequest = clientfile_buildRequest( 'C', $csssheet );
echo "addStylesheet('$cssrequest');";
}
}
}
array_walk( $validModules, "moduleWalker", $disabledModules );
}
/*********************************** STEP 1 ***********************************/
/*********************************** STEP 1 ***********************************/
// Login: validate the submitted credentials' shape, then check the
// username/password combo against the Users table.
if( $action == 'login' )
{
$rnick = $_REQUEST['username'];
$rpass = $_REQUEST['<PASSWORD>'];
// Usernames: 4-20 alphanumeric characters, not starting with a digit.
if( strlen($rnick) < 4 || strlen($rnick) > 20 || !ctype_alnum($rnick) ||
preg_match('/^\d/', $rnick) === 1 )
{
ahdie('Stupid Muppet! Username is the wrong!');
}
// Passwords: 4-40 characters.
if( strlen($rpass) < 4 || strlen($rpass) > 40 )
{
ahdie('Stupid Muppet! Password is the wrong length!');
}
$user = new Users();
$result = $user->checkCombo( $rnick, $rpass );
/*********************************** STEP 1a **********************************/
if( $result == false )
{
echo "invalidLoginCombo();";
}
/*********************************** STEP 1b **********************************/
else
{
loadApplicableModules( $user, $result );
}
}
/*********************************** STEP 2 ***********************************/
// New user, phase 1: validate the requested username/password shape and
// check the username for availability.
elseif( $action == 'newuser1' )
{
    $rnick = $_REQUEST['username'];
    $rpass = $_REQUEST['<PASSWORD>'];
    // Fix: the previous check mixed && and || without parentheses, so a
    // username that was non-alphanumeric or digit-leading (but length-valid)
    // slipped through validation. Mirror the 'login' branch's rule:
    // 4-20 alphanumeric characters, not starting with a digit.
    if( strlen($rnick) < 4 || strlen($rnick) > 20 || !ctype_alnum($rnick) ||
        preg_match('/^\d/', $rnick) === 1 )
    {
        ahdie('Stupid Muppet! Username is the wrong!');
    }
    else
    {
        $nick = $rnick;
    }
    // Passwords: 4-40 characters.
    if( !( strlen($rpass) > 3 && strlen($rpass) < 41 ) )
    {
        ahdie('Stupid Muppet! Password is the wrong length!');
    }
    else
    {
        $pass = $rpass;
    }
    $user = new Users();
    $result = $user->checkUsernameExists( $nick );
    /*********************************** STEP 2a **********************************/
    if( $result != 0 )
    {
        echo "usernameTaken()";
    }
    /*********************************** STEP 2b **********************************/
    else
    {
        // Stash the validated credentials for phase 2 (newuser2).
        $_SESSION['username'] = $nick;
        $_SESSION['password'] = $pass;
        echo "usernameAvailable()";
    }
}
/*********************************** STEP 3 ***********************************/
// New user, phase 2: confirm the password, validate/deduplicate the email,
// then create the account and log the user in.
elseif( $action == 'newuser2' )
{
// Phase 1 must have stashed the credentials in session.
if(!isset($_SESSION[ 'username' ]) || !isset($_SESSION[ 'password' ]) ||
!isset($_REQUEST[ 'email' ]))
{
ahdie('Stupid Muppet! Invalid Username/Passord!');
}
$nick = $_SESSION[ 'username' ];
$pass = $_SESSION['<PASSWORD>'];
$email = $_REQUEST['email'];
if( !isValidEmail( $email ) )
{
ahdie('Stupid muppet! Your email isn\'t formatted right!');
}
// The retyped password must match the one from phase 1.
$cpass = $_REQUEST['cpassword'];
if( $pass != $cpass )
{
echo "cpasswordInvalid()";
}
else
{
$user = new Users();
$result = $user->checkEmailExists( $email );
/*********************************** STEP 3a **********************************/
if( $result != 0 )
{
echo "emailTaken()";
}
/*********************************** STEP 3b **********************************/
else
{
// Create the account, then log straight in via the same path login uses.
$id = $user->addUser( $nick, $pass, $email );
$result = $user->checkCombo( $nick, $pass );
loadApplicableModules( $user, $result );
}
}
}
/*********************************** STEP 4 ***********************************/
// Password reset: verify the username/email pair, then issue a new password.
// NOTE(review): this is a plain `if`, not `elseif`, unlike the other
// branches; harmless since $action holds one value, but inconsistent.
if( $action == 'passreset' )
{
if(!isset($_REQUEST[ 'username' ]) || !isset($_REQUEST[ 'email' ]))
{
ahdie('Stupid Muppet! Invalid Username!');
}
$nick = $_REQUEST[ 'username' ];
$email = $_REQUEST['email'];
if( !isValidEmail( $email ) )
{
ahdie('Stupid muppet! Your email isn\'t formatted right!');
}
$user = new Users();
$result = $user->checkEmailMatches( $nick, $email );
/*********************************** STEP 4a **********************************/
if( $result == false )
{
echo "emailWrong()";
}
/*********************************** STEP 4b **********************************/
else
{
$id = $result[ 'ID' ];
$nick = $result[ 'USER' ];
$user->changePass($id, $nick, $email);
}
}
?><file_sep><?php
/**
 * Deterministic, seed-based item placement for the crafting module. The
 * items present on a map cell are derived from the cell coordinates and
 * the user id, so the same cell always rolls the same items for a user.
 */
class Crafting
{
    private $cellX = 0;
    private $cellY = 0;
    private $cellUser = 0;
    private $cellItemCount = 0;
    private $cellHitChance = 0;
    private $maxItemProbability = 0;
    /** Number of items on a cell: lowest-set-bit position (1-8), or 0. */
    private function getItemCountOnMap( $bit )
    {
        return getHighestBit( $bit, 1 );
    }
    /** Seeds the RNG deterministically from cell coordinates + user id. */
    private function seedOnCellMap( )
    {
        srand( $this->cellX + ( $this->cellY * 256 * 256 ) +
               pow( $this->cellUser, 3 ) );
    }
    private function calculateHitChance( $bit )
    {
        // NOTE(review): '%' binds tighter than '>>', so this computes
        // $bit >> (4 % 256) == $bit >> 4, not ($bit >> 4) % 256.
        // Left as-is because min(100, ...) caps the result either way;
        // confirm the intended distribution.
        $value = $bit >> 4 % 256;
        $value = intval( $value / 2 );
        return min( 100, MINIMUM_CRAFT_FIND + $value );
    }
    /** @return int Largest MAX_PROB across all item types. */
    private function getMaxItemProbabilityValue()
    {
        $ret = MySQLObject::getCustom( 'SELECT MAX(MAX_PROB) AS PROB FROM ' .
                                       'ITEM_TYPES' );
        return $ret[ 'PROB' ];
    }
    public function getTableName( )
    {
        return 'ITEM_TYPES';
    }
    public function getColumns()
    {
        return array( 'ID', 'NAME', 'DESCRIPTION', 'MIN_PROB', 'MAX_PROB' );
    }
    /**
     * Rolls the items present on map cell (x, y) for the given user.
     *
     * @param int $x      Cell X coordinate
     * @param int $y      Cell Y coordinate
     * @param int $userid User rolling the cell
     * @return array Matching ITEM_TYPES rows, ordered by MIN_PROB
     */
    public function getItemsOnMap( $x, $y, $userid )
    {
        $this->cellX = $x;
        $this->cellY = $y;
        $this->cellUser = $userid;
        $this->seedOnCellMap( );
        $bit = rand( 1, 256 * 256 );
        $this->cellItemCount = $this->getItemCountOnMap( $bit );
        $this->cellHitChance = $this->calculateHitChance( $bit );
        $this->maxItemProbability = $this->getMaxItemProbabilityValue();
        // Fix: a cell can legitimately roll zero items, which previously
        // produced the malformed query "... WHERE  ORDER BY ...".
        if( $this->cellItemCount == 0 )
        {
            return array();
        }
        $sql = '';
        for( $i = 0; $i < $this->cellItemCount; $i++ )
        {
            $itemid = rand( 1, $this->maxItemProbability );
            if( $sql != '' )
            {
                $sql .= ' OR';
            }
            // Fix: select items whose [MIN_PROB, MAX_PROB] range contains
            // the roll; the old comparison (MIN_PROB > roll AND
            // MAX_PROB < roll) could never be true, so no items were
            // ever returned.
            $sql .= " (MIN_PROB <= $itemid AND MAX_PROB >= $itemid)";
        }
        $sql = "SELECT * FROM ITEM_TYPES WHERE $sql ORDER BY MIN_PROB";
        $result = MySQLObject::getCustom( $sql );
        return $result;
    }
    /** @return int Number of distinct item types defined. */
    public function getItemTypeCount()
    {
        $ret = MySQLObject::getCustom( 'SELECT COUNT(ID) AS COUNT FROM ' .
                                       'ITEM_TYPES' );
        return $ret[ 'COUNT' ];
    }
}
?><file_sep><?php
/**
* @file ah_math.php
*
* Basic concept: Handle math related ajax commands
*
* Handled actions:
* nextmathquestion: Gets the next math question (optionally answers previous)
*/
require_once( 'solvedmath.php' );
/**
 * Gets a set number of random numbers between 1 and a maximum, ensuring
 * that at least one of the values equals the maximum.
 *
 * @param int $max   Maximum value (and guaranteed member of the result)
 * @param int $count Number of operands to generate
 * @return array $count values in 1..$max, at least one equal to $max
 */
function getRandomOperands( $max, $count )
{
    // Fix: the docblock described parameters that did not exist, and the
    // old probabilistic scheme did not actually guarantee that $max
    // appeared in the result (it also called rand(1, $max - 1), which is
    // an invalid range when $max == 1).
    $ret = array();
    $foundMax = false;
    for( $i = 0; $i < $count; $i++ )
    {
        $next = rand( 1, $max );
        if( $next == $max )
        {
            $foundMax = true;
        }
        $ret[] = $next;
    }
    // Enforce the documented contract: if no slot rolled the maximum,
    // overwrite one at random.
    if( !$foundMax && $count > 0 )
    {
        $ret[ array_rand( $ret ) ] = $max;
    }
    return $ret;
}
/**
 * Converts a MATH_DIFF_* difficulty constant to its arithmetic operator
 * string; unknown values (including MATH_DIFF_ROOT) map to ''.
 *
 * @param int $val Difficulty constant to convert
 * @return string The operator symbol, or '' when there is none
 */
function intToMathOperand( $val )
{
    $operators = array(
        MATH_DIFF_ADD  => '+',
        MATH_DIFF_SUB  => '-',
        MATH_DIFF_MULT => '*',
        MATH_DIFF_DIV  => '/'
    );
    return isset( $operators[ $val ] ) ? $operators[ $val ] : '';
}
// Grades the previous math answer (if any) and generates the next
// question plus a set of plausible bogus answers.
if( $action == 'nextmathquestion' )
{
    // Session vars used:
    //   NEXT_ANSWER: The correct answer that should come in next
    // If the user is answering the previous question, check if it is correct
    if( isset( $_SESSION[ 'NEXT_ANSWER' ] ) &&
        isset( $_REQUEST[ 'LAST_ANSWER' ] ) )
    {
        $nextAnswer = $_SESSION[ 'NEXT_ANSWER' ];
        $lastAnswer = $_REQUEST[ 'LAST_ANSWER' ];
        if( $nextAnswer == $lastAnswer )
        {
            echo( 'correctMathAnswer();' );
            // Also update the DB saying that the user got one
        }
        else
        {
            echo( "incorrectMathAnswer(\"$nextAnswer\");" );
        }
    }
    // Set up some variables
    $difficulty = $_REQUEST[ 'DIFFICULTY' ];
    $modifiers = $_REQUEST[ 'MODIFIERS' ];
    // Modifiers come from the user in a comma delimited string.
    // Fix: split() was removed in PHP 7; explode() is the direct equivalent.
    $modifierArray = explode( ',', $modifiers );
    // Each of the modifiers affects the difficulty, figure out which ones
    // the user wants to use
    $extraOperands = 0;
    $extraPrecision = 0;
    $useNegative = false;
    $useFractions = false;
    $useDecimals = false;
    foreach( $modifierArray as $modifier )
    {
        if( strpos( $modifier, 'negative' ) !== false )
        {
            // Allow negative values
            $useNegative = true;
        }
        elseif( strpos( $modifier, 'fraction' ) !== false )
        {
            // Allow fractions
            $useFractions = true;
        }
        elseif( strpos( $modifier, 'decimal' ) !== false )
        {
            // Allow decimals
            $useDecimals = true;
        }
        elseif( strpos( $modifier, 'extraprecision' ) !== false )
        {
            // Extra precision (for answers).
            // Fix: strpos() haystack/needle arguments were reversed, so the
            // value after '=' was never located correctly.
            $offset = strpos( $modifier, '=' ) + 1;
            $extraPrecision = intval( substr( $modifier, $offset ) );
        }
        elseif( strpos( $modifier, 'extraoperand' ) !== false )
        {
            // Extra operands.
            // Fix: reversed strpos() arguments, and the parsed value was
            // stored in $extraOperand (singular) so it never fed the
            // $extraOperands counter used below.
            $offset = strpos( $modifier, '=' ) + 1;
            $extraOperands = intval( substr( $modifier, $offset ) );
        }
    }
    // Now all of the data from what the user wants is available, time to turn
    // it into an actual usable string
    // Generate some random operators...ensuring we get at least one of the max
    $operandCount = 1 + $extraOperands;
    $operands = getRandomOperands( $difficulty, $operandCount );
    // Generate some random values
    $maxValue = pow( 10, 1 + $extraPrecision );
    $valueCount = $operandCount + 1;
    $values = array();
    for( $i = 0; $i < $valueCount; $i++ )
    {
        $values[] = rand( 1, $maxValue );
    }
    // Compile the values/operators into an expression string.
    // Fix: the operand codes are MATH_DIFF_* integers and were previously
    // concatenated directly (yielding e.g. "5 3 7"), which eval() could
    // not parse; convert them to operator symbols first.
    $output = '';
    for( $i = 0; $i < $operandCount; $i++ )
    {
        $output .= $values[ $i ] . ' ' . intToMathOperand( $operands[ $i ] ) . ' ';
    }
    $output .= $values[ $operandCount ];
    // Just eval it so we can get the answer. The expression is built
    // entirely from server-generated numbers and operators, never raw
    // user input.
    eval( "\$nextAnswer=$output;" );
    $_SESSION[ 'NEXT_ANSWER' ] = $nextAnswer;
    // Now generate some random answers
    // We do this by starting off with the correct answer, then deviate by
    // a small amount in either direction. If the new value is the same as
    // the previous it is in/decremented. Repeat 4 times.
    // NOTE(review): $deviation is an absolute 0.10-0.20 offset, though the
    // original comment said 10-20% — confirm the intended spread.
    $bogusAnswers = array( $nextAnswer );
    $minAnswer = $maxAnswer = $nextAnswer;
    for( $i = 0; $i < 4; $i++ )
    {
        $deviation = rand( 10, 20 ) / 100;
        if( rand( 0, 1 ) )
        {
            // Fix: the low branch previously ADDED the deviation, so the
            // "minimum" bogus answers drifted upward like the maximums.
            $lastMin = $minAnswer;
            $minAnswer = round( $minAnswer - $deviation, $extraPrecision );
            if( $lastMin == $minAnswer )
            {
                $minAnswer--;
            }
            $bogusAnswers[] = $minAnswer;
        }
        else
        {
            $lastMax = $maxAnswer;
            $maxAnswer = round( $maxAnswer + $deviation, $extraPrecision );
            if( $lastMax == $maxAnswer )
            {
                $maxAnswer++;
            }
            $bogusAnswers[] = $maxAnswer;
        }
    }
    // Shuffle the answers a bit
    shuffle( $bogusAnswers );
    // Encode the output properly
    $output = mysql_real_escape_string( $output );
    // And echo!
    echo( "nextMathQuestion('$output',[" . join( ',', $bogusAnswers) . ']);' );
}
?>
<file_sep><?php
/**
* @file ah_server.php
*
* Basic concept: Handle server related ajax commands
*
* Handled $action values:
* requestservers = User is requesting their list of servers
* requestfreeserver = User wants their first server for free
* viewserver = User wants to see all information about a server
* freeprograms = User is requesting their free programs
* startresearch = User wants to start researching a program
* finishprocess = User wants to finish a research that is running
* startdelete = User wants to delete a file
*
* Session vars:
* ID = Sets the ID into session to help control authorization
* username = Sets the Username into session to send to newuser2 during
* creation of account.
* password = Sets the Password into session to send to <PASSWORD> during
* creation of account.
* SERVER_ID = The ID of the server to take action on
* PROGRAM_ID = The ID of the program to take action on
*
* 1. If action was to request servers then we need to simply return the
* servers that belongs to the user
* 2. User want to request their free server, make sure they don't have one
* already then give them a free one
* 3. User wants to view a server
* 3a. For now we'll just make sure it's theirs and if it is they can view
* 3b. After this then we'll report back the server information followed
* by all programs and processes
* 4. User wants their free programs
* 4a. Ensure the user actually owns the server
* 4b. Ensure they don't have all of the free programs
* 5. User wants to research their program
* 5a. Check to make sure the program belongs to them
* 5b. Do a quick check if there's enough space after that research
* 5c. Also check to make sure there is enough space after all researches are
* done to hold this one as well
* 5d. Make sure it isn't being deleted
* 6. User wants to finish a process
* 6a. Make sure process belongs to server user owns
* 6b. Check if it is a research process
* 6b1. Make sure no circumstances have happened to cause the server to not be
* able to hold the research
* 6b2. Finish up the research and grant the version
* 7. User wants to cancel a process
* 7a. Make sure process belongs to server user owns
* 7b. Cancel the process
* 8. User wants to delete a file
* 8a. Make sure file belongs to user
* 8b. Make sure no other operations are being performed
* 8c. Start process
*/
require_once( 'private/users.php' );
require_once( 'private/servers.php' );
require_once( 'private/programs.php' );
require_once( 'private/processes.php' );
/**
* Validates that a server belongs to the user requesting the information
* @param int serverid The ID of the server
* @param Servers servers The Servers object
* @return array Server info from @see Servers::getServerByID
*/
function validateServerOwnership( $serverid, $servers )
{
$serverInfo = $servers->getServerByID( $serverid );
// Dies (via ahdie) when the server does not belong to the session user.
if( $serverInfo[ 'OWNER_ID' ] != $_SESSION[ 'ID' ] )
{
ahdie( 'You don\'t own this server nutmeg.' );
}
return $serverInfo;
}
/**
* Validates that a program belongs to the user requesting the information
* @param int programid The ID of the program
* @param Programs programs The Programs object
* @return array Program info from @see Programs::getProgramOwnerAndServerByID
*/
function validateProgramOwnership( $programid, $programs )
{
$programInfo = $programs->getProgramOwnerAndServerByID( $programid );
// Dies (via ahdie) when the lookup failed or the program's owning user
// is not the session user.
if( !isset( $programInfo[ 'USER_ID' ] ) ||
$programInfo[ 'USER_ID' ] != $_SESSION[ 'ID' ] )
{
ahdie( 'Performing action on program not owning.' );
}
return $programInfo;
}
/*********************************** STEP 1 ***********************************/
/*********************************** STEP 1 ***********************************/
// List all servers owned by the logged-in user.
if( $action == 'requestservers' )
{
// Setup some local variables
$id = $_SESSION[ 'ID' ]
;
$servers = new Servers();
// Now we simply need to get the 2D array from servers
$result = $servers->getServersByOwner( $id );
// Echo out the array
echo2DArray( 'ownedServers', 'noOwnedServers', $result );
}
/*********************************** STEP 2 ***********************************/
// Grant the user their first (free) server, but only if they have none.
elseif( $action == 'requestfreeserver' )
{
// User wants their free server, check if they have one already
$id = $_SESSION[ 'ID' ];
$servers = new Servers();
$ownerServers = $servers->getServersByOwner( $id );
// If they have a server then this will be an array...and not false
if( $ownerServers != false )
{
// NOTE(review): plain die() here, while every other rejection in this
// handler uses ahdie() — confirm whether this should match.
die( 'You already have servers.' );
}
// They don't have a server, great, give them one
$servers->addServer( $id );
// Now we simply need to get the 2D array from servers
$result = $servers->getServersByOwner( $id );
// If the user still doesn't have a server, this will return false
echo2DArray( 'ownedServers', 'noOwnedServers', $result );
}
/*********************************** STEP 3 ***********************************/
// Full server detail view: header info, then programs, then processes.
elseif( $action == 'viewserver' )
{
$id = $_REQUEST[ 'SERVER_ID' ];
$servers = new Servers();
$serverInfo = validateServerOwnership( $id, $servers );
// General Server Information plus layout the screen for programs/processes
echo 'beginServerView(' . cleanupRowForJS( $serverInfo ) . ');';
$programs = new Programs();
$allPrograms = $programs->getProgramsByServer( $id );
/*********************************** STEP 3b **********************************/
echo2DArray( 'serverPrograms', 'noServerPrograms', $allPrograms );
$processes = new Processes();
$allProcesses = $processes->getProcessesByServer( $id );
echo2DArray( 'serverProcesses', 'noServerProcesses', $allProcesses );
echo( 'endServerView();' );
}
/*********************************** STEP 4 ***********************************/
elseif( $action == 'freeprograms' )
{
    // Grant the four starter programs (firewall/password plus their
    // breakers) to a server, skipping any type the server already has.
    $serverid = $_REQUEST[ 'SERVER_ID' ];
    $servers = new Servers();
    $serverInfo = validateServerOwnership( $serverid, $servers );
    $programs = new Programs();
    $serverPrograms = $programs->getProgramsByServer( $serverid );
    /*********************************** STEP 4b **********************************/
    // Starter set: program type => initial size. The order here fixes the
    // argument order of grantedFreePrograms() below (FWD, FWB, PWD, PWB).
    $starterSet = array(
        PROGRAM_TYPE_FIREWALL        => FIREWALL_SIZE,
        PROGRAM_TYPE_FIREWALLBREAKER => FIREWALLBREAKER_SIZE,
        PROGRAM_TYPE_PASSWORD        => PASSWORD_SIZE,
        PROGRAM_TYPE_PASSWORDBREAKER => PASSWORDBREAKER_SIZE );
    // Which of the starter types the server already owns
    $owned = array();
    foreach( $serverPrograms as $serverProgram )
    {
        $type = $serverProgram[ 'TYPE' ];
        if( isset( $starterSet[ $type ] ) )
        {
            $owned[ $type ] = true;
        }
    }
    // Owning all four means there is nothing left to grant
    if( count( $owned ) == count( $starterSet ) )
    {
        ahdie( 'Stupid people trying to get what they already have!' );
    }
    // Alright, the person is eligible! Add each missing program at
    // version 1; already-owned slots report ID 0 so the client can tell
    // grants apart from skips.
    $newids = array();
    foreach( $starterSet as $type => $size )
    {
        if( isset( $owned[ $type ] ) )
        {
            $newids[] = 0;
        }
        else
        {
            $newids[] = $programs->addProgram( $serverid, $type, $size, 1 );
        }
    }
    // And now we tell the user
    echo 'grantedFreePrograms(' . implode( ',', $newids ) . ');';
}
/*********************************** STEP 5 ***********************************/
elseif( $action == 'startresearch' )
{
    // Start researching (upgrading) a program the session user owns.
    // Two capacity constraints: the finished research must fit on disk,
    // and the research process itself must fit in RAM right now.
    $programid = $_REQUEST[ 'PROGRAM_ID' ];
    $programs = new Programs();
    $servers = new Servers();
    $processes = new Processes();
    $programInfo = validateProgramOwnership( $programid, $programs );
    $userid = $programInfo[ 'USER_ID' ];   // NOTE(review): unused below
    $serverid = $programInfo[ 'SERVER_ID' ];
    $programtype = $programInfo[ 'TYPE' ];
    $serverInfo = $servers->getServerByID( $serverid );
    $serverConsumption = $processes->getConsumptionByServer( $serverid );
    $maxHDD = $serverInfo[ 'HDD' ];
    $maxRAM = $serverInfo[ 'RAM' ];
    $usedHDD = $programs->getServerUsage( $serverid );
    $usedRAM = $serverConsumption[ 'USED_RAM' ];
    $fileSize = getProgramSize( $programtype );
    /*********************************** STEP 5b **********************************/
    // Quick checks against current usage before the costlier pass below
    if( $fileSize + $usedHDD > $maxHDD )
    {
        echo( 'notEnoughFileSpace();' );
    }
    elseif( DEFAULT_RESEARCH_RAM + $usedRAM > $maxRAM )
    {
        echo( 'notEnoughRAM();' );
    }
    else
    {
        // Get all the processes that will increase the HDD usage on the
        // server, reserving their future disk so concurrent processes
        // cannot oversubscribe the drive
        $consumers = force2DArray(
            $processes->getHDDConsumersByServer( $serverid ) );
        foreach( $consumers as $consumer )
        {
            switch( $consumer[ 'OPERATION' ] )
            {
                case PROCESS_OP_COPY:
                case PROCESS_OP_TRANSFER:
                    // Incoming copy/transfer adds the program's full size
                    $usedHDD += $consumer[ 'SIZE' ];
                    break;
                case PROCESS_OP_RESEARCH:
                    // Pending research adds one type-sized increment
                    $usedHDD += getProgramSize( $consumer[ 'TYPE' ] );
                    break;
            }
        }
        /*********************************** STEP 5c **********************************/
        // Re-check disk with the reserved amounts included
        if( $usedHDD > $maxHDD )
        {
            echo( 'notEnoughFileSpace();' );
        }
        else
        {
            // A program already queued for deletion must not be researched
            $programProcesses = $processes->getProcessesByProgram( $programid );
            foreach( $programProcesses as $pp )
            {
                if( $pp[ 'OPERATION' ] == PROCESS_OP_DELETE )
                {
                    ahdie( 'No researching a deleting program, duh!' );
                }
            }
            // Alright, the user can research it
            $researchid = $processes->addProcess( $programid, $serverid,
                DEFAULT_RESEARCH_CPU, DEFAULT_RESEARCH_RAM, 0,
                PROCESS_OP_RESEARCH, DEFAULT_RESEARCH_TIME );
            // Echo back the new process so the client can show a countdown
            $result = $processes->getProcessByID( $researchid );
            $remainingCycles = $result[ 'CYCLES_REMAINING' ];
            echo( "startedResearch($programid,$researchid,$remainingCycles);" );
        }
    }
}
/*********************************** STEP 6 ***********************************/
elseif( $action == 'finishprocess' )
{
    // Complete a finished process on a server the session user owns.
    // Research applies the program upgrade (disk permitting); delete
    // removes the program. Any other operation is an error.
    // Get information about the process
    $processid = $_REQUEST[ 'PROCESS_ID' ];
    $processes = new Processes();
    $processInfo = $processes->getProcessByID( $processid );
    // Get information about the server owning the process
    $serverid = $processInfo[ 'OWNING_SERVER' ];
    $servers = new Servers();
    $serverInfo = validateServerOwnership( $serverid, $servers );
    // Look up the current HDD usage
    $programid = $processInfo[ 'TARGET_PROGRAM' ];
    $programs = new Programs();
    $programInfo = $programs->getProgramByID( $programid );
    $usedHDD = $programs->getServerUsage( $serverid );
    $maxHDD = $serverInfo[ 'HDD' ];
    /*********************************** STEP 6b **********************************/
    if( $processInfo[ 'OPERATION' ] == PROCESS_OP_RESEARCH )
    {
        $programtype = $programInfo[ 'TYPE' ];
        $fileSize = getProgramSize( $programtype );
        /*********************************** STEP 6b1 *********************************/
        // The upgraded program grows by one type-sized increment; refuse
        // if the disk can no longer hold it (the process stays pending)
        if( $fileSize + $usedHDD > $maxHDD )
        {
            echo( 'notEnoughFileSpace();' );
        }
        else
        {
            /*********************************** STEP 6b2 *********************************/
            // Drop the process first, then bump SIZE/VERSION on the program
            $processes->deleteProcess( $processid, $serverid );
            $programs->upgradeProgram( $programid, $programtype );
            echo "finishedResearch($processid);";
        }
    }
    elseif( $processInfo[ 'OPERATION' ] == PROCESS_OP_DELETE )
    {
        // Deletion completed: remove both the process and the program
        $processes->deleteProcess( $processid, $serverid );
        $programs->deleteProgram( $programid );
        echo "finishedDeletion($processid);";
    }
    else
    {
        ahdie( 'Unhandled operation...wtf...' );
    }
}
/*********************************** STEP 7 ***********************************/
elseif( $action == 'cancelprocess' )
{
    // Abort a running process after confirming the caller owns its server.
    $processid = $_REQUEST[ 'PROCESS_ID' ];
    $processes = new Processes();
    $procRow = $processes->getProcessByID( $processid );
    $owningServer = $procRow[ 'OWNING_SERVER' ];
    $servers = new Servers();
    validateServerOwnership( $owningServer, $servers );
    $processes->deleteProcess( $processid, $owningServer );
    echo "cancelledProcess($processid);";
}
/*********************************** STEP 8 ***********************************/
elseif( $action == 'startdelete' )
{
    // Begin deleting a program: spawn a PROCESS_OP_DELETE process whose
    // duration (in cycles) equals the program's size.
    $programid = $_REQUEST[ 'PROGRAM_ID' ];
    $programs = new Programs();
    $programInfo = $programs->getProgramByID( $programid );
    $serverid = $programInfo[ 'SERVER_ID' ];
    $servers = new Servers();
    $serverInfo = validateServerOwnership( $serverid, $servers );
    $processes = new Processes();
    $serverProcesses = $processes->getProcessesByProgram( $programid );
    /*********************************** STEP 8b **********************************/
    // Refuse while any process (research, copy, ...) still targets it
    if( !empty( $serverProcesses ) )
    {
        ahdie( 'Can\'t delete stuff that has processes running against it.' );
    }
    /*********************************** STEP 8c **********************************/
    $completionTime = $programInfo[ 'SIZE' ];
    // BUG FIX: the last addProcess() argument is a cycle count — compare
    // 'startresearch', which passes DEFAULT_RESEARCH_TIME. The old
    // "NOW()+$completionTime" string was a leftover from the dropped
    // COMPLETION_TIME datetime column (schema v9, removed v17) and let
    // MySQL evaluate NOW()+N into a bogus cycle total.
    $processid = $processes->addProcess( $programid, $serverid,
        DEFAULT_DELETION_CPU, DEFAULT_DELETION_RAM, 0, PROCESS_OP_DELETE,
        $completionTime );
    // Echo back the new process so the client can show a countdown
    $result = $processes->getProcessByID( $processid );
    $remainingCycles = $result[ 'CYCLES_REMAINING' ];
    echo "startedDeletion($programid,$processid,$remainingCycles);";
}
elseif( $action == 'exchangeprograms' )
{
    // Trade a researched program in for server stat upgrades and/or
    // gathering points. The requested upgrade steps must sum to exactly
    // version-1 so nothing is gained or lost in the exchange.
    $programid = $_REQUEST[ 'PROGRAM_ID' ];
    $programs = new Programs();
    $servers = new Servers();
    $users = new Users();
    $programInfo = validateProgramOwnership( $programid, $programs );
    $userid = $programInfo[ 'USER_ID' ];
    $serverid = $programInfo[ 'SERVER_ID' ];
    $version = $programInfo[ 'VERSION' ];
    $cpuUp = $_REQUEST[ 'CPU_UP' ];
    $ramUp = $_REQUEST[ 'RAM_UP' ];
    $hddUp = $_REQUEST[ 'HDD_UP' ];
    $bwUp = $_REQUEST[ 'BW_UP' ];
    $gpointUp = $_REQUEST[ 'GPOINTS_UP' ];
    // Every researched level must be spent somewhere, no more, no less
    if( $cpuUp + $ramUp + $hddUp + $bwUp + $gpointUp != $version - 1 )
    {
        ahdie( 'Upgrading something other than that file.' );
    }
    $servers->adjustAllStats( $serverid, $cpuUp * STEP_CPU,
                              $ramUp * STEP_RAM, $hddUp * STEP_HDD,
                              $bwUp * STEP_BW );
    // BUG FIX: Programs::deleteProgram() takes only the program ID; the
    // old call passed a stray $serverid second argument that PHP ignored.
    $programs->deleteProgram( $programid );
    $users->adjustGatheringPoints( $userid, $gpointUp );
    echo( "exchangedProgram($programid,$cpuUp,$ramUp,$hddUp,$bwUp);" );
}
else if( $action == 'changeservername' )
{
    // Rename an owned server, echoing the sanitized name back.
    $serverid = $_REQUEST[ 'SERVER_ID' ];
    $servers = new Servers();
    validateServerOwnership( $serverid, $servers );
    $newName = $_REQUEST[ 'NAME' ];
    $servers->updateName( $serverid, $newName );
    echo( "changedServerName($serverid," . cleanupRowForJS( array( $newName ) ) . ");" );
}
else if( $action == 'changeprogramname' )
{
    // Rename an owned program, echoing the sanitized name back.
    $programid = $_REQUEST[ 'PROGRAM_ID' ];
    $programs = new Programs();
    validateProgramOwnership( $programid, $programs );
    $newName = $_REQUEST[ 'NAME' ];
    $programs->updateName( $programid, $newName );
    echo( "changedProgramName($programid," .
          cleanupRowForJS( array( $newName ) ) . ");" );
}
// Update the processes and the server accordingly.
// Shared post-dispatch step: any handler above that instantiated both
// Servers and Processes gets the CPU ratios of its touched servers
// recalculated and pushed back to the client.
if( isset( $servers ) && isset( $processes ) )
{
    $modifiedServers = $processes->getModifiedServers();
    if( !empty( $modifiedServers ) )
    {
        $infos = $servers->getCPUInfoForServers( $modifiedServers );
        // (removed unused $onlyUpdate = array(); left over from an
        // abandoned batching approach)
        foreach( $infos as $serverInfo )
        {
            // Re-split CPU between this server's processes and persist it
            $updateArray = $processes->calculateServerRatio( $serverInfo[ 'ID' ],
                                                             $serverInfo[ 'CPU' ] );
            $servers->updateCPUInfo( $serverInfo[ 'ID' ], $updateArray );
            $lastUpdateTime = $processes->getLastUpdateTime();
            echo( "lastServerUpdateTime($lastUpdateTime);" );
        }
    }
}
?><file_sep><?php
/*
* @TODO: Finish run program
* @TODO: Custom program names
* @TODO: Create NPC servers
* @TODO: Add virus table, type, GUI
* @TODO: Run virus
*/
require_once( 'private/defs.php' );
?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title></title><?php
function writeCSS( $filebase )
{
    // Emit a <link> tag whose href is served through the client-file proxy.
    $href = clientfile_buildRequest( 'C', $filebase );
    echo "<link rel='stylesheet' type='text/css' href='$href'>";
}
function writeJS( $filebase )
{
    // Emit a <script> tag whose src is served through the client-file proxy.
    $src = clientfile_buildRequest( 'J', $filebase );
    echo "<script type='text/javascript' src='$src'></script>";
}
// Stylesheets first (jQuery UI/layout/qtip, then the site's own sheet)...
foreach( array( JQUERY_UI_CSS, JQUERY_LAYOUT_CSS, JQUERY_QTIP, 'main' ) as $css )
{
    writeCSS( $css );
}
// ...then every jQuery script, followed by the plugin bundle and main script.
foreach( $GLOBALS['JQUERY_FILES'] as $jqueryjs )
{
    writeJS( $jqueryjs );
}
writeJS('plugins');
writeJS('main');
?></head><body><script type='text/javascript'>$(document).ready(function(){
//doLogin();
indexSetup();
});</script></body></html><file_sep><?php
/**
* Basic concept: Interface to the Programs MySQL table
*
* Uses:
*
*/
require_once( 'MySqlObject.php' );
/**
 * Data-access object for the PROGRAMS table (one row per program
 * installed on a server). Inherits generic CRUD from MySQLObject.
 */
class Programs extends MySQLObject
{
    // Column order matters: MySQLObject::getSingle() keys off the first one.
    function getColumns( )
    {
        return array( 'ID', 'SERVER_ID', 'CUSTOM_NAME', 'TYPE', 'SIZE',
                      'VERSION' );
    }
    function getTableName( )
    {
        return 'PROGRAMS';
    }
    // Inserts a new program row. 'NULL' lets MySQL auto-assign the ID and
    // '""' starts the custom name empty. Returns the new program's ID.
    function addProgram( $serverid, $type, $size, $version )
    {
        return $this->insert( array( 'NULL', $serverid, '""', $type, $size,
                                     $version ) );
    }
    // Sets a user-chosen display name (escaped against SQL injection).
    function updateName( $programid, $newname )
    {
        $newname = '"' . mysql_real_escape_string( $newname ) . '"';
        return $this->update( array( 'CUSTOM_NAME' => $newname ),
                              array( 'ID' => $programid ) );
    }
    // All programs installed on one server, oldest first.
    function getProgramsByServer( $serverid )
    {
        return $this->get( array( 'SERVER_ID' => $serverid ),
                           array( 'ID' => 'ASC' ) );
    }
    function getAllPrograms( )
    {
        return $this->get( NULL, array( 'ID' => 'ASC' ) );
    }
    // Research finished: bump VERSION and grow SIZE by the type's base size.
    function upgradeProgram( $id, $type )
    {
        $sizeIncrease = getProgramSize( $type );
        return $this->update( array('SIZE' => "SIZE+$sizeIncrease",
                                    'VERSION' => 'VERSION+1'),
                              array('ID' => $id ));
    }
    function deleteProgram( $id )
    {
        return $this->delete( array( 'ID' => $id ) );
    }
    function getProgramByID( $programid )
    {
        return $this->getSingle( $programid );
    }
    // Returns program owner (just the USER_ID from the full join below)
    function getProgramOwnerByID( $programid )
    {
        $temp = $this->getProgramOwnerAndServerByID( $programid );
        return $temp[ 'USER_ID' ];
    }
    // Total disk used on a server by its programs.
    // NOTE(review): SUM() yields NULL (not 0) when the server has no
    // programs — confirm callers cope with a NULL return.
    function getServerUsage( $serverid )
    {
        $ret = $this->get( array( 'SERVER_ID' => $serverid ), NULL, 1,
                           array( 'SUM(SIZE) AS TOTAL_SIZE' ) );
        return $ret[ 0 ][ 'TOTAL_SIZE' ];
    }
    // Returns 2D array [ userID, serverID, programInfo... ]
    // Joins PROGRAMS -> SERVERS -> USERS so a single query yields the
    // owning user, the hosting server, and every program column.
    // $programid is intval()'d since it is interpolated into the SQL.
    function getProgramOwnerAndServerByID( $programid )
    {
        $programid = intval( $programid );
        return $this->getCustom( 'SELECT U.ID AS USER_ID, S.ID AS SERVER_ID,' .
                                 'P.* FROM PROGRAMS AS P INNER JOIN SERVERS ' .
                                 'AS S ON ' .
                                 'P.SERVER_ID=S.ID INNER JOIN USERS AS U ON ' .
                                 "U.ID=S.OWNER_ID WHERE P.ID=$programid" );
    }
}
?><file_sep><?php
/**
* Basic concept: Check all MySQL databases
*
* 1. Rebuild system table if needed
* 2. Upgrade tables based on what level the system version is
* 3. Update the system table
*/
/*********************************** STEP 1 ***********************************/
require_once 'defs.php';
require_once 'users.php';
// Ordered migration list: $version[N] upgrades the schema from N to N+1.
// The driver below runs these sequentially starting at the stored version,
// so entries must never be reordered or edited once deployed — only append.
$version = array();
$version[ 0 ] = "DROP TABLE IF EXISTS SYSTEM";
$version[ 1 ] = "CREATE TABLE `SYSTEM` ( \n" .
                "`DUMMY_ID` int(10) unsigned NOT NULL AUTO_INCREMENT,\n" .
                "`VERSION` int(10) unsigned NOT NULL,\n" .
                "PRIMARY KEY (`DUMMY_ID`)\n" .
                ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 2 ] = "DROP TABLE IF EXISTS USERS, SERVERS";
$version[ 3 ] = "CREATE TABLE `USERS` (\n" .
                "`ID` int(10) unsigned NOT NULL AUTO_INCREMENT,\n" .
                "`NICK` varchar(20) NOT NULL,\n" .
                "`PASSWORD` varchar(40) NOT NULL,\n" .
                "`EMAIL` varchar(40) NOT NULL,\n" .
                "PRIMARY KEY (`ID`)\n" .
                ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 4 ] = "CREATE TABLE `SERVERS` (\n" .
                "`ID` int(10) unsigned NOT NULL AUTO_INCREMENT,\n" .
                "`OWNER_ID` int(10) unsigned NOT NULL,\n" .
                "`IP` int(10) unsigned NOT NULL,\n" .
                "`CPU` int(10) unsigned NOT NULL,\n" .
                "`RAM` int(10) unsigned NOT NULL,\n" .
                "`HDD` int(10) unsigned NOT NULL,\n" .
                "`BANDWIDTH` int(10) unsigned NOT NULL,\n" .
                "PRIMARY KEY (`ID`)\n" .
                ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 5 ] = "DROP TABLE IF EXISTS PROGRAMS, PROCESSES";
$version[ 6 ] = "CREATE TABLE `PROGRAMS` (\n" .
                "`ID` int(10) unsigned NOT NULL AUTO_INCREMENT,\n" .
                "`SERVER_ID` int(10) unsigned NOT NULL,\n" .
                "`TYPE` int(10) unsigned NOT NULL,\n" .
                "`SIZE` int(10) unsigned NOT NULL,\n" .
                "PRIMARY KEY (`ID`)\n" .
                ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 7 ] = "CREATE TABLE `PROCESSES` (\n" .
                "`ID` int(10) unsigned NOT NULL AUTO_INCREMENT,\n" .
                "`TARGET_PROGRAM` int(10) unsigned NOT NULL,\n" .
                "`OWNING_SERVER` int(10) unsigned NOT NULL,\n" .
                "`CPU_USAGE` int(10) unsigned NOT NULL,\n" .
                "`RAM_USAGE` int(10) unsigned NOT NULL,\n" .
                "`BW_USAGE` int(10) unsigned NOT NULL,\n" .
                "`OPERATION` int(10) unsigned NOT NULL,\n" .
                "PRIMARY KEY (`ID`)\n" .
                ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 8 ] = "ALTER TABLE PROGRAMS ADD COLUMN `VERSION` int(10) unsigned " .
                "NOT NULL AFTER SIZE";
$version[ 9 ] = "ALTER TABLE PROCESSES ADD COLUMN `COMPLETION_TIME` " .
                "datetime NOT NULL AFTER `OPERATION`";
$version[ 10 ] = "ALTER TABLE PROCESSES ADD COLUMN `LINKED_ID` int(10) " .
                 "unsigned NOT NULL AFTER `COMPLETION_TIME`";
$version[ 11 ] = "ALTER TABLE PROCESSES MODIFY COLUMN `COMPLETION_TIME` " .
                 "bigint unsigned NOT NULL";
$version[ 12 ] = "ALTER TABLE USERS ADD COLUMN `FLAGS` bigint unsigned " .
                 "NOT NULL DEFAULT 0 AFTER `EMAIL`";
$version[ 13 ] = "ALTER TABLE USERS MODIFY COLUMN `PASSWORD` char(41) NOT NULL";
// NOTE(review): this statement looks redacted/broken — `<PASSWORD>` is not
// a valid column reference. Presumably it originally re-hashed the plain
// PASSWORD column via MySQL's PASSWORD() function; confirm before running
// against a fresh database.
$version[ 14 ] = "UPDATE USERS SET `PASSWORD` = PASSWORD(`<PASSWORD>`)";
$version[ 15 ] = "ALTER TABLE SERVERS ADD COLUMN `LAST_UPDATE_TIME` datetime " .
                 "NOT NULL AFTER `BANDWIDTH`";
$version[ 16 ] = "ALTER TABLE SERVERS ADD COLUMN `OPERATING_RATIO` " .
                 "decimal(8,4) NOT NULL AFTER `LAST_UPDATE_TIME`";
// v17-19: datetime-based completion replaced by cycle counters
$version[ 17 ] = "ALTER TABLE PROCESSES DROP COLUMN `COMPLETION_TIME`";
$version[ 18 ] = "ALTER TABLE PROCESSES ADD COLUMN `CYCLES_COMPLETED` " .
                 "bigint unsigned NOT NULL AFTER `LINKED_ID`";
$version[ 19 ] = "ALTER TABLE PROCESSES ADD COLUMN `CYCLES_REMAINING` " .
                 "bigint unsigned NOT NULL AFTER `CYCLES_COMPLETED`";
$version[ 20 ] = "ALTER TABLE SERVERS MODIFY COLUMN `LAST_UPDATE_TIME` " .
                 "bigint unsigned NOT NULL";
$version[ 21 ] = "ALTER TABLE SERVERS ADD COLUMN `CUSTOM_NAME` varchar(15) " .
                 "NOT NULL AFTER `IP`";
$version[ 22 ] = "DROP TABLE IF EXISTS ERRORS";
$version[ 23 ] = "CREATE TABLE `ERRORS` (\n" .
                 "`ERROR_TIME` bigint unsigned NOT NULL,\n" .
                 "`DESCRIPTION` text NOT NULL,\n" .
                 "`POST_DATA` text NOT NULL,\n" .
                 "`SESSION_DATA` text NOT NULL,\n" .
                 "`IP` varchar(20) NOT NULL\n" .
                 ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 24 ] = "CREATE TABLE `SOLVED_MATH` (\n" .
                 "`USER_ID` int(10) unsigned NOT NULL,\n" .
                 "`DIFFICULTY` int(10) unsigned NOT NULL,\n" .
                 "`DATE_ACCOMPLISHED` int(10) unsigned NOT NULL,\n" .
                 "`HOUR_ACCOMPLISHED` int(10) unsigned NOT NULL,\n" .
                 "`COUNT` int(10) unsigned NOT NULL,\n" .
                 "PRIMARY KEY(USER_ID, DIFFICULTY, DATE_ACCOMPLISHED," .
                 "HOUR_ACCOMPLISHED)) ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 25 ] = "ALTER TABLE `SOLVED_MATH` ADD UNIQUE INDEX (USER_ID," .
                 "DIFFICULTY,DATE_ACCOMPLISHED,HOUR_ACCOMPLISHED)";
$version[ 26 ] = "ALTER TABLE PROGRAMS ADD COLUMN `CUSTOM_NAME` varchar(15) " .
                 "NOT NULL AFTER `SERVER_ID`";
$version[ 27 ] = "CREATE TABLE `USER_DISABLED_MODULES` (\n" .
                 "`USER_ID` int(10) unsigned NOT NULL,\n" .
                 "`MODULE_NAME` varchar(20) NOT NULL,\n" .
                 "`DISABLE_TIME` bigint unsigned NOT NULL\n," .
                 "UNIQUE KEY(USER_ID,MODULE_NAME)" .
                 ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
$version[ 28 ] = "ALTER TABLE `USERS` ADD COLUMN `GATHERING_POINTS` bigint " .
                 "unsigned NOT NULL AFTER `EMAIL`";
$version[ 29 ] = "CREATE TABLE `ITEM_TYPES` (\n" .
                 "`ID` int(10) unsigned NOT NULL AUTO_INCREMENT,\n" .
                 "`NAME` varchar(20) NOT NULL,\n" .
                 "`DESCRIPTION` TEXT NOT NULL,\n" .
                 "`MIN_PROB` int(10) unsigned NOT NULL,\n" .
                 "`MAX_PROB` int(10) unsigned NOT NULL,\n" .
                 "PRIMARY KEY(`ID`)\n" .
                 ") ENGINE = MyISAM DEFAULT CHARSET=latin1";
// Perform actual query to find out what tables MySQL has
$allTablesResult = mysql_query( 'SHOW TABLES FROM ' . DB_NAME );
if( !$allTablesResult )
{
    die( 'Failed to show tables.' . mysql_error() );
}
// Scan the table list for SYSTEM (case-insensitive). The loop breaks as
// soon as it is found, so $allTables may hold only a partial list — it is
// not used after this point.
$foundSystem = false;
$allTables = array();
while( $row = mysql_fetch_row( $allTablesResult ) )
{
    $allTables[] = $row[ 0 ];
    if( strcasecmp( $row[ 0 ], 'system' ) == 0 )
    {
        $foundSystem = true;
        break;
    }
}
/*********************************** STEP 2 ***********************************/
// Decide which migration to start from: 0 when the SYSTEM table is missing
// or holds no version row, otherwise the stored version number.
if( !$foundSystem )
{
    $startVersion = 0;
}
else
{
    $versionResult = mysql_query( 'SELECT VERSION FROM SYSTEM' );
    if( !$versionResult )
    {
        die( 'Failed to get version with system table available.' );
    }
    $row = mysql_fetch_row( $versionResult );
    // An empty SYSTEM table means a rebuild from scratch
    if( !is_array( $row ) )
    {
        $startVersion = 0;
        echo "Couldn't find a valid version, setting version to 0. ";
    }
    else
    {
        $startVersion = $row[ 0 ];
    }
}
/*********************************** STEP 3 ***********************************/
// Apply every migration from $startVersion up. $actualVersion counts how
// many statements actually succeeded: the for-loop increments run AFTER the
// body, so a break on failure leaves it at the last good version.
$rowCount = count( $version );
$actualVersion = $startVersion;
if( $rowCount == $actualVersion )
{
    die( "No update needed. Already at version $actualVersion." );
}
for( $i = $startVersion; $i < $rowCount; $i++, $actualVersion++ )
{
    $query = $version[ $i ];
    $result = mysql_query( $query );
    if( !$result )
    {
        echo "Failed to perform query: $query\n";
        echo mysql_error();
        break;
    }
}
/*********************************** STEP 4 ***********************************/
// Record the new version. If NO statement succeeded, the insert is skipped
// and $result still holds the failed query's false, so the script dies.
if( $actualVersion != $startVersion )
{
    $result = mysql_query( "INSERT INTO SYSTEM VALUES( 1, $actualVersion ) " .
                           "ON DUPLICATE KEY UPDATE VERSION=$actualVersion" );
}
if( !$result )
{
    die( 'Failed to update system at end.' );
}
echo( "UPDATE SUCCESSFUL!!! Updated from $startVersion to $actualVersion." );
?><file_sep><?php
require_once( 'private/userdisabledmodules.php' );
if( $action == 'opt_request' )
{
    // User wants to know their options! Life and death.
    // Reports which optional UI modules are enabled/disabled for the user.
    $userid = $_SESSION[ 'ID' ];
    $allModules = opt_getValidModules();
    $userDisabledModules = new UserDisabledModules();
    // Get the disabled modules
    $disabledModules = $userDisabledModules->getDisabledModules( $userid );
    // A single row comes back flat (no [0] index — see
    // MySQLObject::getCustom's flattening); re-wrap it so the loop below
    // always sees a 2-D array.
    if( !isset( $disabledModules[ 0 ] ) && count( $disabledModules ) )
    {
        $disabledModules = array( $disabledModules );
    }
    $disabledModuleNames = array();
    $disabledModuleString = '';
    // Build "[row],[row],..." for the JS side, collecting names as we go
    foreach( $disabledModules as $disabledModule )
    {
        $disabledModuleNames[] = $disabledModule[ 'MODULE_NAME' ];
        $disabledString = cleanupRowForJS( $disabledModule );
        $disabledModuleString .= "[$disabledString],";
    }
    $disabledModuleString = rtrim( $disabledModuleString, ',' );
    // Diff the disabled from all to get the enabled modules
    $enabledModules = array_diff( $allModules, $disabledModuleNames );
    echo 'opt_modules([' . cleanupRowForJS( $enabledModules ) . '],[' .
         $disabledModuleString . ']);';
}
elseif( $action == 'opt_disablemodules' )
{
    // User wants to disable some modules
    // Variables from the request variables (MODULES is comma-separated)
    $userid = $_SESSION[ 'ID' ];
    $moduleString = $_REQUEST[ 'MODULES' ];
    $moduleArray = explode( ',', $moduleString );
    // Die if the module array is empty
    if( empty( $moduleArray ) )
    {
        ahdie( 'Empty array for disabling modules.' );
    }
    // Reject any requested name that is not a known module
    $invalidModules = array_diff( $moduleArray, opt_getValidModules() );
    if( count( $invalidModules ) )
    {
        ahdie( "Invalid Modules:" . implode( ',', $invalidModules ) );
    }
    // Disable the modules
    $userDisabledModules = new UserDisabledModules();
    $disabled = $userDisabledModules->disableModules( $userid, $moduleArray );
    // Inform the user how many modules were disabled
    echo( "disabledModules($disabled);" );
}
elseif( $action == 'opt_enablemodules' )
{
    // User wants to enable some modules
    // NOTE(review): unlike opt_disablemodules there is no check here that
    // every requested name is a valid module — confirm enableModules()
    // tolerates unknown names.
    // Variables from the request variables (MODULES is comma-separated)
    $userid = $_SESSION[ 'ID' ];
    $moduleString = $_REQUEST[ 'MODULES' ];
    $moduleArray = explode( ',', $moduleString );
    // Die if the module array is empty
    if( empty( $moduleArray ) )
    {
        ahdie( 'Empty array for disabling modules.' );
    }
    // Enable the modules
    $userDisabledModules = new UserDisabledModules();
    $enabled = $userDisabledModules->enableModules( $userid, $moduleArray );
    // Get the list of valid modules
    $validModules = opt_getValidModules();
    // Inform the user how many modules were enabled
    echo( "enabledModules($enabled);" );
    // Stream each re-enabled module's assets so it works without a reload.
    // $validModules is indexed by module name here, mapping to that
    // module's CSS sheets; the module's JS is derived from its name.
    foreach( $moduleArray as $module )
    {
        $base = strtolower( $module );
        $cssarray = $validModules[ $module ];
        if( !empty( $cssarray ) )
        {
            foreach( $cssarray as $csssheet )
            {
                echo( "addStylesheet('" .
                      clientfile_buildRequest( 'C', $csssheet ) . "');" );
            }
        }
        echo( "addScriptElement('" . clientfile_buildRequest( 'J', $base) .
              "');" );
    }
}
?><file_sep><?php
/**
* Basic concept: Provides a low-level class for all MySQL objects
*
* Uses:
* get() : Gets values(2-D array) from DB based on parameters
* insert() : Adds values to DB
* delete() : Delete values in DB
* update() : Update values in DB
* getSingle() : Gets a single entry (1-D array) from DB by index
* getMinimizedCreator() : Gets the creator in a minimized format
* minimizeCreator() : Minimizes the input parameter to compress it
* escapifyString() : Escapes a string for use in the DB
 * getOnlyColumn() : Gets single column(1-D array) based on parameters
* getTypedResult() : Extracts rows from a result with proper types set
*
* Abstracts:
* getColumns() : Return an array with column names in the values
* getTableName() : Return the table name
*
* Todo:
* Expand get to allow OR in the WHERE
*/
abstract class MySQLObject
{
    /**
     * Overload to provide low level with all of the columns
     * @return array Names for each column; the FIRST entry is treated as
     *               the primary ID column by getSingle()/adjustSingleByID()
     */
    abstract protected function getColumns();
    /**
     * Overload to provide low level with the name of the table
     * @return string Table name
     */
    abstract protected function getTableName();
    /**
     * Determines if SQL statements should die on failure.
     * When false, failed queries return an empty array instead of dying.
     */
    public static $dieOnFailure = True;
    /**
     * Simplifies performing a SELECT statement
     *
     * @param array $filters key/value for column name/value
     * @param array $orders key for each column, value is ASC or DESC
     * @param int $limit Integer to limit the number of results
     * @param array $onlyColumns Set for only specific column, NULL means *
     * @param int $offset Integer to offset the results by
     * @param array $groupby Array of values to group on
     * @return array Typed rows (@see getTypedResult); empty array on query
     *               failure when $dieOnFailure is off
     */
    public function get( $filters = NULL, $orders = NULL, $limit = 0,
                         $onlyColumns = NULL, $offset = 0, $groupby = NULL )
    {
        $sql = 'SELECT ';
        // Only specific columns to pull (spliced in verbatim, which is how
        // callers pass expressions like "SUM(SIZE) AS TOTAL_SIZE")
        if( !is_array( $onlyColumns ) )
        {
            $sql .= '* ';
        }
        else
        {
            $sql .= implode( ', ', $onlyColumns ) . ' ';
        }
        // Table name
        $sql .= 'FROM `' . $this->getTableName() . '` ';
        // Filters
        $sql .= $this->arrayToFilterString( $filters );
        // Grouping
        if( is_array( $groupby ) )
        {
            $sql .= 'GROUP BY ' . implode( ', ', $groupby ) . ' ';
        }
        // Ordering
        if( is_array( $orders ) && count( $orders ) > 0 )
        {
            $sql .= 'ORDER BY ';
            $orderNames = array_keys( $orders );
            for( $i = 0; $i < count( $orders ); $i++ )
            {
                $orderName = $orderNames[$i];
                $sql .= "$orderName {$orders[$orderName]} ";
                if( $i < count( $orders ) - 1 )
                {
                    $sql .= ', ';
                }
            }
        }
        // Offset/Limit — MySQL "LIMIT offset, count" form. Note that an
        // offset with no limit emits "LIMIT $offset", which MySQL reads as
        // a row COUNT, so pass both when offsetting.
        if( $limit || $offset )
        {
            $sql .= "LIMIT $offset";
            if( $limit > 0 )
            {
                $sql .= ", $limit";
            }
        }
        // SQL statement is done, run it!
        $result = mysql_query( $sql );
        if( !$result )
        {
            if( MySQLObject::$dieOnFailure )
            {
                die( 'MySQL Query Error: ' . mysql_error() . "\n$sql" );
            }
            return array( );
        }
        return $this->getTypedResult( $result );
    }
    /**
     * Simplifies performing an INSERT statement.
     * Values are not checked for consistency, instead an error will be thrown
     * if the insert fails. An optional parameter exists that allows for a
     * 'ON DUPLICATE KEY UPDATE' clause to be appended to the INSERT clause.
     * If the parameter is set then the UPDATE clause is populated by the
     * key/value pairs from the parameter. If the first parameter has arrays
     * as children then each of the arrays will be concatenated together
     * properly.
     *
     * Values must already be SQL-safe (quoted/escaped) — they are spliced
     * into the statement verbatim.
     *
     * @param array $values The values to insert
     * @param array $duplicatepairs If set, will fill ON DUPLICATE KEY UPDATE
     * @return int The last insert ID. NOTE(review): on failure with
     *             $dieOnFailure off this returns array() — an inconsistent
     *             type; confirm no caller relies on it.
     */
    public function insert( $values, $duplicatepairs = NULL )
    {
        // Make sure that values is an array that has arrays as children
        // (allows both a single row and a batch of rows)
        if( !is_array( $values[ 0 ] ) )
        {
            $values = array( $values );
        }
        // Now convert each of the array children to a "(v1,v2,...)" string
        foreach( $values as &$child )
        {
            if( is_array( $child ) )
            {
                $child = '(' . implode( ',', $child ) . ')';
            }
        }
        // Build the statement
        $sql = 'INSERT INTO ' . $this->getTableName() . ' VALUES' .
               implode( ',', $values );
        // Check if UPDATE clause requested
        if( is_array( $duplicatepairs ) && count( $duplicatepairs ) )
        {
            $sql .= ' ON DUPLICATE KEY UPDATE ' .
                    $this->createPairString( $duplicatepairs, ',' );
        }
        // Execute the query
        $result = mysql_query( $sql );
        if( !$result )
        {
            if( MySQLObject::$dieOnFailure )
            {
                die( 'MySQL Query Error: ' . mysql_error() . "\n$sql" );
            }
            return array();
        }
        return mysql_insert_id();
    }
/**
* Simplifies performing a DELETE statement
* @param array $filters Uses @see arrayToFilterString, NULL is *bad*
* @return int Number of rows deleted
*/
public function delete( $filters )
{
$sql = 'DELETE FROM ' . $this->getTableName();
$sql .= $this->arrayToFilterString( $filters );
$result = mysql_query( $sql );
if( !$result )
{
if( MySQLObject::$dieOnFailure )
{
die( 'MySQL Query Error: ' . mysql_error() . "\n$sql" );
}
return array( );
}
return mysql_affected_rows();
}
    /**
     * Simplifies performing an UPDATE statement
     * @param array $values The key/values to update (strings must be escaped)
     * @param array $conditions The filter (@see arrayToFilterString);
     *                          NULL updates every row in the table
     * @return int Number of rows affected
     */
    public function update( $values, $conditions = NULL )
    {
        $sql = 'UPDATE ' . $this->getTableName() . ' SET ';
        $valueKeys = array_keys( $values );
        // Build the update sequence. Values go in verbatim, which is what
        // lets callers pass relative expressions such as "SIZE+5".
        for( $i = 0; $i < count( $values ); $i++ )
        {
            $valueKey = $valueKeys[$i];
            $value = $values[$valueKey];
            $sql .= "$valueKey=$value ";
            if( $i < count( $values ) - 1 )
            {
                $sql .= ', ';
            }
        }
        // Add conditions
        $sql .= $this->arrayToFilterString( $conditions );
        $result = mysql_query( $sql );
        if( !$result )
        {
            if( MySQLObject::$dieOnFailure )
            {
                die( 'MySQL Query Error: ' . mysql_error() . "\n$sql" );
            }
            return array( );
        }
        return mysql_affected_rows();
    }
/**
* Utilizes @see getColumns to get a row based on the first column.
*
* @param int $value The ID in the first column to search for
* @return array The row with the ID in the first column
*/
public function getSingle( $value )
{
$columns = $this->getColumns();
$columnStr = $columns[0];
$ret = $this->get( array( $columnStr => $value ), NULL, 1 );
if( count( $ret ) == 0 )
{
return false;
}
return $ret[0];
}
/**
* Transforms a string into a MySQL friendly string
*
* @param string $input String to escapify
* @return string Escapified string (includes quotes 'input')
*/
protected function escapifyString( $input )
{
return "'" . mysql_real_escape_string( $input ) . "'";
}
/**
* Gets only a single column from the table
*
* @param string $columnName Name of the column to retrieve from the table
* @param string $order ASC or DESC
* @param int $limit Limit the number of results, 0 for no limit
* @param array $filters The key/values to filter on
* @return array Single array with each column's value
*/
public function getOnlyColumn( $columnName, $order = 'DESC', $limit = 0,
$filters = NULL )
{
$sql = "SELECT `$columnName` from `" . $this->getTableName() . '` ';
// Add filters
$sql .= $this->arrayToFilterString( $filters );
// Add ordering
$sql .= " ORDER BY `$columnName` ";
if( $order == 'ASC' )
{
$sql .= $order;
}
else
{
$sql .= 'DESC';
}
if( $limit > 0 )
{
$sql .= " LIMIT $limit";
}
$result = mysql_query( $sql );
if( !$result )
{
if( MySQLObject::$dieOnFailure )
{
die( 'MySQL Query Error: ' . mysql_error() . "\n$sql" );
}
return array();
}
$ret = array( );
while( $row = mysql_fetch_assoc( $result ) )
{
$ret[] = $row[0];
}
return $ret;
}
    /**
     * Performs a custom SQL statement
     *
     * @param string $sql Query to perform
     * @return array Piped through @see getTypedResult. NOTE: a result with
     *               exactly one row is flattened to that row (1-D array);
     *               callers must cope with both shapes.
     */
    public static function getCustom( $sql )
    {
        $result = mysql_query( $sql );
        if( !$result )
        {
            if( MySQLObject::$dieOnFailure )
            {
                die( 'MySQL Query Error: ' . mysql_error() . "\n$sql" );
            }
            return array();
        }
        $ret = MySQLObject::getTypedResult( $result );
        // Single-row convenience flattening (see @return note above)
        if( count( $ret ) == 1 )
        {
            $ret = $ret[0];
        }
        return $ret;
    }
/**
* Converts an array of filters into a string. Takes a key/value array as
* input and returns a WHERE key=value clause. Accepts arrays as values
* and appropriately converts clause to key IN (values...).
*
* @param array $filters Key/values to filter on
* @return string WHERE clause or empty string
*/
private function arrayToFilterString( $filters )
{
// Filters
if( is_array( $filters ) && count( $filters ) > 0 )
{
return ' WHERE ' . $this->createPairString( $filters, 'AND', true );
}
return '';
}
    /**
     * Converts an array of key/value pairs into a string. Takes a key/value
     * array as input and returns a key=value clause. If key IN(values...) is
     * required set the second parameter to non-NULL.
     *
     * @param array $pairs Key/values to populate string with
     * @param string $delimiter Delimiter to go between each pair
     * @param boolean $parseIn Set to true to parse key IN(values...)
     * @return string Constructed clause
     */
    private function createPairString( $pairs, $delimiter, $parseIn = NULL )
    {
        $sql = '';
        // Make sure $pairs is a valid array or just return blank
        if( is_array( $pairs ) && count( $pairs ) > 0 )
        {
            // Extract the keys for easier use then iterate over the array
            $pairKeys = array_keys( $pairs );
            for( $i = 0; $i < count( $pairs ); $i++ )
            {
                // Get the key and the value
                $pairKey = $pairKeys[ $i ];
                $pair = $pairs[ $pairKey ];
                // If the sub key is an array and parseIn is set to true, then
                // add a IN() clause, otherwise skip it.
                // NOTE(review): when $pair is an array but $parseIn is false
                // the pair is skipped yet the delimiter below is still
                // appended, leaving a dangling delimiter in the clause —
                // confirm no caller ever hits this combination.
                if( is_array( $pair ) )
                {
                    if( $parseIn )
                    {
                        $sql .= "$pairKey IN (" . join( ',', $pair ) . ') ';
                    }
                }
                else
                {
                    $sql .= "$pairKey=$pair ";
                }
                // Add the delimiter if this isn't the last element
                if( $i < count( $pairs ) - 1 )
                {
                    $sql .= "$delimiter ";
                }
            }
            return $sql;
        }
        return '';
    }
/**
* Adjusts a single values based on its rows' ID.
* @param int $id The value to search for in the first column
* @param string $field The field to update
* @param int $amount The amount to increase the field's value by
* @return array @see update
*/
protected function adjustSingleByID( $id, $field, $amount )
{
$columns = $this->getColumns();
$indexStr = $columns[0];
return $this->update( array( $field => "$field+$amount" ),
array( $indexStr => $id ) );
}
/**
* Gets all values from a table
*
* @param string $tableName The name of the table to retrieve
* @return array @see getCustom
*/
public static function getAll( $tableName )
{
return MySQLObject::getCustom( "SELECT * FROM `$tableName`" );
}
/**
* Utility function to return an array properly formatted as a JS string
*
* @param string $tableName The name of the table to retreive
* @return string [[values...],...]
*/
public static function getAllAsJS( $tableName )
{
// Set up some basic variables
$arr = MySQLObject::getAll( $tableName );
$ret = '[';
$rcount = count( $arr );
// Iterate over every row in the array
for( $r = 0; $r < $rcount; $r++ )
{
// The current row
$row = $arr[$r];
$ret .= '[';
$ccount = count( $row );
// The keys for the current row (this shouldn't change...)
$keys = array_keys( $row );
// Iterate over every column
for( $c = 0; $c < $ccount; $c++ )
{
// If it's a string, enclose it in quotes otherwise append it
$column = $row[$keys[$c]];
if( is_string( $column ) )
{
$ret .= "'$column'";
}
else
{
$ret .= $column;
}
// Add a comma if its not the last column
if( $c < $ccount - 1 )
{
$ret .= ',';
}
}
// Close the row and add a comma if its not the last row
$ret .= ']';
if( $r < $rcount - 1 )
{
$ret .= ',';
}
}
// Close the array and return
$ret .= ']';
return $ret;
}
    /**
     * Ensures that each result is the proper type before returning it.
     * Checks the table to make sure floats and ints are properly converted
     * to such rather than returning them as strings.
     *
     * NOTE(review): relies on the legacy mysql_* extension, which was
     * removed in PHP 7 — a port to mysqli/PDO is required on modern PHP.
     *
     * @param object $result The row of MySQL data to work with
     * @return array Array of properly typed results
     */
    public static function getTypedResult( $result )
    {
        $ret = array( );
        $columnInfo = array( );
        $columns = mysql_num_fields( $result );
        // Iterate over every column; mysql_fetch_field advances its own
        // internal field cursor, so each call yields the next column's info
        for( $i = 0; $i < $columns; $i++ )
        {
            $rowInfo = mysql_fetch_field( $result );
            // Blob columns are left out of $columnInfo and therefore stay
            // untouched (raw strings) in the loop below
            if( !$rowInfo->blob )
            {
                $columnInfo[$rowInfo->name] = $rowInfo->type;
            }
        }
        // Iterate over the mysql result and fetch each row
        while( $row = mysql_fetch_assoc( $result ) )
        {
            foreach( $columnInfo as $colIndex => $colValue )
            {
                // If the row is an int or datetime convert it to an integer
                // Or if its a float ('real') convert it to a float
                if( $colValue == 'int' || $colValue == 'datetime' )
                {
                    $row[$colIndex] = intval( $row[$colIndex] );
                }
                else if( $colValue == 'real' )
                {
                    $row[$colIndex] = floatval( $row[$colIndex] );
                }
            }
            // Add the row to the resulting array
            $ret[] = $row;
        }
        return $ret;
    }
}
?><file_sep><?php
/**
* @file ah_server.php
*
* Basic concept: Handle admin related ajax commands
*
* Handled actions:
* a_gettables : Gets tables that are available
* a_gettable : Prints out a specific table
* a_runsqlselect : Executes a SQL select statement
* a_runsqlother : Executes a SQL insert/update/delete statement
*/
require_once( 'defs.php' );
require_once( 'MySQLObject.php' );
// $action is set by the including script (ajaxhandler.php) after it has
// validated the request against $actionRequirements, so all actions below
// are already known to be admin-only.
if( $action == 'a_gettables' )
{
    // Get the list of tables from the DB directly
    $allTables = MySQLObject::getCustom( 'SHOW TABLES' );
    // The result is a column that has to be changed to a row
    $count = count( $allTables );
    $row = array();
    $keys = array_keys( $allTables[ 0 ] );
    $key = $keys[ 0 ];
    for( $i = 0; $i < $count; $i++ )
    {
        $row[] = $allTables[ $i ][ $key ];
    }
    // Echo it out
    echo 'admin_addTables([' . cleanupRowForJS( $row ) . ']);';
}
elseif( $action == 'a_gettable' )
{
    // NOTE(review): TABLE is interpolated into SQL unescaped; this is only
    // reachable by admins (ADMIN_ONLY in ajaxhandler.php) but is still a
    // SQL-injection vector worth hardening.
    $table = $_REQUEST[ 'TABLE' ];
    // Get all the entries (PHP class names are case-insensitive, so
    // MySqlObject resolves to MySQLObject)
    $ret = MySqlObject::getAll( $table );
    // If there aren't any entries, skip adding headers
    if( !empty( $ret ) )
    {
        if( !isset( $ret[ 0 ] ) )
        {
            $ret = array( $ret );
        }
        // There are entries, add the headers
        $columns = array_keys( $ret[ 0 ] );
        array_unshift( $ret, $columns );
    }
    // Echo it out
    echo2DArray('admin_tableView', 'admin_noTableView', $ret );
}
elseif( $action == 'a_runsqlselect')
{
    // First thing first, run the SQL the admin wants
    MySQLObject::$dieOnFailure = false;
    $sql = $_REQUEST[ 'SQL' ];
    $ret = MySqlObject::getCustom( $sql );
    $err = mysql_real_escape_string( mysql_error() );
    $columns = array();
    // ret will be empty if there are no entries
    if( !empty( $ret ) )
    {
        // Just in case it managed to get an array of empty arrays
        if( !isset( $ret[ 0 ] ) )
        {
            $ret = array( $ret );
        }
        $columns = array_keys( $ret[ 0 ] );
    }
    // Check if there was an error
    if( $err == '' )
    {
        // Format the table for output
        echo( "admin_selectSQLResult([" . cleanupRowForJS( $columns ) . '],[' );
        for( $i = 0; $i < count( $ret ); $i++ )
        {
            echo( '[' . cleanupRowForJS( $ret[ $i ] ) . ']' );
            if( $i < count( $ret ) - 1 )
            {
                echo( ',' );
            }
        }
        echo( ']);' );
    }
    else
    {
        // Error text is escaped so it can sit inside a JS string literal
        echo( "admin_selectSQLResult(undefined,undefined,\"$err\");" );
    }
}
elseif( $action == 'a_runsqlother')
{
    // Other SQL is a lot easier, run it
    MySQLObject::$dieOnFailure = false;
    $sql = $_REQUEST[ 'SQL' ];
    $ret = mysql_query( $sql );
    $err = mysql_real_escape_string( mysql_error() );
    $rows = mysql_affected_rows();
    // If there was an error, spit it out, otherwise print affected rows
    if( $err == '' )
    {
        echo( "admin_otherSQLResult($rows);" );
    }
    else
    {
        echo( "admin_otherSQLResult(undefined,\"$err\");" );
    }
}
elseif( $action == 'a_runcssjsclear' )
{
    // Empty both asset caches but keep the directories themselves
    deleteAll( 'js/cache', true );
    deleteAll( 'css/cache', true );
    echo( 'admin_setMaintenanceStatus("Cache cleared successfully.");' );
}
elseif( $action == 'a_disablemodules' )
{
    // Whitelist-sanitize the module list before writing it into a PHP file
    $modules = preg_replace( '/[^a-zA-Z0-9\,]/', '', $_REQUEST[ 'MODULES' ] );
    $modules = strtoupper( $modules );
    $t = "<?\ndefine('ADMIN_DISABLED_MODULES','$modules');\n?>";
    file_put_contents( $_SERVER['DOCUMENT_ROOT'] . '/LAD/private/inc.php', $t );
}
/**
 * Recursively deletes a directory's contents. When $empty is true the
 * top-level directory itself is kept (only emptied); otherwise it is
 * removed as well. Subdirectories are always fully removed.
 *
 * @param string $directory Directory to delete/empty (trailing '/' allowed)
 * @param boolean $empty Set to true to keep the top-level directory
 * @return boolean True on success, false on bad input or rmdir failure
 */
function deleteAll( $directory, $empty = false )
{
    // Normalize away a trailing slash so "$directory/$contents" is clean
    if( substr( $directory, -1 ) == '/' )
    {
        $directory = substr( $directory, 0, -1 );
    }
    // Refuse anything that isn't a readable, existing directory
    if( !file_exists( $directory ) || !is_dir( $directory ) ||
        !is_readable( $directory ) )
    {
        return false;
    }
    $directoryHandle = opendir( $directory );
    // BUG FIX: compare explicitly against false — the old truthiness test
    // (while( $contents = readdir(...) )) stopped early on an entry
    // named "0", leaving files behind.
    while( ( $contents = readdir( $directoryHandle ) ) !== false )
    {
        if( $contents != '.' && $contents != '..' )
        {
            $path = "$directory/$contents";
            if( is_dir( $path ) )
            {
                // Recurse with the default $empty=false so the
                // subdirectory itself is removed too
                deleteAll( $path );
            }
            else
            {
                unlink( $path );
            }
        }
    }
    closedir( $directoryHandle );
    // Only remove the directory itself when we were not asked to just
    // empty it
    if( $empty == false )
    {
        if( !rmdir( $directory ) )
        {
            return false;
        }
    }
    return true;
}
?>
<file_sep><?php
require_once( 'MySQLObject.php' );
/**
* Class for managing the solved_math DB table. Provides various functions for
* selecting results based on various criteria along with inserting new
* values based on a user ID, the difficulty and the number accomplished.
*/
class SolvedMath extends MySQLObject
{
    /**
     * Gets the columns for the solved_math table
     * @return array Column names
     */
    function getColumns()
    {
        return array( 'USER_ID', 'DIFFICULTY', 'DATE_ACCOMPLISHED',
                      'HOUR_ACCOMPLISHED', 'COUNT' );
    }
    /**
     * Gets the name of the table
     * @return string 'SOLVED_MATH'
     */
    function getTableName()
    {
        return 'SOLVED_MATH';
    }
    /**
     * Adds the number of solved problems performed by the user.
     *
     * @param int $userid User ID that performed the problems
     * @param int $difficulty Difficulty of the problems
     * @param int $count Number of problems completed
     * @return int Affected rows
     */
    function addSolved( $userid, $difficulty, $count )
    {
        // YYYYMMDD format - 20101231
        $day = intval( date( 'Ymd' ) );
        // Two digit hour format
        $hour = intval( date( 'H' ) );
        // Either insert a new entry or add to the current hour's entry
        // (the second argument presumably becomes an ON DUPLICATE KEY
        // UPDATE clause in the parent insert() — confirm in MySQLObject)
        $r = $this->insert( array( $userid, $difficulty, $day, $hour, $count ),
                            array( 'COUNT' => "COUNT+$count") );
        return $r;
    }
    /**
     * Gets the number of solved problems for a specific user. Will return rows
     * grouped by either hour or day depending on parameter.
     *
     * @param int $userid User ID that performed the problems
     * @param boolean $groupday Set to true to group by day rather than day/hour
     * @return array 2D array of all the problems categorized by hour
     */
    function getAllProblemsByUser( $userid, $groupday = false )
    {
        // NULL columns/groupby means the parent get() uses its defaults
        $columns = NULL;
        $groupby = NULL;
        if( $groupday )
        {
            // Collapse the per-hour rows into daily sums
            $columns = array( 'USER_ID', 'DIFFICULTY', 'DATE_ACCOMPLISHED',
                              'SUM(COUNT) AS COUNT' );
            $groupby = array( 'DATE_ACCOMPLISHED', 'DIFFICULTY' );
        }
        return $this->get( array( 'USER_ID' => $userid ), NULL, 0,
                           $columns, 0, $groupby );
    }
    /**
     * Gets the sum of all the problems solved for each difficulty based on a
     * given user ID.
     *
     * @param int $userid User ID that performed the problems
     * @return array 2D array of [DIFFICULTY,COUNT] grouped by DIFFICULTY
     */
    function getProblemsByUser( $userid )
    {
        // IFNULL guards the SUM against an all-NULL group
        return $this->get( array( 'USER_ID' => $userid ),
                           NULL,
                           0,
                           array( 'DIFFICULTY',
                                  'IFNULL(SUM(COUNT),0) AS COUNT' ),
                           0,
                           array( 'DIFFICULTY' ) );
    }
    /**
     * Gets the sum of all the problems solved for each difficulty. If the
     * optional parameter is set then the return array has the USER_ID column
     * at the front.
     *
     * @param boolean $groupByUser Set to true to also group by user ID
     * @return array 2D array of [DIFFICULTY,COUNT] grouped by DIFFICULTY
     */
    function getProblems( $groupByUser = false )
    {
        // Set up default group/return columns
        $columns = array( 'DIFFICULTY', 'IFNULL(SUM(COUNT),0) AS COUNT' );
        $groups = array( 'DIFFICULTY' );
        // Add user to group/return columns if the parameter is set
        if( $groupByUser )
        {
            array_unshift( $columns, 'USER_ID' );
            array_unshift( $groups, 'USER_ID' );
        }
        return $this->get( NULL, NULL, 0, $columns, 0, $groups );
    }
    /**
     * Groups all problems on difficulty, date and hour performed. If the
     * optional parameter is set then the array is also grouped on userid,
     * which simply sorts the entire table.
     *
     * @param boolean $groupByUser Set to true to group by user
     * @return array 2D array of [DATE,HOUR,DIFFICULTY,COUNT]
     */
    function getAllProblems( $groupByUser = false )
    {
        if( $groupByUser )
        {
            // No aggregation here — just an ordered dump of the raw rows
            return $this->get( NULL, array( 'USER_ID' => 'ASC',
                                            'DATE_ACCOMPLISHED' => 'ASC',
                                            'HOUR_ACCOMPLISHED' => 'ASC' ) );
        }
        $groups = array( 'DATE_ACCOMPLISHED', 'HOUR_ACCOMPLISHED',
                         'DIFFICULTY' );
        $columns = array( 'DATE_ACCOMPLISHED AS DATE',
                          'HOUR_ACCOMPLISHED AS HOUR', 'DIFFICULTY',
                          'IFNULL(SUM(COUNT),0) AS COUNT' );
        return $this->get( NULL, NULL, 0, $columns, 0, $groups );
    }
    /**
     * Groups all problems on difficulty and optionally by user. Filters based
     * on the given day. Day is in internal format 'YYYYMMDD'.
     *
     * @param string/int $day Day to filter on
     * @param boolean $groupByUser Results should be grouped by user.
     * @return array 2D array of [DIFFICULTY,COUNT]
     */
    function getProblemsByDay( $day, $groupByUser = false )
    {
        $filter = array( 'DATE_ACCOMPLISHED' => $day );
        $groups = array( 'DIFFICULTY' );
        $columns = array( 'DIFFICULTY', 'IFNULL(SUM(COUNT),0) AS COUNT');
        if( $groupByUser )
        {
            array_unshift( $groups, 'USER_ID' );
            array_unshift( $columns, 'USER_ID' );
        }
        return $this->get( $filter, NULL, 0, $columns, 0, $groups );
    }
}
?>
<file_sep><?php
/**
* @file ajaxhandler.php
*
* Basic concept: Handle ajax calls to and from the server
*
* Valid $action values:
* newuser1 = Step 1 in user creation process
* newuser2 = Step 2 in user creation process
* login = User is logging in
* passreset = Sends user a new password
* requestservers = User is requesting their list of servers
* requestfreeserver = User wants their first server for free
* viewserver = User wants to see all information about a server
* freeprograms = User is requesting their free programs
* startresearch = User wants to start researching a program
* finishprocess = User wants to complete a process
* cancelprocess = User wants to cancel a process
* startdelete = User wants to delete a file
* exchangeprograms = User wants to exchange programs for hardware upgrade
* changeservername = User wants to change the name of their server
* a_runcssjsclear = Links to ah_admin.php; process clearing of CSS & JS cache
*
* Session vars:
* ID = Sets the ID into session to help control authorization
*
* 1. Perform validation by only allowing specific actions when logged in
* Each action has it's own requirements plus the name of the file where it
* is run in an array.
* 2. Requests the action from main.js. Also forces a redirect back to
* index.php if not called via main.js.
*/
require_once( 'private/defs.php' );
require_once( 'private/errors.php' );
/**
 * Records an error entry in the DB and aborts the script, emitting a JS
 * snippet that forces the user's client back to the home page.
 *
 * @param string $reason Reason why the script is dying
 */
function ahdie( $reason )
{
    $errorLog = new Errors;
    $errorLog->addError( $reason );
    die( 'forceRefresh();' );
}
/*********************************** STEP 1 ***********************************/
// NO_LOGIN flag - User does not have to be logged in to access
define( 'NO_LOGIN', 0x01 );
// ADMIN_ONLY flag - Only admin users allowed to access
define( 'ADMIN_ONLY', 0x02 );
// Each entry: action => array( flags, handler-file suffix, required params )
// The second element maps to private/ah_<suffix>.php which is included
// after all checks pass; the optional third element lists $_REQUEST keys
// that must be present.
$actionRequirements = array(
    'login' => array( NO_LOGIN, 'login', array( 'username', 'password' ) ),
    'newuser1' => array( NO_LOGIN, 'login', array( 'username', 'password' ) ),
    'newuser2' => array( NO_LOGIN, 'login', array( 'email', 'cpassword' ) ),
    'passreset' => array( NO_LOGIN, 'login', array( 'username', 'email' ) ),
    'requestservers' => array( 0, 'server' ),
    'requestfreeserver' => array( 0, 'server' ),
    'viewserver' => array( 0, 'server', array( 'SERVER_ID' ) ),
    'freeprograms' => array( 0, 'server', array( 'SERVER_ID' ) ),
    'startresearch' => array( 0, 'server', array( 'PROGRAM_ID' ) ),
    'finishprocess' => array( 0, 'server', array( 'PROCESS_ID' ) ),
    'cancelprocess' => array( 0, 'server', array( 'PROCESS_ID' ) ),
    'startdelete' => array( 0, 'server', array( 'PROGRAM_ID' ) ),
    'exchangeprograms' => array( 0, 'server', array( 'PROGRAM_ID', 'CPU_UP',
                                 'RAM_UP', 'HDD_UP', 'BW_UP' ) ),
    'changeservername' => array( 0, 'server', array( 'SERVER_ID', 'NAME' ) ),
    'changeprogramname' => array( 0, 'server', array( 'PROGRAM_ID', 'NAME' ) ),
    'a_gettables' => array( ADMIN_ONLY, 'admin' ),
    'a_gettable' => array( ADMIN_ONLY, 'admin', array( 'TABLE' ) ),
    'a_runsqlselect' => array( ADMIN_ONLY, 'admin', array( 'SQL' ) ),
    'a_runsqlother' => array( ADMIN_ONLY, 'admin', array( 'SQL' ) ),
    'a_runcssjsclear' => array( ADMIN_ONLY, 'admin' ),
    'a_disablemodules' => array( ADMIN_ONLY, 'admin', array( 'MODULES' ) ),
    'nextmathquestion' => array( 0, 'math', array( 'DIFFICULTY', 'MODIFIERS')),
    'opt_request' => array( 0, 'options' ),
    'opt_disablemodules' => array( 0, 'options', array( 'MODULES' ) ),
    'opt_enablemodules' => array( 0, 'options', array( 'MODULES' ) ),
    'java_run' => array( 0, 'javabe' ),
    'java_shutdown' => array( ADMIN_ONLY, 'javabe' )
);
// First of all make sure the action is set
/*********************************** STEP 2 ***********************************/
if( isset( $_REQUEST['action'] ) )
{
    // Action is set, now make sure it's in our list of valid actions
    $action = $_REQUEST['action'];
    if( !isset( $actionRequirements[ $action ] ) )
    {
        // Not in the list, deny it
        ahdie( 'Invalid action.' );
    }
    else
    {
        $currReq = $actionRequirements[ $action ];
        // It's in the list, now we need to perform more checks
        $requirements = $currReq[ 0 ];
        // If the user needs to be logged in they won't have the NO_LOGIN bit
        // in the requirements and they *should* have the session ID set
        if( !( $requirements & NO_LOGIN ) && !isset( $_SESSION[ 'ID' ] ) )
        {
            ahdie( 'Action requires login.' );
        }
        // If the user isn't a admin but needs to be, deny
        if( ( $requirements & ADMIN_ONLY ) && !( isset( $_SESSION['isAdmin'] )
            && $_SESSION['isAdmin'] ) )
        {
            ahdie( "Attempt to access admin command $action when not admin." );
        }
        // Now we check if there are bad parameters. The third column of the
        // requirements is an array of required parameters. Check make sure
        // each exists
        if( isset( $currReq[ 2 ] ) && is_array( $currReq[ 2 ] ) )
        {
            foreach( $currReq[ 2 ] as $parameter )
            {
                if( !isset( $_REQUEST[ $parameter ] ) )
                {
                    ahdie( "Missing request parameter $parameter." );
                }
            }
        }
        // Include the sub-file; it reads $action and $_REQUEST directly
        require_once( 'private/ah_' . $currReq[ 1 ] . '.php' );
    }
}
else
{
    ahdie( 'Invalid request.' );
}
/*********************************** STEP 2i **********************************/
?><file_sep>createWindow( "Servers" );
// Register the menu button that opens the Servers window
addMenuButton( "Servers", "ui-icon-folder-open", requestServers);
/**
 * Tears down the servers module UI by removing both of its windows.
 */
function disableModuleSERVERS()
{
    var windows = [ "Servers", "Explorer" ];
    for( var i = 0; i < windows.length; i++ )
    {
        deleteAllElementsById( windows[ i ] );
    }
}
/**
 * Renders the "no servers" view: a message plus a button that requests a
 * free server via AJAX. Resizes the popup and moves the window into the
 * Server-Overview cache region.
 */
function noOwnedServers()
{
    getPopupContext( "Servers" ).html( "You don't have any servers!" )
            .append( "<button id='requestfree'>Request a Free One</button>");
    $('#requestfree').click(function( evt ){
        doAjax( "requestfreeserver" );
    });
    resizePopup( "Servers" );
    updateCache( "Servers", "Server-Overview" );
}
/**
 * Renders the table of servers the user owns. Each row links to a detailed
 * server view and exposes an editable custom-name field. Displayed values
 * are mirrored into the temp cache under the "Server-Overview" region.
 *
 * @param list 2D array of server rows
 *             [id, ?, ip, name, cpu, ram, hdd, bw, lastupdate, ratio]
 */
function ownedServers( list )
{
    getPopupContext( "Servers" ).html(
            "<table id='servertable'><thead><td>IP</td><td>Name</td><td>" +
            "CPU</td><td>RAM</td><td>HDD</td><td>BW</td><td " +
            "title='Operating Ratio'>OR</td></thead></table>" );
    var serverids = new Array();
    var cache = "Server-Overview";
    for( var i = 0; i < list.length; i++ )
    {
        var obj = list[ i ];
        var id = obj[ 0 ];
        var row = $("<tr></tr>");
        // Blank names are replaced with a generated placeholder
        var customname = verifyServerName( id, obj[ 3 ] );
        row.append( "<td><button href='#server-" + id + "' " +
                    "title='View Server' id='server-" + id + "-link'>" +
                    intToIP( obj[ 2 ] ) + "</button></td>" );
        row.append( $("<td></td>").append(
            createUpdateableInput( "server-" + id + "-customname",
                customname, "changeservername", "SERVER_ID", id )
            ));
        // Columns 4-7 are cpu/ram/hdd/bw, column 9 is the operating ratio
        for( var j = 4; j < 8; j++ )
        {
            row.append( "<td>" + obj[ j ] + "</td>" );
        }
        row.append( "<td>" + obj[ 9 ] + "</td>" );
        serverids[ i ] = id;
        $('#servertable').append( row );
        tempCache( "server-" + id + "-ip", obj[ 2 ], cache );
        tempCache( "server-" + id + "-customname", obj[ 3 ], cache );
        tempCache( "server-" + id + "-cpu", obj[ 4 ], cache );
        tempCache( "server-" + id + "-ram", obj[ 5 ], cache );
        tempCache( "server-" + id + "-hdd", obj[ 6 ], cache );
        tempCache( "server-" + id + "-bw", obj[ 7 ], cache );
        tempCache( "server-" + id + "-lastupdate", obj[ 8 ], cache );
        tempCache( "server-" + id + "-operatingratio", obj[ 9 ], cache );
        // BUG FIX: the click handler previously closed over the loop
        // variable `id` directly; since `var` is function-scoped, every
        // button viewed the LAST server in the list. Capture the current
        // id per-iteration with an immediately-invoked function.
        $('#server-' + id + "-link").click( (function( serverId ){
            return function(){
                doAjax( "viewserver", {
                    SERVER_ID: serverId
                }, "Servers" );
            };
        })( id ) );
    }
    tempCache( "servers", serverids.join(","), cache );
    resizePopup( "Servers" );
    updateCache( "Servers", "Server-Overview" );
}
/**
 * Asks the server for the user's server list. The returned script
 * presumably calls ownedServers()/noOwnedServers() — confirm against the
 * server-side handler.
 */
function requestServers()
{
    doAjax( "requestservers", undefined, "Servers" );
}
// Displays the "Not enough RAM" alert (invoked by server responses)
function notEnoughRAM()
{
    alert( "Not enough RAM" );
}
// Displays the "Not enough file space" alert (invoked by server responses)
function notEnoughFileSpace()
{
    alert( 'Not enough file space!' );
}
function applyModificationToServerStat( objname, newvalue, good, modification,
callback )
{
if( callback == undefined )
{
callback = true;
}
if( $('#' + objname).html() == "" || newvalue == getTempCache( objname ) )
{
tempCache( objname, newvalue, "Server-View", callback );
return;
}
var style = good ? "positivemodifier" : "negativemodifier";
var obj;
if( $("#" + objname + "-modification").length == 0 )
{
var prefix = "<span class='" + style + "' id='" + objname +
"-modification'> ";
if( modification > 0 )
{
prefix += "+";
}
var postfix = "</span>";
obj = $(prefix + modification + postfix).appendTo( $('#' + objname) );
animateServerStatModification( obj, objname, newvalue, callback );
}
else
{
obj = $("#" + objname + "-modification");
animateServerStatModification( obj, objname, newvalue, callback );
}
}
/**
 * Blinks the stat-modifier span (fade out/in twice), fades it away, and
 * finally commits the new value to the temp cache. Any animation already
 * in flight is cancelled first so rapid updates don't stack.
 *
 * @param obj jQuery object of the modifier span to animate
 * @param objname Cache index / DOM id of the underlying stat
 * @param newvalue Value stored into the cache when the animation finishes
 * @param cb Forwarded to tempCache as its updateScreen argument
 */
function animateServerStatModification( obj, objname, newvalue, cb )
{
    // Drop any queued animations and force full opacity before restarting
    obj.queue([]);
    obj.stop();
    obj.animate( { opacity: 100 }, 1 );
    obj.delay( 1000 )
       .fadeOut( 100 )
       .fadeIn( 100 )
       .fadeOut( 100 )
       .fadeIn( 100 )
       .delay( 1000 )
       .fadeOut( 300 )
       .queue(function(){
           // Commit the value only after the visual effect completes
           tempCache( objname, newvalue, "Server-View", cb );
       });
}
/**
 * Verifies that the server name is valid. The name is not allowed to be blank
 * so replace it with a valid string if it is.
 *
 * @param id ID of the server
 * @param name Custom name of the server
 * @return Custom name if it is not blank, Server #ID if it is
 */
function verifyServerName( id, name )
{
    // Blank names fall back to a generated "Server #<id>" placeholder
    if( name == "" )
    {
        return "Server #" + id;
    }
    return name;
}<file_sep><?php
/**
* Basic concept: Interface to the User MySQL table
*
* Uses:
* addUser: Adds a user to the DB
* checkCombo: Checks if specified combo exists, return ID on success
* or False on failure
* checkEmailExists: Checks if email exists, return true/false
* checkUsernameExists: Checks if username exists, return true/false
* lookupUserDetails: Looks up a single user, return false/array of user
*/
require_once( 'MySQLObject.php' );
// NOTE(review): several string literals in this class contain the
// placeholder token "<PASSWORD>" — apparently a secret-redaction artifact.
// The original SQL expressions (e.g. "PASSWORD($pass)") must be restored
// before this code can run.
class Users extends MySQLObject
{
    // Bit in the FLAGS column marking an administrator account
    const ADMIN_FLAG = 0x000000000000001;
    /**
     * Gets the columns for the USERS table.
     * @return array Column names
     */
    function getColumns( )
    {
        return array( 'ID', 'NICK', 'PASSWORD', 'EMAIL', 'GATHERING_POINTS',
                      'FLAGS' );
    }
    /**
     * Gets the name of the table.
     * @return string 'USERS'
     */
    function getTableName( )
    {
        return 'USERS';
    }
    /**
     * Adds a user to the DB. 'NULL' for the auto-increment ID; the
     * password is hashed in SQL.
     *
     * @param string $nick Username
     * @param string $pass Plain-text password (hashed by the DB)
     * @param string $email Email address
     * @return @see insert
     */
    function addUser( $nick, $pass, $email )
    {
        $nick = $this->escapifyString( $nick );
        $pass = $this->escapifyString( $pass );
        $email = $this->escapifyString( $email );
        return $this->insert( array( 'NULL', $nick, "PASSWORD($<PASSWORD>)",
                                     $email, 0, 0 ) );
    }
    /**
     * Checks if the given username/password combo exists.
     *
     * @param string $nick Username
     * @param string $pass Plain-text password
     * @return mixed First matching row, or false when no match
     */
    function checkCombo( $nick, $pass )
    {
        $nick = $this->escapifyString( $nick );
        $pass = $this->escapifyString( $pass );
        $val = $this->get( array( 'NICK' => $nick,
                                  'PASSWORD' => "<PASSWORD>)" ) );
        if( count( $val ) == 0 )
        {
            return false;
        }
        return $val[ 0 ];
    }
    /**
     * Checks if a username already exists (case-insensitive).
     *
     * @param string $nick Username to look for
     * @return int Number of matching rows (0 when free)
     */
    function checkUsernameExists( $nick )
    {
        $nick = $this->escapifyString( $nick );
        $val = $this->get( array( 'LOWER(NICK)' => "LOWER($nick)" ) );
        return count( $val );
    }
    /**
     * Checks if an email already exists (case-insensitive).
     *
     * @param string $email Email to look for
     * @return int Number of matching rows (0 when free)
     */
    function checkEmailExists( $email )
    {
        $email = $this->escapifyString( $email );
        $val = $this->get( array( 'LOWER(EMAIL)' => "LOWER($email)" ) );
        return count( $val );
    }
    /**
     * Checks that a username and email belong to the same account.
     *
     * @param string $nick Username
     * @param string $email Email address
     * @return mixed Matching row, or false when the pair doesn't match
     */
    function checkEmailMatches( $nick, $email )
    {
        $nick = $this->escapifyString( $nick );
        $email = $this->escapifyString( $email );
        $val = $this->get( array( 'LOWER(NICK)' => "LOWER($nick)", 'LOWER(EMAIL)' => "LOWER($email)" ) );
        if( count( $val ) == 0 )
        {
            return false;
        }
        return $val[ 0 ];
    }
    /**
     * Generates an 8-character random password from an ambiguity-reduced
     * alphabet (no 'l', no '1').
     *
     * NOTE(review): $chars has 34 characters but the index is taken
     * "% 33", so the final character ('9') is never selected and the
     * distribution is slightly biased. Also, rand()/srand() are not
     * cryptographically secure — this should not guard real accounts.
     *
     * @return string Random 8-character password
     */
    function ranPass( )
    {
        $chars = "abcdefghijkmnopqrstuvwxyz023456789";
        srand((double)microtime()*1000000);
        $i = 0;
        $pass = '' ;
        while ($i <= 7)
        {
            $num = rand() % 33;
            $tmp = substr($chars, $num, 1);
            $pass = $pass . $tmp;
            $i++;
        }
        return $pass;
    }
    /**
     * Resets a user's password to a random one and emails it to them.
     *
     * NOTE(review): the Message-ID header is built with a SINGLE-quoted
     * string, so '". time() .rand(1,1000). "' is sent literally instead of
     * being evaluated. The echoed emailRight(...) call also lacks quotes
     * around its JS argument and a trailing semicolon — verify against
     * the client-side handler.
     *
     * @param int $id User ID
     * @param string $nick Username (also used in the email body)
     * @param string $email Destination email address
     * @return @see update
     */
    function changePass( $id, $nick, $email )
    {
        $id = $this->escapifyString( $id );
        $enick = $this->escapifyString($nick);
        $eemail = $this->escapifyString( $email );
        $ranpass = $this->ranPass();
        $pass = $this->escapifyString( $ranpass );
        $recip = $email;
        $headers = 'MIME-Version: 1.0' . "\n";
        $headers .= 'Content-type: text/html; charset=iso-8859-1' . "\n";
        $headers .= 'Message-ID: <". time() .rand(1,1000). ">' . "\n";
        $headers .= 'From: Support <<EMAIL>>' . "\n";
        $subject = 'Password Reset ';
        $message = 'Dear ' . $nick . "<br /><br />";
        $message .= 'Thank you for resetting your password.' . "<br /><br />";
        $message .= 'Your new password is: ' . $ranpass . '<br /><br />';
        // Sends email
        mail( $recip, $subject, $message, $headers );
        echo "emailRight(" . $enick . ")";
        return $this->update(array('PASSWORD' => "<PASSWORD>)"),
                array('ID' => $id, 'EMAIL' => $eemail, 'NICK' => $enick));
    }
    /**
     * Looks up a single user's full row by ID.
     *
     * @param int $id User ID
     * @return @see getSingle
     */
    function lookupUserDetails( $id )
    {
        return $this->getSingle( $id );
    }
    /**
     * Convenience wrapper: fetches the user row and tests the admin flag.
     *
     * @param int $id User ID
     * @return int Non-zero when the user is an admin
     */
    function isUserAdmin( $id )
    {
        return $this->isUserDataAdmin( $this->lookupUserDetails( $id ) );
    }
    /**
     * Tests the admin bit on an already-fetched user row.
     *
     * @param array $array User row containing a FLAGS column
     * @return int Non-zero when the admin bit is set
     */
    function isUserDataAdmin( $array )
    {
        if( !isset( $array[ 'FLAGS' ] ) )
        {
            die( 'Improper use of isUserDataAdmin. Fix your shit.' );
        }
        return $array[ 'FLAGS' ] & self::ADMIN_FLAG;
    }
    /**
     * Adds (or subtracts, if negative) gathering points for a user.
     * The increment happens in SQL to avoid a read-modify-write race.
     *
     * @param int $id User ID
     * @param int $amount Delta to apply to GATHERING_POINTS
     * @return @see update
     */
    function adjustGatheringPoints( $id, $amount )
    {
        return $this->update( array( 'GATHERING_POINTS' =>
                                     'GATHERING_POINTS+' . $amount ),
                              array( 'ID' => $id ) );
    }
}
?><file_sep><?php
/**
 * Opens a persistent socket to the local Java backend on 127.0.0.1:19191
 * with a 200ms connect timeout. On failure an error dialog is echoed to
 * the client and the script exits immediately.
 *
 * @return resource Connected socket stream
 */
function preconnect_java( )
{
    // '@' suppresses PHP's warning; the failure is handled explicitly below
    $sock = @stream_socket_client( '127.0.0.1:19191', $errno, $errstr, 0.2,
                STREAM_CLIENT_CONNECT | STREAM_CLIENT_PERSISTENT );
    if( !$sock )
    {
        $title = 'Server Error';
        $msg = 'An internal server is down. Please try again later.';
        $func = '$("#LAD.popup .close_popup").trigger(\'click\');';
        echo "genericErrorDialog(\"$title\",\"$msg\",function(){ $func });";
        exit( 0 );
    }
    return $sock;
}
/**
 * Reads the Java backend's response from the socket and echoes it. Lines
 * are consumed until a literal "DONE" marker or EOF is seen; the marker
 * itself is not echoed.
 *
 * @param resource $sock Connected socket to read from
 */
function postwrite_java( $sock )
{
    $output = '';
    while( true )
    {
        $line = stream_get_line( $sock, 1024, "\n" );
        // "DONE" terminates the response; EOF is a safety net
        if( $line == 'DONE' || feof( $sock ) )
        {
            break;
        }
        $output .= "$line\n";
    }
    echo $output;
}
// $action is set by ajaxhandler.php, which also enforces that
// java_shutdown is admin-only before this file is included.
if( $action == 'java_run' )
{
    $sock = preconnect_java();
    $req = $_REQUEST;
    $userid = $_SESSION[ 'ID' ];
    // Strip transport/bookkeeping keys so only payload parameters are
    // forwarded; 'userid' is removed so the client cannot spoof it
    unset( $req[ 'action' ] );
    unset( $req[ '_'] );
    unset( $req[ 'end' ] );
    unset( $req[ 'userid' ] );
    // Wire format: one "key,value\n" pair per line
    foreach( $req as $key => $value )
    {
        $text = "$key,$value\n";
        fwrite( $sock, $text );
    }
    // The trusted user id comes from the session, not the request
    fwrite( $sock, "userid,$userid\n" );
    fwrite( $sock, "end,transmission\n" );
    fflush( $sock );
    postwrite_java( $sock );
}
else if( $action == 'java_shutdown' )
{
    // "end,server" tells the backend process to shut itself down
    $sock = preconnect_java();
    fwrite( $sock, "end,server\n" );
    fflush( $sock );
    postwrite_java( $sock );
}
?><file_sep>/**
* Prepares the prototype that many of the other functions interface to. Many
* functions use this.prototype to store data. This function ensures that
* object is properly created.
*/
function prepareThis()
{
    // Shared storage already created - nothing to do
    if( this.prototype != undefined )
    {
        return;
    }
    // First call: create the shared storage object used by the cache and
    // popup helpers
    this.prototype = {
        popupdata: new Array(),
        cacheValues: {},
        clearRegions: {},
        windowClearRegions: new Array(),
        cbs: new Array()
    }
}
/**
 * Performs an AJAX request to the server. The action is the first parameter
 * and all other parameters should be in an object in the second parameter.
 *
 * @param actionPara Action parameter to send to the server
 * @param outData Additional parameters to send to the server (optional)
 * @param popup Popup that may reperform this request when refreshed
 */
function doAjax( actionPara, outData, popup )
{
    prepareThis();
    // If a popup is able to reperform this query then serialize it in a way
    // we will be able to understand later.
    if( popup != undefined )
    {
        var request = "window-" + popup + "-request";
        // NOTE(review): this key is missing the "-" before "parameters";
        // it is written and read with the same spelling so lookups still
        // work, but it is inconsistent with the "-request" key above.
        var paras = "window-" + popup + "parameters";
        if( actionPara == undefined && outData == undefined )
        {
            // If both the parameters are undefined then the popup is refreshing
            // and we should set the parameters based on the stored values
            actionPara = this.prototype.popupdata[ request ];
            outData = this.prototype.popupdata[ paras ];
        }
        else
        {
            // Remember the request so a later refresh can replay it
            this.prototype.popupdata[ request ] = actionPara;
            this.prototype.popupdata[ paras ] = outData;
        }
    }
    // Put all the data into one object
    if( outData == undefined || outData == "" )
    {
        outData = {action: actionPara};
    }
    else
    {
        outData[ "action" ] = actionPara;
    }
    // Perform the AJAX query; dataType "script" executes the response as JS
    $.ajax({
        url: "ajaxhandler.php",
        data: outData,
        dataType: "script"
    });
}
/**
 * Converts an integer to an IP address (string). Integer is little-endian:
 * bits 0-7 form the first printed octet, bits 8-15 the second, and so on.
 *
 * @param val Value to convert to an IP address
 * @return IP address (string)
 */
function intToIP( val )
{
    val = toNumber( val );
    var octets = new Array();
    // Peel the low byte off four times. The division is exact because the
    // low byte has just been subtracted out.
    // BUG FIX: the original never recomputed the fourth octet and printed
    // the third octet twice.
    for( var i = 0; i < 4; i++ )
    {
        var octet = val & 255;
        octets.push( octet.toString() );
        val = ( val - octet ) / 256;
    }
    return octets.join( "." );
}
/**
 * Utility function to convert an int value to a process operation (string)
 *
 * @param val Value to convert to a process operation
 * @return String representing the given int's process operation
 */
function intToProcessOperation( val )
{
    // Index 0 is unused; valid operation codes are 1-8
    var operations = [ "", "Transfer", "Research", "Encrypt", "Decrypt",
                       "Delete", "Copy", "Install", "Uninstall" ];
    var nval = toNumber( val );
    // The extra lookup guards against fractional values within range
    if( nval >= 1 && nval <= 8 && operations[ nval ] != undefined )
    {
        return operations[ nval ];
    }
    alert( "Invalid process operation {" + val + "} with type " + typeof val );
    return "";
}
/**
 * Utility function to convert an int value to a program type (string)
 *
 * @param val Value to convert to a program type
 * @return String representing the given int's program type
 */
function intToProgramType( val )
{
    // Index 0 is unused; valid program types are 1-7
    var types = [ "", "Firewall", "Firewall Bypasser", "Password",
                  "Password Breaker", "Encryptor", "Decryptor", "Malware" ];
    var nval = toNumber( val );
    // The extra lookup guards against fractional values within range
    if( nval >= 1 && nval <= 7 && types[ nval ] != undefined )
    {
        return types[ nval ];
    }
    alert( "Invalid program type {" + val + "} with type " + typeof val );
    return "";
}
/**
 * Utility function to get a program's size. The size scales linearly with
 * the program version; unknown types yield undefined.
 *
 * @param type Type of program to lookup
 * @param version Version of the program
 * @return Total size of the program
 */
function getProgramSize( type, version )
{
    // Per-version size multiplier for each program type (1-7)
    var multipliers = { 1: 5, 2: 10, 3: 2, 4: 4, 5: 40, 6: 40, 7: 25 };
    var multiplier = multipliers[ toNumber( type ) ];
    if( multiplier != undefined )
    {
        return version * multiplier;
    }
}
/**
 * Updates the cache and clears out any old values from a given window. Temp
 * cache values are only stored in a given state (clear region). Once the
 * window leaves that clear region (or is closed) all of the temp cache values
 * associated with that region are also cleared out.
 *
 * @param win Window that has changed its clear region (or closed)
 * @param cache New cache region the window has gone to
 */
function updateCache( win, cache )
{
    prepareThis();
    var old = this.prototype.windowClearRegions[ win ];
    // If the region hasn't actually changed, don't do anything
    if( old == cache )
    {
        return;
    }
    // Set the new clear region
    this.prototype.windowClearRegions[ win ] = cache;
    var arr = new Array();
    var i;
    // Iterate over all the temp cache entries and find which ones had the old
    // clear region
    for( i in this.prototype.clearRegions )
    {
        if( this.prototype.clearRegions[ i ] == old )
        {
            arr.push( i );
        }
    }
    // Iterate over the new list; calling tempCache with no value deletes
    // the entry
    for( i in arr )
    {
        tempCache( arr[ i ] );
    }
    // This is the only place that we are guaranteed to be called
    resetqtip();
}
/**
 * Gets the length of a temp cache list (a comma-serialized string stored
 * under the given index).
 *
 * @param ind Index of the temp cache entry to get the length of
 * @return Number of entries in the temp cache list
 */
function getTempCacheListLength( ind )
{
    var serialized = getTempCache( ind ).toString();
    // Missing/empty entries count as an empty list
    return ( serialized == "" || serialized == undefined )
        ? 0
        : serialized.split( "," ).length;
}
/**
 * Adds an item to the temp cache list. Every time this function is called
 * the temp cache entry with the given index gets appended with the given
 * value (comma-separated).
 * @see removeTempCacheList
 *
 * @param ind Index in the temp cache
 * @param val Value to add to the temp cache entry
 * @param clearRegion Region that will cause this list to be cleared out
 */
function addTempCacheList( ind, val, clearRegion )
{
    var existing = getTempCache( ind );
    // An empty entry becomes a single-element list; otherwise append with
    // a comma separator
    var serialized = ( existing == "" )
        ? val
        : existing.toString() + "," + val;
    tempCache( ind, serialized, clearRegion );
}
/**
 * Removes every occurrence of an item from a temp cache list (a
 * comma-serialized string stored under the given index). Does nothing when
 * the list is missing/empty or the value is not present.
 * @see addTempCacheList
 *
 * @param ind Index to put into the temp cache
 * @param val Value to remove from the list
 * @param clearRegion Region that will cause this list to be cleared out
 */
function removeTempCacheList( ind, val, clearRegion )
{
    var curr = getTempCache( ind );
    if( curr == "" )
    {
        return;
    }
    var currList = curr.toString().split( "," );
    var removed = false;
    // BUG FIX: iterate backwards — the old forward loop kept incrementing
    // after splice(), skipping the element that slid into the removed slot
    // and leaving adjacent duplicates behind.
    for( var i = currList.length - 1; i >= 0; i-- )
    {
        if( currList[ i ] == val )
        {
            currList.splice( i, 1 );
            removed = true;
        }
    }
    // Only write back when something actually changed
    if( removed )
    {
        tempCache( ind, currList.join( "," ), clearRegion );
    }
}
/**
 * Gets a value from the temp cache
 *
 * @param ind Index to retrieve from the temp cache. If there is no entry
 * then an empty string is returned.
 * @return Value in the temp cache for the given index
 */
function getTempCache( ind )
{
    // tempCache() returns the OLD value when setting, so "read" by writing
    // a throwaway "0" and then writing the previous value straight back.
    var ret = tempCache( ind, 0 );
    // Restore the original value. If the entry did not exist, ret is
    // undefined and this call deletes the temporary "0" again.
    tempCache( ind, ret );
    if( ret == undefined )
    {
        return "";
    }
    return ret;
}
/**
 * Sets (or deletes) a value in the temp cache and returns the old value.
 * Passing an undefined val deletes the entry and its clear region.
 *
 * @param ind Index to set
 * @param val Value to set (undefined deletes the entry)
 * @param clearRegions The regions that will cause the index to be unset
 * @param updateScreen calls Function(ind, val, old) or updates screen with \
 * the object that has ID of ind with value of val
 * @return The previous value for ind (undefined when there was none)
 */
function tempCache( ind, val, clearRegions, updateScreen )
{
    prepareThis();
    if( ind == undefined )
    {
        alert( "Undefined index for temp cache." );
        return 0;
    }
    // Keys and values are always stored as strings
    ind = ind.toString();
    if( val != undefined )
    {
        val = val.toString();
    }
    var old = this.prototype.cacheValues[ ind ];
    if( val != undefined )
    {
        this.prototype.cacheValues[ ind ] = val;
    }
    else
    {
        // No value means delete the entry along with its clear region
        delete this.prototype.cacheValues[ ind ];
        delete this.prototype.clearRegions[ ind ];
    }
    if( clearRegions != undefined )
    {
        this.prototype.clearRegions[ ind ] = clearRegions;
    }
    if( updateScreen )
    {
        // Mirror the value onto the DOM element whose id matches ind
        var obj = $("#" + ind);
        if( obj.length )
        {
            if( typeof updateScreen === "function" )
            {
                updateScreen( obj, val, old );
            }
            else if( obj.is( "input" ) )
            {
                obj.val( val );
            }
            else
            {
                obj.html( val );
            }
        }
    }
    return old;
}
/**
* Gets a value from the perm cache
*
* @param ind Index to retrieve from the perm cache. If there is no entry
* then an empty string is returned.
* @param def Default value if index isn't found
* @return Value in the perm cache for the given index
*/
/**
 * Looks up a value in the permanent (localStorage-backed) cache.
 *
 * @param ind Index to retrieve from the perm cache
 * @param def Optional default returned when the index is missing; when the
 *            default itself is omitted an empty string is returned instead
 * @return Stored value, the default, or ""
 */
function getPermCache( ind, def )
{
    var stored = localStorage.getItem( ind );
    if( stored != undefined )
    {
        return stored;
    }
    if( def == undefined )
    {
        return "";
    }
    return def;
}
/**
* @param ind Index to set
* @param val Value to set
* @param updateScreen calls Function(ind, val, old) or updates screen with \
* the object that has ID of ind with value of val
*/
function permCache( ind, val, updateScreen )
{
    // prepareThis() keeps the global cache structures initialized, matching
    // tempCache() even though this function persists via localStorage.
    prepareThis();
    if( ind == undefined )
    {
        alert( "Undefined index for perm cache." );
        return 0;
    }
    // localStorage keys/values are strings; normalize explicitly.
    ind = ind.toString();
    if( val != undefined )
    {
        val = val.toString();
    }
    // Capture the previous value so it can be returned (parallel to
    // tempCache()'s contract).
    var old = localStorage.getItem( ind );
    if( val != undefined )
    {
        localStorage.setItem( ind, val );
    }
    else
    {
        // An undefined value unsets the entry.
        localStorage.removeItem( ind );
    }
    if( updateScreen )
    {
        // Mirror the value into the element whose id matches the index.
        var obj = $("#" + ind);
        if( obj.length )
        {
            if( typeof updateScreen === "function" )
            {
                updateScreen( obj, val, old );
            }
            else if( obj.is( "input" ) )
            {
                obj.val( val );
            }
            else
            {
                obj.html( val );
            }
        }
    }
    return old;
}
/**
* Converts an int into a time string based on the length of the time. Negative
* values are converted to 0. All other values are converted to numbers.
*
* @param val Value to convert to string
* @return String formatted as ##d ##h ##m ##s
*/
/**
 * Converts a number of seconds into a human-readable duration string.
 * Negative (and non-numeric) values are clamped to 0 before formatting, as
 * the documented contract promises.
 *
 * @param val Value (seconds) to convert to a string
 * @return String formatted as ##d ##h ##m ##s; leading zero units are
 *         omitted, seconds are always present, and a trailing space is kept
 *         for backwards compatibility
 */
function intToTimeString( val )
{
    var remain = Number( val );
    // BUG FIX: the contract says negatives become 0, but the old code let
    // them through and produced strings like "-5s ". NaN also clamps to 0.
    if( !( remain > 0 ) )
    {
        remain = 0;
    }
    // Peel off seconds, minutes and hours; whatever is left is whole days.
    var seconds = Math.floor( remain % 60 );
    remain = ( remain - seconds ) / 60;
    var minutes = Math.floor( remain % 60 );
    remain = ( remain - minutes ) / 60;
    var hours = Math.floor( remain % 24 );
    remain = ( remain - hours ) / 24;
    var days = Math.floor( remain );
    // Build the string from the largest non-zero unit downwards; once a unit
    // is printed, every smaller unit is printed too.
    var output = "";
    if( days > 0 )
    {
        output = days.toString() + "d ";
    }
    if( hours > 0 || output != "" )
    {
        output += hours.toString() + "h ";
    }
    if( minutes > 0 || output != "" )
    {
        output += minutes.toString() + "m ";
    }
    output += seconds.toString() + "s ";
    return output;
}
/**
* Used in creating a countdown. The first parameter specifies which DOM
* element is being updated. If the last parameter is set to true then every
* second the target time is recalculated from the second parameter otherwise
* it is simply stored. It can be either a function or a value. So long as
* the target time is greater than now, then the countdown decreases. The third
* parameter specifies a unique ID for storing the updater entry. After the
* timer reaches 0 the fourth parameter (function) will be run if it is passed.
* The function is passed the ID and the DOM element as parameters. Fifth
* parameter may be sent as true, if it is all other values are ignored, to
 * force all of the ETICs to be recalculated either by the function or the temp
* cache value in parameter two.
*
* @param objectname Name of the object
* @param object Name of the temp cache that has the target time or a
function to calculate it
* @param id Used for calculating when to delete the updater
* @param callback Function to call when completely done
* @param recalc Set to true to recalculate all objects that use functions
*/
/**
 * Drives per-second countdown displays. See the block comment above for the
 * parameter semantics. State (values/remaining/ids/callbacks/objects) is
 * kept on `this` (the global object) so the setInterval re-entry sees it.
 *
 * @param objectname Name of the DOM element to update
 * @param object Name of the temp cache that has the target time, or a
 *               function to calculate it
 * @param id Used for calculating when to delete the updater
 * @param callback Function to call when completely done
 * @param recalc Set to true to recalculate all objects that use functions
 */
function runTimeUpdater( objectname, object, id, callback, recalc )
{
    // Performs the actual update to the DOM element at index i
    this.updateItem = function( i ){
        var entry = this.values[ i ];
        var remain = this.remaining[ i ];
        var obj = $("#" + entry);
        // If the DOM element has been lost then delete this entry and abort
        if( obj.length == 0 )
        {
            this.deletions[ this.deletions.length ] = i;
            return;
        }
        // Decrement one second
        if( remain > 0 )
        {
            remain--;
            this.remaining[ i ] = remain;
        }
        // If there are 0 seconds left delete this and run the callback
        if( remain == 0 )
        {
            this.callbacks[ i ]( this.ids[ i ], obj );
            this.deletions[ this.deletions.length ] = i;
        }
        else
        {
            // Echo out the remaining time to the DOM element
            obj.html( intToTimeString( remain ) );
        }
    };
    // Function that gets called every second and simply updates every item
    this.actualUpdater = function(){
        var i;
        this.deletions = new Array();
        for( i = 0; i < this.values.length; i++ )
        {
            this.updateItem( i );
        }
    };
    // Calculates the remaining seconds for an object based on its ID.
    // Guaranteed to return a non-negative value.
    this.calculateRemaining = function(object, id){
        var targetTime;
        if( typeof object == 'function' )
        {
            targetTime = object( id );
        }
        else
        {
            // The temp cache entry holds a millisecond timestamp; convert it
            // to "seconds from now". (A redundant dead assignment to
            // targetTime was removed here.)
            var etic = getTempCache( object ).toString();
            var eticObject = new Date();
            eticObject.setTime( etic );
            var timestamp = Date.now() / 1000;
            targetTime = ( eticObject.getTime() / 1000 ) - timestamp;
        }
        return targetTime > 0 ? targetTime : 0;
    };
    // Forces the ETICs to be recalculated
    this.recalculateEtics = function(){
        if( this.values == undefined )
        {
            return;
        }
        var i;
        // Iterate over each value and set the remaining to the new amount.
        // Only function-based entries can be recalculated. Also update the
        // item.
        for( i = 0; i < this.values.length; i++ )
        {
            if( typeof this.objects[ i ] == 'function' )
            {
                this.remaining[ i ] =
                    this.calculateRemaining( this.objects[ i ], this.ids[ i ] );
                this.updateItem( i );
            }
        }
    };
    this.deletions = new Array();
    var i;
    // If recalc parameter is true, ignore all other parameters
    if( recalc == true )
    {
        this.recalculateEtics();
        return;
    }
    // If object is defined this call registers a new countdown entry.
    if( object != undefined )
    {
        // Stores the name of the object
        if( this.values == undefined )
        {
            this.values = new Array();
        }
        // Number of seconds remaining
        if( this.remaining == undefined )
        {
            this.remaining = new Array();
        }
        // The ID of the object, sent to the callback
        // Useful for identifying which object it is in the callback
        if( this.ids == undefined )
        {
            this.ids = new Array();
        }
        // Function to be called when counter reaches 0
        if( this.callbacks == undefined )
        {
            this.callbacks = new Array();
        }
        // Either a function or the temp cache name that calculates the
        // remaining seconds
        if( this.objects == undefined )
        {
            this.objects = new Array();
        }
        this.values[ this.values.length ] = objectname;
        this.ids[ this.ids.length ] = id;
        this.callbacks[ this.callbacks.length ] = callback;
        this.objects[ this.objects.length ] = object;
        var secsremaining = this.calculateRemaining( object, id );
        this.remaining[ this.remaining.length ] = secsremaining;
        // Start the shared one-second timer on first registration.
        if( this.timer == undefined || this.timer == -1 )
        {
            this.timer = setInterval( "runTimeUpdater();", 1000 );
        }
        this.updateItem( this.values.length - 1 );
        return;
    }
    // No arguments: this call came from the interval timer. Perform the
    // per-second updates.
    this.actualUpdater();
    // If deletions has elements then we need to delete some stuff.
    if( this.deletions.length )
    {
        // Everything needs to be deleted...simply recreate the arrays
        if( this.deletions.length == this.values.length )
        {
            this.values = new Array();
            this.remaining = new Array();
            this.ids = new Array();
            this.callbacks = new Array();
            clearInterval( this.timer );
            this.timer = -1;
        }
        else
        {
            // Specific ones need to be deleted
            // Shifts every entry after index i one slot to the left
            this.deleteItem = function( i ){
                var len = this.values.length;
                for( ; i < len - 1; i++ )
                {
                    this.values[ i ] = this.values[ i + 1 ];
                    this.remaining[ i ] = this.remaining[ i + 1 ];
                    // BUG FIX: this line previously copied ids[ i ] onto
                    // itself, corrupting the id list after any deletion.
                    this.ids[ i ] = this.ids[ i + 1 ];
                    this.callbacks[ i ] = this.callbacks[ i + 1 ];
                }
            };
            // Delete deletions from all arrays; offset compensates for the
            // indices shifting as earlier entries are removed.
            var offset = 0;
            for( i = 0; i < this.deletions.length; i++ )
            {
                this.deleteItem( this.deletions[ i ] - offset );
                offset++;
                this.values.pop();
                this.remaining.pop();
                this.ids.pop();
                this.callbacks.pop();
            }
        }
    }
}
/**
* Forces the window refresh causing the server to be reasked for the index
* page.
*/
function forceRefresh()
{
    // location.reload() re-requests the current URL from the server.
    window.location.reload();
}
/**
* Converts a value to a number (int not the Object). The string/Object should
* be in a workable state because it is simply passed to the Number constructor.
*
* @param val Value to covnert to integer
* @return int representing the value
*/
/**
 * Converts a value to a primitive number. The value should be in a workable
 * state because it is simply passed through the standard Number conversion.
 *
 * @param val Value to convert to a number
 * @return Primitive number representing the value (NaN if unconvertible)
 */
function toNumber( val )
{
    // Number( val ) is equivalent to new Number( val ).valueOf() without
    // allocating a wrapper object.
    return Number( val );
}
/**
* Many elements end up getting a 3 part id. Class-id-property (server-1-type)
* This function will return the id portion of the element.
*
* @param obj jQuery Object to extract the ID from
* @return ID of the object
*/
/**
 * Many elements carry a three-part id: class-id-property (e.g. server-1-type).
 * Returns the middle (id) portion of the given element's id attribute.
 *
 * @param obj jQuery object to extract the ID from
 * @return ID portion of the element's id attribute
 */
function getSimpleID( obj )
{
    var parts = obj.attr( "id" ).toString().split( "-" );
    return parts[ 1 ];
}
/**
* Adds a script element to the HTML essentially causing it to be loaded.
* The optional callback may be added to be called when the script has finished
* loading.
*
* @param url URL to load from
* @param callback Optional callback to run after the script has been loaded
*/
/**
 * Adds a script element to the HTML head, causing it to be loaded. The
 * optional callback runs once the script has finished loading; it may be
 * either a function or (legacy) a string of code to evaluate.
 *
 * @param url URL to load from
 * @param callback Optional callback (function or eval string) to run after
 *                 the script has been loaded
 */
function addScriptElement( url, callback )
{
    // Create the script element
    var script = $("<script></script>");
    script.attr( 'type', 'text/javascript' );
    script.attr( 'src', url );
    // If the callback is set, run it after it is loaded
    if( callback != undefined )
    {
        script.load( function(){
            // BUG FIX: eval() returns a non-string argument unchanged, so a
            // function passed as the callback was never actually invoked.
            // Call functions directly; keep eval() for legacy string code.
            if( typeof callback === "function" )
            {
                callback();
            }
            else
            {
                eval( callback );
            }
        });
    }
    // Add it to the head
    $("head").append( script );
}
/**
* Adds a stylesheet to the HTML essentially causing it to be loaded. The
* optional callback may be added to be called when the stylesheet has finished
* loading.
*
* @param url URL to load from
* @param callback Optional callback to run after the stylesheet has been loaded
*/
/**
 * Adds a stylesheet link to the HTML head, causing it to be loaded. The
 * optional callback runs once the stylesheet has finished loading; it may be
 * either a function or (legacy) a string of code to evaluate.
 *
 * @param url URL to load from
 * @param callback Optional callback (function or eval string) to run after
 *                 the stylesheet has been loaded
 */
function addStylesheet( url, callback )
{
    // Create the element
    var ss = $("<link />");
    ss.attr({
        'rel': 'stylesheet',
        'type': 'text/css',
        'href': url
    });
    // If the callback is set, run it after it is loaded
    if( callback != undefined )
    {
        ss.load( function(){
            // BUG FIX: eval() returns a non-string argument unchanged, so a
            // function passed as the callback was never actually invoked.
            if( typeof callback === "function" )
            {
                callback();
            }
            else
            {
                eval( callback );
            }
        });
    }
    // Add it to the head
    $("head").append( ss );
}
/**
* Extend string to convert to camel case.
*
* @return Returns The Camel Case Version
*/
/**
 * Extends String to upper-case the first letter of every whitespace-separated
 * word ("hello world" -> "Hello World").
 *
 * @return The Title Cased version of this string
 */
String.prototype.toCamelCase = function(){
    var upperize = function( match ){
        return match.toUpperCase();
    };
    return this.replace( /(?:^|\s)\w/g, upperize );
};
/**
* Converts any object into a string. Strings themselves are enclosed in
* quotes. If it is a number it is simply returned. Objects are treated as
* arrays with each of their properties being elements. Arrays have each
* element stringified and have commas insert in between each.
*
* @param obj Object to convert to a string
* @return String representation of the object
*/
/**
 * Converts any object into a string. Strings themselves are enclosed in
 * quotes. Numbers are simply returned. Objects are serialized as
 * {"key":value,...}; arrays have each element stringified and joined with
 * commas inside brackets.
 *
 * @param obj Object to convert to a string
 * @return String representation of the object
 */
function stringify(obj) {
    var t = typeof obj;
    if( t != "object" || obj === null )
    {
        // Primitive (or null): quote strings, String() everything else.
        if( t == "string" )
        {
            obj = '"' + obj + '"';
        }
        return String( obj );
    }
    else
    {
        var n, v, j = [], arr = ( obj && obj.constructor == Array );
        for( n in obj )
        {
            v = obj[ n ];
            // BUG FIX: this previously tested `typeof obj` (always "object"
            // here), so nested strings were never quoted and nested objects
            // were never recursed into.
            t = typeof v;
            if( t == "string" )
            {
                v = '"' + v + '"';
            }
            else if( t == "object" && v !== null )
            {
                v = stringify( v );
            }
            // Arrays get bare elements; objects get "key": prefixes.
            j.push( ( arr ? "" : '"' + n + '":' ) + String(v) );
        }
        return (arr ? "[" : "{") + String(j) + (arr ? "]" : "}");
    }
}
/**
* Creates an updateable input. Whenever the user has finished updating the
* input it will automatically request the server to update it. Sends an ajax
* request with the action along with two parameters. The first is NAME which
* is the new value of the input. The second puts together the values of the
* two parameters.
*
* @param id ID of the input
* @param val Original value of the input
* @param action Action to send in the ajax call
* @param ajaxpara Name of the custom ajax parameter
* @param ajaxval Name of the custom ajax value
*/
function createUpdateableInput( id, val, action, ajaxpara, ajaxval )
{
    // Build a "semihidden" text input that looks like plain text until the
    // user hovers/focuses it; on blur, changed values are sent to the server.
    return $("<input type='text'>").addClass( "semihidden" )
        .attr( "title", "Click to edit" ).attr( "id", id ).val( val )
        .hover(function(){
            $(this).addClass("semihiddenhover");
        }, function(){
            $(this).removeClass("semihiddenhover");
        }).focus(function(){
            $(this).addClass("semihiddenactive");
        }).blur(function(){
            $(this).removeClass("semihiddenactive");
            // The temp cache entry for this id holds the last known value;
            // only fire the ajax update if the user actually changed it.
            var oldVal = getTempCache( id );
            var newVal = $(this).val();
            if( oldVal != newVal )
            {
                var paras = {};
                paras[ "NAME" ] = newVal;
                // Optional extra ajax parameter (both name and value must be
                // supplied for it to be included).
                if( ajaxpara != undefined && ajaxval != undefined )
                {
                    paras[ ajaxpara ] = ajaxval;
                }
                doAjax( action, paras );
            }
        });
}
/**
* Ensures that all DOM elements (and script elements) that contain the given ID
* are deleted
*
* @param name Name of elements to delete
*/
/**
 * Ensures that all DOM elements (and script elements) that carry the given
 * name as an id are deleted. Whitespace in the name is converted to
 * underscores before matching.
 *
 * @param name Name of elements to delete
 */
function deleteAllElementsById( name )
{
    // BUG FIX: use the global flag so EVERY whitespace character is replaced,
    // not just the first one (names may contain several spaces).
    var ID = name.replace( /\s/g, '_' );
    var elem = $("#" + ID);
    // Duplicate ids are invalid HTML but can occur here; keep removing until
    // the selector no longer matches anything.
    while( elem.length )
    {
        elem.remove();
        elem = $("#" + ID);
    }
    // NOTE(review): script elements use a `src` attribute, not `href` — this
    // selector likely never matches anything; confirm intent.
    $("script[href~='" + ID.toLowerCase() + "']").remove();
}
/**
* Performs cleanup whenever the user leaves the website
*/
function leavingWebsite()
{
    // Trigger the close handler of every open popup before the page unloads.
    $(".close_popup").trigger( 'click' );
}
window.onunload = leavingWebsite;<file_sep>createWindow( "Gather Map" );
// Module bootstrap: create the crafting window and the start-menu buttons
// that open the two views.
createWindow( "Crafting" );
addMenuButton( "Gather Map", "ui-icon-image", requestGatherMap );
addMenuButton( "Crafting", "ui-icon-shuffle", requestCrafting );
/**
 * Tears down the crafting module by removing every DOM element belonging to
 * its two windows.
 */
function disableModuleCRAFTING()
{
    deleteAllElementsById( "Gather Map" );
    deleteAllElementsById( "Crafting" );
}
// Stub: intended to request the gather map view (not yet implemented).
function requestGatherMap()
{
}
// Stub: intended to request the crafting view (not yet implemented).
function requestCrafting()
{
}<file_sep><?php
/**
* Basic concept: Retrieves a client-side file (JS, CSS, IMG)
*
* Parameters:
* t: Type – J = JS, C = CSS, P = JPG, E = JPEG, N = PNG, S = SVG, G = GIF
* f: File name
* m: mtime of the file
*
* Session vars:
* isAdmin: Used to check if the file is exclusive to admins
*
* 1. Validate the input variables
* 2. Validate the type
* 3. Validate the file can be accessed if not an admin
* 4. Validate the file exists
* 5. Validate the mtime of the file
* 6. Return the file back with a 60-day expiration
* 6a. Handle all JS/CSS files with images/get.php replacement
* 6b. Handle JS files with minifying
* 6c. Cache JS/CSS
*/
require_once( 'private/defs.php' );
// Setup admin only files
$adminOnlyFiles = array( 'js/admin.js', 'css/admin.css' );
/*********************************** STEP 1 ***********************************/
// Make sure we have each request var
if( !isset( $_REQUEST[ 't' ] ) || !isset( $_REQUEST[ 'f' ] ) ||
    !isset( $_REQUEST[ 'm' ] ) )
{
    die( 'GO AWAY1' );
}
// Make sure there are no special characters so that people don't access files
// they shouldn't
preg_match( '/^[[:alnum:]\.\-\_]*$/i', $_REQUEST[ 'f' ] ) == 1 or
    die( 'GO AWAY2' );
/*********************************** STEP 2 ***********************************/
// Make sure the type of file requested is valid
in_array( $_REQUEST[ 't' ], array( 'J', 'C', 'P', 'E', 'N', 'S', 'G' ) ) or
    die( 'GO AWAY3' );
$type = $_REQUEST[ 't' ];
$file = $_REQUEST[ 'f' ];
// Resolve the MIME type plus the cached and original file paths (helpers are
// provided by private/defs.php).
$applicationType = clientfile_getApplicationType( $type );
$cacheFileName = clientfile_getCacheName( $type, $file );
$actualFileName = clientfile_getName( $type, $file );
/*********************************** STEP 3 ***********************************/
// NOTE(review): the STEP 1 regex rejects '/', so $file can never equal
// 'js/admin.js' or 'css/admin.css' here — confirm how admin-only files are
// actually named when requested, otherwise this check is ineffective.
if( ( !isset( $_SESSION[ 'isAdmin' ] ) || !$_SESSION[ 'isAdmin' ] ) &&
    in_array( $file, $adminOnlyFiles ) )
{
    die( 'GO AWAY4' );
}
/*********************************** STEP 4 ***********************************/
// File/Folder does not exist
if( !is_readable( $cacheFileName ) )
{
    die( 'GO AWAY5' );
}
/*********************************** STEP 5 ***********************************/
// The client must send the current mtime; this doubles as a cache-buster.
if( filemtime( $actualFileName ) != $_REQUEST[ 'm' ] )
{
    die( 'GO AWAY6' );
}
/*********************************** STEP 6 ***********************************/
// Output some headers (long-lived caching: the mtime in the URL changes
// whenever the file changes).
header( 'Pragma: public' );
header( 'Cache-Control: max-age=31536000' );
header( "Content-Type: $applicationType" );
header( "Last-Modified: " . date( DateTime::COOKIE, $_REQUEST[ 'm' ] ) );
header( "Expires: " . date( DateTime::COOKIE, time() + (365 * 24 * 60 * 60) ) );
$outBuffer = file_get_contents( $cacheFileName );
// OMG actually write it out...
header( "Content-Length: " . strlen( $outBuffer ) );
echo $outBuffer;
?><file_sep>/**
* Sets up the window and start menu button for options. When the start menu
* button is clicked, @see requestOptions is called.
*/
function initOptions()
{
    // Create the options window/menu button; clicking the button triggers
    // requestOptions() which asks the server for the option data.
    createWindow( "Options" );
    addMenuButton( "Options", "ui-icon-wrench", requestOptions );
}
/**
* Queries the server to request the options for the current user.
* @see opt_modules
*/
function requestOptions()
{
    // The server's response eventually invokes opt_modules() with the lists
    // of enabled/disabled modules (see the doc comment above).
    doAjax( "opt_request" );
}
/**
* Called to populate the modules that are enabled/disabled. The list of
 * enabled modules are available as an array of strings in the first parameter.
* Each disabled module is an array containing the name of the module along with
* the time (int) the module was disabled.
*
* @see requestOptions
* @param enabled Array of modules that are enabled (strings)
* @param disabled Array of modules that are disabled (array(name,time))
*/
function opt_modules( enabled, disabled )
{
    var w = getPopupContext( "Options" );
    var module, obj, i, timedisabled;
    // Reset the window and list every module with enable/disable radios.
    w.html( "" ).append( "<h1>Modules</h1>" );
    for( i in enabled )
    {
        module = enabled[ i ];
        obj = createModuleRow( module, true );
        w.append( obj );
        w.append( "<br />" );
    }
    for( i in disabled )
    {
        // Disabled entries are [name, timeDisabled] pairs.
        module = disabled[ i ][ 0 ];
        timedisabled = disabled[ i ][ 1 ];
        obj = createModuleRow( module, false, timedisabled );
        w.append( obj );
        w.append( "<br />" );
    }
    w.append(
        $("<button>Submit Changes</button>").css({
            "float": "right"
        }).click(function(){
            // Diff the checked radios against the temp cache of last-known
            // states and collect the modules whose status changed.
            var radios = getCheckedModules();
            var nenabled = new Array();
            var ndisabled = new Array();
            radios.each(function(){
                var nstatus = $(this).attr( "value" );
                var name = $(this).attr( "name" );
                if( getTempCache( name + "-enabled" ) != nstatus )
                {
                    tempCache( name + "-enabled", nstatus, "Modules" );
                    // Strip the "opt-" prefix to get the module name.
                    var module = name.slice( name.indexOf( '-' ) + 1 );
                    if( nstatus == "enabled" )
                    {
                        $(this).siblings('input').removeAttr( "title" );
                        nenabled.push( module );
                    }
                    else
                    {
                        $(this).attr( "title", "Disabled now." );
                        // NOTE(review): assumes a global function named
                        // "disableModule" + module exists — verify the
                        // casing matches (e.g. disableModuleCRAFTING).
                        eval( "disableModule" + module + "();" );
                        ndisabled.push( module );
                    }
                }
            });
            // Tell the server about each direction of change separately.
            if( nenabled.length )
            {
                doAjax( "opt_enablemodules", {
                    MODULES: nenabled.join( "," )
                });
            }
            if( ndisabled.length )
            {
                doAjax( "opt_disablemodules", {
                    MODULES: ndisabled.join( "," )
                });
            }
        }).attr( "id", "opt-module-submit" ).button()
    ).append(
        "<div id='opt-modulesenabled-status'></div>" +
        "<div id='opt-modulesdisabled-status'></div>"
    );
    // Status banners start hidden; enabledModules()/disabledModules() flash
    // them when the server confirms a change.
    $("#opt-modulesdisabled-status, #opt-modulesenabled-status").css({
        "visibility": "hidden",
        "display": "inline"
    }).addClass( "ui-state-highlight ui-corner-all" );
    checkModuleSubmitChanges();
}
/**
* Creates a DOM element based on whether a module is enabled. Also sets a
* temp cache entry based on whether the module is enabled. The returned
* DOM element will consist of a left aligned element that contains the name of
* the module. It will also have two radio buttons for choosing enabled or
* disabled that are right aligned.
*
* @param name Name of the module
* @param enabled Whether the module is enabled
* @param timedisabled If the module is disabled, the time it was disabled
* @return DOM element for dis/enabling the module
*/
function createModuleRow( name, enabled, timedisabled )
{
    // Remember the module's current state so the submit handler can diff
    // against it later.
    tempCache( "opt-" + name + "-enabled", enabled ? "enabled" : "disabled",
        "Modules" );
    // Row layout: module name on the left, two radios + labels on the right.
    return $("<div></div>").append(
        $("<div></div>").append( name.toCamelCase() ).css( "float", "left" )
    ).append(
        $( "<input type='radio' />" ).css( "float", "right" )
            .attr({
                "id": "opt-" + name + "-enabled",
                "name": "opt-" + name,
                "checked": enabled ? "checked" : undefined,
                "value": "enabled"
            }).change(function(){checkModuleSubmitChanges();})
    ).append(
        $( "<label>Enabled</label>").attr( "for", "opt-" + name + "-enabled" )
            .css( "float", "right" )
    ).append(
        $( "<input type='radio' />" ).css( "float", "right" )
            .attr({
                "id": "opt-" + name + "-disabled",
                "name": "opt-" + name,
                "checked": enabled ? undefined : "checked",
                // Disabled modules get a tooltip saying how long ago they
                // were disabled.
                "title": enabled ? undefined : "Disabled " +
                    intToTimeString( ( Date.now() / 1000 ) - timedisabled ) +
                    " ago.",
                "value": "disabled"
            }).change(function(){checkModuleSubmitChanges();})
    ).append(
        $( "<label>Disabled</label>").attr( "for", "opt-" + name + "-disabled")
            .css( "float", "right" )
    );
}
/**
* Gets a jQuery object with all of the radio options that are chacked
*
* @return jQuery object of checked radio options
*/
/**
 * Collects every checked radio input inside the Options window.
 *
 * @return jQuery object containing the checked radio options
 */
function getCheckedModules()
{
    return $("#Options input[type='radio']:checked");
}
/**
* Checks if the module "Submit Changes" button should be enabled/disabled. Each
* of the modules has a temp cache associated with it that defines whether it is
* enabled or not. If any of the temp cache values do not match
*/
function checkModuleSubmitChanges()
{
    var radios = getCheckedModules();
    var changed = false;
    // Compare each checked radio against the temp cache of last-known
    // states; returning false from each() stops at the first difference.
    radios.each(function(){
        if( getTempCache( $(this).attr( "name" ) + "-enabled" ) !=
            $(this).attr( "value" ) )
        {
            changed = true;
            return false;
        }
        return true;
    });
    // The Submit Changes button is only clickable when something differs.
    if( changed )
    {
        $("#opt-module-submit").button( "enable" );
    }
    else
    {
        $("#opt-module-submit").button( "disable" );
    }
}
/**
* Called when the server has reported that certain modules have been enabled.
*
* @param count The number of modules enabled
*/
function enabledModules( count )
{
    // Flash the "modules enabled" banner, then hide it again once the
    // fade-out animation completes.
    $("#opt-modulesenabled-status").html( count + " modules enabled.<br />" )
        .css( "visibility", "visible" ).fadeIn( ).delay( 2000 ).fadeOut( "slow" )
        .queue(function(){
            $(this).css( "visibility", "hidden" );
            $(this).dequeue();
        });
    // Resynchronize the Submit Changes button state.
    checkModuleSubmitChanges();
}
/**
* Called when the server has reported that certain modules have been disabled.
*
* @param count The number of modules disabled
*/
function disabledModules( count )
{
    // Flash the "modules disabled" banner, then hide it again once the
    // fade-out animation completes.
    $("#opt-modulesdisabled-status").html( count + " modules disabled.<br />" )
        .css( "visibility", "visible" ).fadeIn( ).delay( 2000 ).fadeOut( "slow" )
        .queue(function(){
            $(this).css( "visibility", "hidden" );
            $(this).dequeue();
        });
    // Resynchronize the Submit Changes button state.
    checkModuleSubmitChanges();
}<file_sep>// Initialize the window
// Admin module bootstrap: window plus temp-cache-view refresh hooks.
createWindow( "Admin" );
// Call our temp cache function whenever ajax is done
$("body").ajaxComplete( function(){admin_viewTempCache();} );
// Make sure window.prototype is setup properly
prepareThis();
// Set the window close callback to the temp cache function
this.prototype.cbs[ "windowclose" ] = function(){admin_viewTempCache();};
// Now add the glorified menu button for admin functions. Clicking it builds
// the whole tabbed admin UI (Tables / Run SQL / Temp Cache / Maintenance).
addMenuButton( "Admin", "ui-icon-star", function(){
    // Basic tab layout for admin functions
    var w = getPopupContext( "Admin" );
    w.html( "<div id='admintabs'><ul>" +
        "<li><a href='#admintab-Tables'>Tables</a></li>" +
        "<li><a href='#admintab-Run_SQL'>Run SQL</a></li>" +
        "<li><a href='#admintab-Temp_Cache'>View Temp Cache</a></li>" +
        "<li><a href='#admintab-Mx'>Maintenance</a></li>" +
        "</ul>" +
        "<div id='admintab-Tables'></div>" +
        "<div id='admintab-Run_SQL'></div>" +
        "<div id='admintab-Temp_Cache'></div>" +
        "<div id='admintab-Mx'></div>" +
        "</div>" );
    // Setup the actual tabs; selecting a tab (re)loads its content.
    $('#admintabs').tabs({
        select: function(event, ui){
            var panel = $(ui.panel);
            switch( ui.index )
            {
                case 0:
                    panel.html( "" );
                    doAjax( "a_gettables" );
                    break;
                case 1:
                    // Run SQL tab is static; nothing to refresh.
                    break;
                case 2:
                    panel.html( "" );
                    admin_viewTempCache( true );
            }
            return true;
        },
        create: function(event, ui){
            doAjax( "a_gettables" );
            return true;
        },
        idPrefix: "admintab-"
    });
    // Stylize the tabs a little better
    $("#admintabs li a").css( "padding", "0.2em" );
    // The SQL tab only deals directly with user input and thus does not
    // need to be updated ever, set it up now
    var sqltab = $("#admintab-Run_SQL");
    sqltab.append( "Select:<br>" ).
        append("<input type='text' id='admin_selectsql' style='width:100%'>").
        append($("<div>Select</div>").
            click(function(){
                doAjax( "a_runsqlselect", {
                    SQL: $("#admin_selectsql").val()
                });
            }).button()
        ).
        append("<br><br>").
        append("<input type='text' id='admin_othersql' style='width:100%'>").
        append($("<div>Insert/Update/Delete</div>").
            click(function(){
                doAjax( "a_runsqlother", {
                    SQL: $("#admin_othersql").val()
                });
            }).button()
        )
        .append("<br><br>Result:<div id='admin_sqlresult'></div>");
    // This is the options for the Maintenance Tab
    var mxtab = $("#admintab-Mx");
    mxtab.append( "<div id='adminmx-status'> </div>" );
    // Clearing the CSS/JS cache is destructive, so confirm via a dialog.
    mxtab.append( "CSS & JS Cache: " ).append(
        $("<div>Clear</div>").
            click(function(){
                // a workaround for a flaw in the demo system
                // (http://dev.jqueryui.com/ticket/4375), ignore!
                $( "#dialog:ui-dialog" ).dialog( "destroy" );
                $("body").append('<div id="dialog-confirm" ' +
                    'title="Do you wish to clear the CSS and JS Cache?"><p>These ' +
                    'items will be permanently deleted and cannot be recovered. ' +
                    'Are you sure?</p></div>');
                $( "#dialog-confirm" ).dialog({
                    resizable: false,
                    height:165,
                    width:360,
                    modal: true,
                    buttons: {
                        "Delete all items": function() {
                            doAjax( "a_runcssjsclear" );
                            $( this ).dialog( "close" );
                        },
                        Cancel: function() {
                            $( this ).dialog( "close" );
                        }
                    }
                });
            }).button()
    );
    // Disabled modules
    mxtab.append( "<br />Comma delimited list of modules to disable:" );
    mxtab.append( "<input type='text' id='a_disablemodules' size='20'>" );
    $("<button></button>").button({
        label: "Disable Modules"
    }).appendTo( mxtab ).click(function(){
        doAjax( "a_disablemodules", {
            MODULES: $("#a_disablemodules").val()
        });
    });
    mxtab.append( "<br />Possible values: " + getDefault( "ALL_MODULES" ) );
    // Shutdown java server button
    mxtab.append( "<br />" );
    $("<div>Shutdown Java Server</div>").click(function(){
        doAjax( "java_shutdown" );
    }).button().appendTo( mxtab );
    resizePopup( "Admin" );
});
/**
* Sets up the admin view for an SQL SELECT statement. Set error if an error
* occurred otherwise both headers and table are assumed to be set.
*
* @param headers Headers for the table
* @param table Values for the table
* @param error Set if an error occurred
*/
/**
 * Renders the result of an admin SELECT statement. When error is set it is
 * shown verbatim; otherwise headers/table are rendered as a sortable table.
 *
 * @param headers Column headers for the result table
 * @param table Row values for the result table
 * @param error Error text, set only when the statement failed
 */
function admin_selectSQLResult( headers, table, error )
{
    var result = $("#admin_sqlresult");
    if( error == undefined )
    {
        result.html( "" );
        result.append( makeSortableTable( headers, table, "admin-sql" ) );
    }
    else
    {
        result.html( error );
    }
}
/**
* Sets up the admin view for a SQL INSERT/UPDATE/DELETE statement. Set error
* if an error occurred otherwise modified is assumed to be set.
*
* @param modified Number of rows that were affected
* @param error String describing what went wrong (typically a MySQL error)
*/
/**
 * Renders the result of an admin INSERT/UPDATE/DELETE statement. When error
 * is set it is shown verbatim; otherwise the affected-row count is shown.
 *
 * @param modified Number of rows that were affected
 * @param error Error text (typically a MySQL error), set only on failure
 */
function admin_otherSQLResult( modified, error )
{
    var result = $("#admin_sqlresult");
    if( error == undefined )
    {
        result.html( modified + " rows were affected." );
    }
    else
    {
        result.html( error );
    }
}
/**
* Provides a generic accordion view for all of the tables in the database
* @param tablenames An array with all of the table names
*/
function admin_addTables( tablenames )
{
    // Set up the container div
    var txt = "<div id='admintableaccordion'>";
    for( var i = 0; i < tablenames.length; i++ )
    {
        // Each table has to be inside a h5 element; the content div uses the
        // id pattern "admin_tbl" + lowercased table name.
        var tablename = tablenames[ i ].toLowerCase();
        txt += "<h5><a href='#'>" + tablename.toCamelCase() +
            "</a></h5><div id='admin_tbl" + tablename + "'></div>";
    }
    txt += "</div>";
    // Add the container div to the view
    $("#admintab-Tables").append( txt );
    // Prepare it as an accordion, update whenever a view is selected,
    // start off initially closed
    $("#admintableaccordion").accordion({
        active: false,
        change: function(event,ui){
            // Empty text means the accordion was collapsed, not opened.
            var text = ui.newHeader.text().toLowerCase();
            if( text == "" )
            {
                return;
            }
            doAjax( "a_gettable", {
                TABLE: text
            });
            // Record which table is open so admin_tableView() knows where
            // to render the server's response.
            tempCache( "currentAccordionView", text, "Admin-Tables" );
        },
        collapsible: true,
        clearStyle: true
    });
}
/**
* View a specific table in the admin view. The current view is set in the
* currentAccordionView temp cache variable. Pull from it to know where to
* put the resulting table.
*
* @param values Values to populate the table with
*/
function admin_tableView( values )
{
    // The headers are the first row; shift() removes them in place, so the
    // caller's array is mutated here.
    var headers = values.shift();
    // Get the view and reset it
    var view = $("#admin_tbl" + getTempCache( "currentAccordionView" ) );
    view.html( "" );
    // Simply add the sortable table to the accordion view; the callback
    // resizes the accordion again after any re-sort.
    view.append( makeSortableTable( headers, values, "admin-table", function(){
        $("#admintableaccordion").accordion( "resize" );
    }));
    $("#admintableaccordion").accordion( "resize" );
}
/**
* Call when there are no values in the table to view.
*/
/**
 * Called when the currently open table has no rows to display.
 */
function admin_noTableView()
{
    $("#admin_tbl" + getTempCache( "currentAccordionView" ) )
        .html( "No values." );
}
/**
* Function that causes an update of the temp cache table. Since the temp cache
* is used frequently this function is called after every AJAX command has
* finished and whenever a window closes. If the view is visible then the table
* will be updated. The table consists of the key/value/clearRegion for all of
* the temp cache variables excluding the ones required to setup the temp cache.
*
* @param force Force the temp cache table to be regenerated
*/
function admin_viewTempCache( force )
{
    // Check if the tab is even visible, if it isn't and force is not set then
    // abort
    var obj = $("#admintab-Temp_Cache");
    // NOTE(review): && binds tighter than ||, so `force` only overrides the
    // display check, not the length check — confirm that is intended.
    if( obj.length != 1 || obj.css( 'display' ) == 'none' && !force )
    {
        return;
    }
    // Clear the previous values and sort for the table
    tempCache( "admin-tempcache-values", "", "Admin-TempCache" );
    tempCache( "admin-tempcache-lastsort", "", "Admin-TempCache" );
    // Create our own array of cache values that consists of the
    // key/value/clearRegion.
    var cacheValues = [];
    for( var ind in this.prototype.cacheValues )
    {
        // Ignore temp cache variables with admin-tempcache in them, since
        // they belong to this view itself.
        if( ind.indexOf( "admin-tempcache" ) == -1 )
        {
            cacheValues.push( [ ind, this.prototype.cacheValues[ ind ],
                this.prototype.clearRegions[ ind ] ] );
        }
    }
    // Reset the view to a sortable table with the array that was just
    // generated. Ensure the popup is visible appropriately.
    obj.children().remove();
    obj.append( makeSortableTable( ["Name", "Value", "Region"],
        cacheValues, "admin-tempcache" ));
    resizePopup( "Admin" );
}
/**
 * Write the maintenance status text into the admin status element and
 * apply the jQuery-UI highlight styling.
 *
 * @param txt Status text/markup to display
 */
function admin_setMaintenanceStatus( txt )
{
    var statusEl = $( '#adminmx-status' );
    statusEl.html( txt );
    statusEl.addClass( "ui-state-highlight" );
    statusEl.addClass( "ui-corner-all" );
}
"JavaScript",
"PHP"
] | 29 | PHP | JefferyCReightler/LAD | 22e67cdae1986a8fb06259caff6d67d36d2654e4 | 26b8890d7304dfa2e7240124259de4b9544be896 |
refs/heads/main | <file_sep># ensRepo
[Embedded and Software Lab.](http://eslab.ajou.ac.kr/)
Department of. <i>AI Convergence Network & Electronics and Computer Engineering, Ajou University</i>
San 5 Wonchon-Dong Youngtong-Gu, Suwon, Kyeonggi-Do, 443-749, Korea
## Development
### Objective
- <b>Predictive Maintenance</b> of <b>collaborative robot</b> in <i>smart factory</i>
- Fault Diagnosis
- Anomaly Detection
- Fault Detection
- Failure Prediction
- Health Assessment
### Environment
- Machinery: Collaborative Robot
1) Device #1. <b>6-axis</b> cobot arm "<i>[Niryo-one](https://niryo.com/product/ned/)</i>"

 2) Device #2. <b>4-axis</b> robot arm "<i>[Dobot magician lite](https://www.dobot.cc/magician-lite.html)</i>"

### Database design
- <i>ensdr</i> (embedded and software data repository)
- <b>version information</b>
- release 0.5
- initiate <i>ensdr</i> database (creation of basic table)
<file_sep>-- --------------------------------------------------------
-- Host: 172.16.58.3
-- Server version: 10.2.14-MariaDB - mariadb.org binary distribution
-- Server OS: Win64
-- HeidiSQL version: 9.3.0.4984
-- --------------------------------------------------------
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-- Dumping database structure for ensdr
CREATE DATABASE IF NOT EXISTS `ensdr` /*!40100 DEFAULT CHARACTER SET utf8 */;
USE `ensdr`;
-- Dumping structure for table ensdr.angle_table
-- Per-tick joint-angle samples (j1..j6) keyed by device and command.
CREATE TABLE IF NOT EXISTS `angle_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `ang_j1` float DEFAULT NULL,
  `ang_j2` float DEFAULT NULL,
  `ang_j3` float DEFAULT NULL,
  `ang_j4` float DEFAULT NULL,
  `ang_j5` float DEFAULT NULL,
  `ang_j6` float DEFAULT NULL,
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_angle_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_angle_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.bootidx_table
-- Boot/shutdown sessions per device.
-- NOTE(review): bootidx_table and programidx_table reference each other on
-- boot_idx (mutual FKs); the dump relies on FOREIGN_KEY_CHECKS=0 above to load.
CREATE TABLE IF NOT EXISTS `bootidx_table` (
  `boot_idx` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `eventF_idx` int(11) unsigned DEFAULT NULL,
  `device_id` int(11) unsigned DEFAULT NULL,
  `bootDate` date DEFAULT NULL,
  `bootTime` tinytext DEFAULT NULL,
  `bootTick` bigint(20) unsigned DEFAULT NULL,
  `offDate` date DEFAULT NULL,
  `offTime` tinytext DEFAULT NULL,
  `offTick` bigint(20) unsigned DEFAULT NULL,
  PRIMARY KEY (`boot_idx`),
  KEY `eventF_idx` (`eventF_idx`),
  KEY `device_id` (`device_id`),
  CONSTRAINT `FK_bootidx_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_bootidx_table_event_table` FOREIGN KEY (`eventF_idx`) REFERENCES `event_table` (`eventF_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_bootidx_table_programidx_table` FOREIGN KEY (`boot_idx`) REFERENCES `programidx_table` (`boot_idx`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.commandidx_table
-- One row per command execution (start/end timestamps and duration).
CREATE TABLE IF NOT EXISTS `commandidx_table` (
  `command_idx` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `program_idx` int(11) unsigned DEFAULT NULL,
  `command_id` int(11) unsigned DEFAULT NULL,
  `commandStartDate` date DEFAULT NULL,
  `commandStartTime` tinytext DEFAULT NULL,
  `commandStartTick` bigint(20) unsigned DEFAULT NULL,
  `commandEndDate` date DEFAULT NULL,
  `commandEndTime` tinytext DEFAULT NULL,
  `commandEndTick` bigint(20) unsigned DEFAULT NULL,
  `movingTime` int(11) unsigned DEFAULT NULL,
  PRIMARY KEY (`command_idx`),
  KEY `program_idx` (`program_idx`),
  KEY `command_id` (`command_id`),
  CONSTRAINT `FK_commandidx_table_commandid_table` FOREIGN KEY (`command_idx`) REFERENCES `commandid_table` (`command_id`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_commandidx_table_programidx_table` FOREIGN KEY (`program_idx`) REFERENCES `programidx_table` (`program_idx`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.commandid_table
-- Command definitions (previous/current command text) per program/device.
CREATE TABLE IF NOT EXISTS `commandid_table` (
  `command_id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `program_id` int(11) unsigned DEFAULT NULL,
  `device_id` int(11) unsigned DEFAULT NULL,
  `commandLast` text DEFAULT NULL,
  `commandCur` text DEFAULT NULL,
  PRIMARY KEY (`command_id`),
  KEY `program_id` (`program_id`),
  KEY `device_id` (`device_id`),
  CONSTRAINT `FK_commandid_table_commandidx_table` FOREIGN KEY (`command_id`) REFERENCES `commandidx_table` (`command_id`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_commandid_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_commandid_table_programid_table` FOREIGN KEY (`program_id`) REFERENCES `programid_table` (`program_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.current_table
-- Per-tick joint-current samples (j1..j6).
CREATE TABLE IF NOT EXISTS `current_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `cur_j1` float DEFAULT NULL,
  `cur_j2` float DEFAULT NULL,
  `cur_j3` float DEFAULT NULL,
  `cur_j4` float DEFAULT NULL,
  `cur_j5` float DEFAULT NULL,
  `cur_j6` float DEFAULT NULL,
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_current_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_current_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.deviceid_table
-- Registered devices (robot arms, conveyor belts, controllers).
CREATE TABLE IF NOT EXISTS `deviceid_table` (
  `device_id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `deviceName` tinytext DEFAULT NULL,
  `deviceType` enum('manipulator','conveyorbelt','controller') DEFAULT NULL,
  `devicePicture` text DEFAULT NULL,
  PRIMARY KEY (`device_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.event_table
-- Event log (level/group/identifier/free-text note) per device.
CREATE TABLE IF NOT EXISTS `event_table` (
  `eventF_idx` int(11) unsigned NOT NULL,
  `device_id` int(11) unsigned DEFAULT NULL,
  `eventDate` date DEFAULT NULL,
  `eventTime` tinytext DEFAULT NULL,
  `eventLevel` enum('INFO','WARN','FAULT','DEBUG') DEFAULT NULL,
  `eventGroup` enum('MACHINE','FRAMEWORK','CONTROLLER','DETECTOR') DEFAULT NULL,
  `eventTick` bigint(13) DEFAULT NULL,
  `eventIdt` enum('BOOT','STATE','EVENT','PROGRAM','MOTION','COMMUNICATION','ERROR','ANOMALY','FAULT') DEFAULT NULL,
  `eventNote` text DEFAULT NULL,
  KEY `eventF_idx` (`eventF_idx`),
  KEY `device_id` (`device_id`),
  CONSTRAINT `FK_event_table_bootidx_table` FOREIGN KEY (`eventF_idx`) REFERENCES `bootidx_table` (`eventF_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_event_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.file_table
-- Recorded data files with their covered time span.
CREATE TABLE IF NOT EXISTS `file_table` (
  `F_idx` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `device_id` int(11) unsigned DEFAULT NULL,
  `filePath` text DEFAULT NULL,
  `fileSize` int(11) DEFAULT NULL,
  `startDate` date DEFAULT NULL,
  `startTime` tinytext DEFAULT NULL,
  `startTick` bigint(13) DEFAULT NULL,
  `endDate` date DEFAULT NULL,
  `endTime` tinytext DEFAULT NULL,
  `endTick` bigint(13) DEFAULT NULL,
  PRIMARY KEY (`F_idx`),
  KEY `FK_file_table_deviceid_table` (`device_id`),
  CONSTRAINT `FK_file_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.marker_table
-- Marker positions (x/y/z) observed per tick.
CREATE TABLE IF NOT EXISTS `marker_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `pos_x` float DEFAULT NULL,
  `pos_y` float DEFAULT NULL,
  `pos_z` float DEFAULT NULL,
  `marker_id` int(11) NOT NULL,
  PRIMARY KEY (`marker_id`),
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_marker_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_marker_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.penel_table
-- NOTE(review): "penel" appears to be a typo for "panel" (the README lists
-- "panel_table"); renaming would break existing references, so documented as-is.
CREATE TABLE IF NOT EXISTS `penel_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `penel_x` float DEFAULT NULL,
  `penel_y` float DEFAULT NULL,
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_penel_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_penel_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.postion_table
-- End-effector pose samples (x/y/z + roll/pitch/yaw) per tick.
-- NOTE(review): "postion" appears to be a typo for "position" (the README lists
-- "position_table"); renaming would break existing references, so documented as-is.
CREATE TABLE IF NOT EXISTS `postion_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `pos_x` float DEFAULT NULL,
  `pos_y` float DEFAULT NULL,
  `pos_z` float DEFAULT NULL,
  `roll` float DEFAULT NULL,
  `pitch` float DEFAULT NULL,
  `yaw` float DEFAULT NULL,
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_postion_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_postion_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.programidx_table
-- One row per program run within a boot session.
CREATE TABLE IF NOT EXISTS `programidx_table` (
  `program_idx` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `program_id` int(11) unsigned DEFAULT NULL,
  `boot_idx` int(11) unsigned NOT NULL,
  `programStartDate` date DEFAULT NULL,
  `programStartTime` tinytext DEFAULT NULL,
  `programStartTick` bigint(13) unsigned DEFAULT NULL,
  `programEndDate` date DEFAULT NULL,
  `programEndTime` tinytext DEFAULT NULL,
  `programEndTick` bigint(13) unsigned DEFAULT NULL,
  PRIMARY KEY (`program_idx`),
  KEY `program_id` (`program_id`),
  KEY `boot_idx` (`boot_idx`),
  CONSTRAINT `FK_programidx_table_bootidx_table` FOREIGN KEY (`boot_idx`) REFERENCES `bootidx_table` (`boot_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_programidx_table_commandidx_table` FOREIGN KEY (`program_idx`) REFERENCES `commandidx_table` (`program_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_programidx_table_programid_table` FOREIGN KEY (`program_id`) REFERENCES `programid_table` (`program_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.programid_table
-- Program definitions (name) per device.
CREATE TABLE IF NOT EXISTS `programid_table` (
  `program_id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `programName` tinytext DEFAULT NULL,
  `device_id` int(11) unsigned NOT NULL,
  PRIMARY KEY (`program_id`),
  KEY `device_id` (`device_id`),
  CONSTRAINT `FK_programid_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.temperature_table
-- Per-tick joint-temperature samples (j1..j6).
CREATE TABLE IF NOT EXISTS `temperature_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `temp_j1` float DEFAULT NULL,
  `temp_j2` float DEFAULT NULL,
  `temp_j3` float DEFAULT NULL,
  `temp_j4` float DEFAULT NULL,
  `temp_j5` float DEFAULT NULL,
  `temp_j6` float DEFAULT NULL,
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_temperature_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_temperature_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
-- Dumping structure for table ensdr.voltage_table
-- Per-tick joint-voltage samples (j1..j6).
CREATE TABLE IF NOT EXISTS `voltage_table` (
  `device_id` int(11) unsigned DEFAULT NULL,
  `command_idx` int(11) unsigned DEFAULT NULL,
  `tick` bigint(13) unsigned DEFAULT NULL,
  `tick_idx` bigint(13) unsigned DEFAULT NULL,
  `vol_j1` float DEFAULT NULL,
  `vol_j2` float DEFAULT NULL,
  `vol_j3` float DEFAULT NULL,
  `vol_j4` float DEFAULT NULL,
  `vol_j5` float DEFAULT NULL,
  `vol_j6` float DEFAULT NULL,
  KEY `device_id` (`device_id`),
  KEY `command_idx` (`command_idx`),
  CONSTRAINT `FK_voltage_table_commandidx_table` FOREIGN KEY (`command_idx`) REFERENCES `commandidx_table` (`command_idx`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `FK_voltage_table_deviceid_table` FOREIGN KEY (`device_id`) REFERENCES `deviceid_table` (`device_id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- No data selected for export.
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
<file_sep>### Database design
- <i>ensdr</i> (embedded and software data repository)
- <b>version information</b>
- release 0.5
- initiate <i>ensdr</i> database (creation of basic table)
- <b>table specification</b>
- bootidx_table
- commandidx_table
- commandid_table
- deviceid_table
- event_table
- file_table
- programidx_table
- programid_table
- angle_table
- voltage_table
- current_table
- temperature_table
- position_table
- marker_table
- panel_table
| c130470f7d3bdd5d37bca1753a041422473df84b | [
"Markdown",
"SQL"
] | 3 | Markdown | dongs0125/ensRepo | 9980b4711a135e9e431b7852606d622aafb19d65 | ee242f90bd187c223a9aa9040ba917e3c9154735 |
refs/heads/master | <repo_name>wovenpay/wovenpay-android<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnTokenRefreshListener.java
package com.wovenpay.wovenpayments.interfaces;
/**
 * Callback for the token-refresh API call.
 * <p>
 * Created by thatmarc_ on 07-Mar-18.
 */
public interface OnTokenRefreshListener {
    /**
     * Invoked when the refresh call finishes.
     *
     * @param success {@code true} if a new token was issued
     * @param token   the refreshed token, or {@code null} on failure
     * @param message error description on failure, {@code null} on success
     */
    void onRefresh(boolean success, String token, String message);
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/AuthComplete.java
package com.wovenpay.wovenpayments.interfaces;
/**
 * Callback for the authentication (login) API call.
 * <p>
 * Created by thatmarc_ on 07-Mar-18.
 */
public interface AuthComplete {
    /**
     * Invoked when authentication finishes.
     *
     * @param success {@code true} if a token was issued
     * @param token   the authorization token, or {@code null} on failure
     * @param message server/HTTP message describing the outcome
     */
    void onComplete(boolean success, String token, String message);
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnBusinessListListener.java
package com.wovenpay.wovenpayments.interfaces;
import com.wovenpay.wovenpayments.models.Business;
import java.util.List;
/**
 * Callback for listing all businesses linked to an account.
 * <p>
 * Created by thatmarc_ on 07-Mar-18.
 */
public interface OnBusinessListListener {
    /**
     * Invoked when the business-list call finishes.
     *
     * @param success    {@code true} on a successful response
     * @param businesses the fetched businesses, or {@code null} on failure
     * @param message    error description on failure, {@code null} on success
     */
    void onComplete(boolean success, List<Business> businesses, String message);
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/WovenPay.java
package com.wovenpay.wovenpayments;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.wovenpay.wovenpayments.interfaces.AuthComplete;
import com.wovenpay.wovenpayments.interfaces.OnAccountListener;
import com.wovenpay.wovenpayments.interfaces.OnBusinessListListener;
import com.wovenpay.wovenpayments.interfaces.OnBusinessListener;
import com.wovenpay.wovenpayments.interfaces.OnCustomerListener;
import com.wovenpay.wovenpayments.interfaces.OnCustomersListener;
import com.wovenpay.wovenpayments.interfaces.OnDeleteListener;
import com.wovenpay.wovenpayments.interfaces.OnPaymentListener;
import com.wovenpay.wovenpayments.interfaces.OnPlanListener;
import com.wovenpay.wovenpayments.interfaces.OnPlansListener;
import com.wovenpay.wovenpayments.interfaces.OnStatusListener;
import com.wovenpay.wovenpayments.interfaces.OnTokenRefreshListener;
import com.wovenpay.wovenpayments.interfaces.OnTokenVerifyListener;
import com.wovenpay.wovenpayments.interfaces.OnTransactionsListener;
import com.wovenpay.wovenpayments.interfaces.OnWebhookListener;
import com.wovenpay.wovenpayments.interfaces.WovenService;
import com.wovenpay.wovenpayments.models.AccountResponse;
import com.wovenpay.wovenpayments.models.AuthenticateModel;
import com.wovenpay.wovenpayments.models.Business;
import com.wovenpay.wovenpayments.models.CreatePlanResponse;
import com.wovenpay.wovenpayments.models.Customer;
import com.wovenpay.wovenpayments.models.EditBusinessPayload;
import com.wovenpay.wovenpayments.models.GetCustomersResponse;
import com.wovenpay.wovenpayments.models.GetPlanResponse;
import com.wovenpay.wovenpayments.models.ListTransactionsResponse;
import com.wovenpay.wovenpayments.models.Order;
import com.wovenpay.wovenpayments.models.PaymentChargeResponse;
import com.wovenpay.wovenpayments.models.PaymentPayload;
import com.wovenpay.wovenpayments.models.Plan;
import com.wovenpay.wovenpayments.models.TokenResponse;
import com.wovenpay.wovenpayments.models.TransactionStatusResponse;
import com.wovenpay.wovenpayments.models.Webhook;
import java.util.List;
import java.util.concurrent.TimeUnit;
import okhttp3.OkHttpClient;
import okhttp3.ResponseBody;
import okhttp3.logging.HttpLoggingInterceptor;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
* Created by thatmarc_ on 06-Mar-18.
*/
public class WovenPay {
    // Base URLs for the sandbox (test) and production environments.
    private final String SANDBOX_URL = "http://sandbox.wovenpay.com";
    private final String LIVE_URL = "https://api.wovenpay.com";
    private Retrofit retrofit;            // HTTP client, built once in the constructor
    private WovenService wovenService;    // Retrofit service with all API endpoints
    private String url;                   // NOTE(review): never read in the visible code — confirm whether it can be removed
    private String token;                 // JWT used by the Authorization-header endpoints
    private int timeout = 30;             // connect/read timeout (seconds); only read at construction time
    private int version = 1;              // NOTE(review): not referenced in the visible code
    private String apiKey;                // presumably consumed by getXpayHeader() — not visible here, verify
    private String apiSecret;             // presumably consumed by getXpayHeader() — not visible here, verify
    private boolean live = false;         // true = production API, false = sandbox
    /**
     * Creates a Woven Pay API client.
     *
     * @param apiKey    API key issued for the business/app
     * @param apiSecret API secret paired with {@code apiKey}
     * @param live      {@code true} to target the production API, {@code false} for the sandbox
     */
    public WovenPay(String apiKey, String apiSecret, boolean live) {
        this.apiKey = apiKey;
        this.apiSecret = apiSecret;
        this.live = live;
        // Lenient Gson tolerates slightly malformed JSON in API responses.
        Gson gson = new GsonBuilder()
                .setLenient()
                .create();
        // NOTE(review): Level.BODY logs complete request/response bodies, which can
        // expose credentials and tokens in production logcat — consider gating the
        // level on the `live` flag.
        HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor();
        interceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
        // `timeout` is captured here; later setTimeout() calls do not rebuild the client.
        OkHttpClient client = new OkHttpClient.Builder()
                .readTimeout(timeout, TimeUnit.SECONDS)
                .connectTimeout(timeout, TimeUnit.SECONDS)
                .addInterceptor(interceptor).build();
        this.retrofit = new Retrofit.Builder()
                .baseUrl(live ? this.LIVE_URL : this.SANDBOX_URL)
                .addConverterFactory(GsonConverterFactory.create(gson))
                .client(client)
                .build();
        this.wovenService = retrofit.create(WovenService.class);
    }
    /** @return the authorization token currently held by this client, or {@code null}. */
    public String getToken() {
        return token;
    }
    /** @param token authorization token to use for Authorization-header endpoints. */
    public void setToken(String token) {
        this.token = token;
    }
    /** @return the HTTP connect/read timeout in seconds (default 30). */
    public int getTimeout() {
        return timeout;
    }
    /**
     * @param timeout HTTP connect/read timeout in seconds.
     * NOTE(review): the timeout is only read while the OkHttp client is built in the
     * constructor, so calling this afterwards has no visible effect — confirm intent.
     */
    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }
    /** @return the API version number (default 1; not referenced in the visible code). */
    public int getVersion() {
        return version;
    }
    /**
     * @param version API version to use.
     * NOTE(review): {@code version} is not referenced anywhere in the visible code —
     * confirm whether this setter has any effect.
     */
    public void setVersion(int version) {
        this.version = version;
    }
/**
* Authenticate woven api
*
* @param email Email
* @param password <PASSWORD>
* @param authComplete For callback
*/
public void getAuthorizationToken(String email, String password, final AuthComplete authComplete) {
AuthenticateModel authData = new AuthenticateModel(email, password);
wovenService.authorize(authData).enqueue(new Callback<TokenResponse>() {
@Override
public void onResponse(Call<TokenResponse> call, Response<TokenResponse> response) {
if (response.isSuccessful()) {
authComplete.onComplete(true, response.body().getToken(),
response.message());
} else {
authComplete.onComplete(false, null, response.message());
}
}
@Override
public void onFailure(Call<TokenResponse> call, Throwable t) {
t.printStackTrace();
}
});
}
/**
* Refresh auth token, get a new one
*
* @param onTokenRefreshListener Callback interface method
*/
public void refreshAuthorizationToken(final OnTokenRefreshListener onTokenRefreshListener) {
TokenResponse tokenResponse = new TokenResponse();
tokenResponse.setToken(token);
wovenService.refreshToken(tokenResponse).enqueue(new Callback<TokenResponse>() {
@Override
public void onResponse(Call<TokenResponse> call, Response<TokenResponse> response) {
if (response.isSuccessful()) {
onTokenRefreshListener.onRefresh(true, response.body().getToken(),
null);
return;
}
onTokenRefreshListener.onRefresh(false, null, response.message());
}
@Override
public void onFailure(Call<TokenResponse> call, Throwable t) {
t.printStackTrace();
}
});
}
/**
* Verify token
*
* @param onTokenVerifyListener
*/
public void verifyAuthorizationToken(final OnTokenVerifyListener onTokenVerifyListener) {
TokenResponse tokenResponse = new TokenResponse();
tokenResponse.setToken(token);
wovenService.verifyToken(tokenResponse).enqueue(new Callback<TokenResponse>() {
@Override
public void onResponse(Call<TokenResponse> call, Response<TokenResponse> response) {
if (response.isSuccessful()) {
onTokenVerifyListener.onVerify(true, response.body().getToken(),
null);
return;
}
onTokenVerifyListener.onVerify(false, null, response.message());
}
@Override
public void onFailure(Call<TokenResponse> call, Throwable t) {
}
});
}
/**
* Charge payment aganist a user
*
* @param amount Amount to charge
* @param customerEmail Customer email
* @param method Method of payment, Only accepts 'mobile.mpesa'
* @param mobile Mobile number to charge. If you have used 'mobile.mpesa' as method you have to
* provide a valid mpesa number
* @param orderDescription Order description
* @param reference Reference
* @param onPaymentListener Callback method for when a charge completes
*/
public void chargePayment(double amount, String customerEmail, String method, String mobile,
String orderDescription, String reference,
final OnPaymentListener onPaymentListener) {
PaymentPayload paymentPayload = new PaymentPayload();
paymentPayload.setAmount(amount);
Customer customer = new Customer();
customer.setEmail(customerEmail);
paymentPayload.setCustomer(customer);
paymentPayload.setMethod(method);
paymentPayload.setMobile(mobile);
paymentPayload.setReference(reference);
Order order = new Order();
order.setDescription(orderDescription);
paymentPayload.setOrder(order);
wovenService.chargePayment(getXpayHeader(), paymentPayload).enqueue(new Callback<PaymentChargeResponse>() {
@Override
public void onResponse(Call<PaymentChargeResponse> call, Response<PaymentChargeResponse> response) {
if (response.isSuccessful() && response.body().getStatus().equals("pending")) {
onPaymentListener.onComplete(true, response.body().getTransactionId(), null);
return;
}
if (response.isSuccessful() && response.body().getStatus().equals("failed")) {
onPaymentListener.onComplete(false, response.body().getTransactionId(), response.body().getMetadata().toString());
}
}
@Override
public void onFailure(Call<PaymentChargeResponse> call, Throwable t) {
t.printStackTrace();
}
});
}
/**
* List all transactions made to a business/app
*
* @param onTransactionsListener Callback for when the async job is done
*/
public void transactions(final OnTransactionsListener onTransactionsListener) {
wovenService.listTransactions(getXpayHeader()).enqueue(new Callback<ListTransactionsResponse>() {
@Override
public void onResponse(Call<ListTransactionsResponse> call, Response<ListTransactionsResponse> response) {
if (response.isSuccessful()) {
onTransactionsListener.onComplete(true, response.body().getTransactions(), null);
return;
}
onTransactionsListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<ListTransactionsResponse> call, Throwable t) {
t.printStackTrace();
}
});
}
/**
* Check status of a transaction
*
* @param transactionId Transaction id of the transaction you want to check status for
* @param onStatusListener Callback for when transaction status check is done
*/
public void status(String transactionId, final OnStatusListener onStatusListener) {
wovenService.status(getXpayHeader(), transactionId).enqueue(
new Callback<TransactionStatusResponse>() {
@Override
public void onResponse(Call<TransactionStatusResponse> call, Response<TransactionStatusResponse> response) {
if (response.isSuccessful() && response.code() == 200) {
onStatusListener.onComplete(true, response.body().getStatus(), response.body().getPaymentId(), null);
return;
}
onStatusListener.onComplete(false, null, null, response.body().getErrors().getMessage());
}
@Override
public void onFailure(Call<TransactionStatusResponse> call, Throwable t) {
t.printStackTrace();
onStatusListener.onComplete(false, null, null, t.getLocalizedMessage());
}
}
);
}
/**
* Get account details
*
* @param onAccountListener Callback
*/
public void accountDetails(final OnAccountListener onAccountListener) {
wovenService.accountDetails(getAuthToken()).enqueue(
new Callback<AccountResponse>() {
@Override
public void onResponse(Call<AccountResponse> call, Response<AccountResponse> response) {
if (response.isSuccessful() && response.code() == 200) {
onAccountListener.onComplete(true, response.body(), null);
return;
}
if (response.code() == 401) {
onAccountListener.onComplete(false, null, response.message());
}
}
@Override
public void onFailure(Call<AccountResponse> call, Throwable t) {
t.printStackTrace();
onAccountListener.onComplete(false, null, t.getMessage());
}
}
);
}
/**
* Get all businesses
*
* @param onBusinessListListener Callback
*/
public void getAllBusinesses(final OnBusinessListListener onBusinessListListener) {
wovenService.allBusinesses(getAuthToken()).enqueue(new Callback<List<Business>>() {
@Override
public void onResponse(Call<List<Business>> call, Response<List<Business>> response) {
if (response.isSuccessful() && response.code() == 200) {
onBusinessListListener.onComplete(true, response.body(), null);
return;
}
onBusinessListListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<List<Business>> call, Throwable t) {
t.printStackTrace();
onBusinessListListener.onComplete(false, null, t.getLocalizedMessage());
}
});
}
public void getBusiness(String businessId, final OnBusinessListener onBusinessListener) {
wovenService.getBusiness(getAuthToken(), businessId).enqueue(new Callback<Business>() {
@Override
public void onResponse(Call<Business> call, Response<Business> response) {
if (response.isSuccessful() && response.code() == 200) {
onBusinessListener.onComplete(true, response.body(), null);
return;
}
onBusinessListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<Business> call, Throwable t) {
t.printStackTrace();
onBusinessListener.onComplete(false, null, t.getLocalizedMessage());
}
});
}
public void editBusiness(String businessId, String name, String email, int phoneNumber, String country, final OnBusinessListener onBusinessListener) {
EditBusinessPayload business = new EditBusinessPayload();
business.setName(name);
business.setEmail(email);
business.setPhoneNumber(phoneNumber);
business.setCountry(country);
wovenService.editBusiness(getAuthToken(), businessId, business).enqueue(new Callback<Business>() {
@Override
public void onResponse(Call<Business> call, Response<Business> response) {
if (response.isSuccessful() && response.code() == 200) {
onBusinessListener.onComplete(true, response.body(), null);
return;
}
onBusinessListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<Business> call, Throwable t) {
t.printStackTrace();
onBusinessListener.onComplete(false, null, t.getLocalizedMessage());
}
});
}
public void createCustomer(Customer customer, final OnCustomerListener onCustomerListener) {
wovenService.createCustomer(getAuthToken(), customer).enqueue(new Callback<Customer>() {
@Override
public void onResponse(Call<Customer> call, Response<Customer> response) {
if (response.isSuccessful() && response.code() == 201) {
onCustomerListener.onComplete(true, response.body(), null);
return;
}
if (response.code() == 400) {
onCustomerListener.onComplete(false, null, "Customer with given details already exists.");
return;
}
onCustomerListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<Customer> call, Throwable t) {
t.printStackTrace();
onCustomerListener.onComplete(false, null, t.getLocalizedMessage());
}
});
}
public void editCustomer(String customerId, Customer customer, final OnCustomerListener onCustomerListener) {
wovenService.editCustomer(getAuthToken(), customerId, customer).enqueue(new Callback<Customer>() {
@Override
public void onResponse(Call<Customer> call, Response<Customer> response) {
if (response.isSuccessful() && response.code() == 200) {
onCustomerListener.onComplete(true, response.body(), null);
return;
}
onCustomerListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<Customer> call, Throwable t) {
t.printStackTrace();
onCustomerListener.onComplete(false, null, t.getLocalizedMessage());
}
});
}
public void deleteCustomer(String customerId, final OnDeleteListener onDeleteListener) {
wovenService.deleteCustomer(getAuthToken(), customerId).enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
if (response.isSuccessful() && response.code() == 204) {
onDeleteListener.onComplete(true, "Customer deleted");
return;
}
if (response.code() == 404) {
onDeleteListener.onComplete(false, "Customer not found");
return;
}
onDeleteListener.onComplete(false, response.message());
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
t.printStackTrace();
onDeleteListener.onComplete(false, t.getLocalizedMessage());
}
});
}
public void getCustomer(String customerId, final OnCustomerListener onCustomerListener) {
wovenService.getCustomer(getAuthToken(), customerId).enqueue(new Callback<Customer>() {
@Override
public void onResponse(Call<Customer> call, Response<Customer> response) {
if (response.isSuccessful() && response.code() == 200) {
onCustomerListener.onComplete(true, response.body(), null);
return;
}
if (response.code() == 404) {
onCustomerListener.onComplete(false, null, "No Customer matches the given query.");
return;
}
onCustomerListener.onComplete(false, null, response.message());
}
@Override
public void onFailure(Call<Customer> call, Throwable t) {
t.printStackTrace();
onCustomerListener.onComplete(false, null, t.getLocalizedMessage());
}
});
}
public void getCustomers(final OnCustomersListener onCustomersListener) {
    // Lists all customers for the authorized account; the wrapper response's
    // result list is unwrapped before being handed to the listener.
    wovenService.getCustomers(getAuthToken()).enqueue(new Callback<GetCustomersResponse>() {
        @Override
        public void onResponse(Call<GetCustomersResponse> call, Response<GetCustomersResponse> response) {
            final boolean ok = response.isSuccessful() && response.code() == 200;
            if (ok) {
                onCustomersListener.onComplete(true, response.body().getResults(), null);
            } else {
                onCustomersListener.onComplete(false, null, response.message());
            }
        }

        @Override
        public void onFailure(Call<GetCustomersResponse> call, Throwable t) {
            // Transport-level failure: report the localized reason.
            t.printStackTrace();
            onCustomersListener.onComplete(false, null, t.getLocalizedMessage());
        }
    });
}
public void createPlan(final Plan plan,
                       final OnPlanListener onPlanListener) {
    // Creates a subscription plan. On HTTP 201 the response payload is copied
    // into a fresh Plan instance handed to the listener.
    wovenService.createPlan(getAuthToken(), plan).enqueue(new Callback<CreatePlanResponse>() {
        @Override
        public void onResponse(Call<CreatePlanResponse> call, Response<CreatePlanResponse> response) {
            if (response.isSuccessful() && response.code() == 201) {
                CreatePlanResponse body = response.body();
                Plan created = new Plan();
                created.setBusiness(body.getBusiness());
                created.setName(body.getName());
                // Bug fix: Double.valueOf(null) would throw inside the callback and
                // crash the app instead of reporting failure; only parse a present price.
                String rawPrice = body.getPrice();
                if (rawPrice != null) {
                    created.setPrice(Double.valueOf(rawPrice));
                }
                // NOTE(review): the server-generated plan id is never copied here, so
                // callers reading getId() on the returned Plan get null — TODO confirm
                // whether CreatePlanResponse exposes the id and propagate it.
                onPlanListener.onComplete(true, created, null);
                return;
            }
            onPlanListener.onComplete(false, null, response.message());
        }

        @Override
        public void onFailure(Call<CreatePlanResponse> call, Throwable t) {
            // Transport-level failure: report the localized reason.
            t.printStackTrace();
            onPlanListener.onComplete(false, null, t.getLocalizedMessage());
        }
    });
}
public void getPlans(final OnPlansListener onPlansListener) {
    // Lists all plans for the authorized account, unwrapping the response envelope.
    wovenService.getPlans(getAuthToken()).enqueue(new Callback<GetPlanResponse>() {
        @Override
        public void onResponse(Call<GetPlanResponse> call, Response<GetPlanResponse> response) {
            final boolean ok = response.isSuccessful() && response.code() == 200;
            if (ok) {
                onPlansListener.onComplete(true, response.body().getPlans(), null);
            } else {
                onPlansListener.onComplete(false, null, response.message());
            }
        }

        @Override
        public void onFailure(Call<GetPlanResponse> call, Throwable t) {
            // Transport-level failure: report the localized reason.
            t.printStackTrace();
            onPlansListener.onComplete(false, null, t.getLocalizedMessage());
        }
    });
}
public void getPlan(String planId, final OnPlanListener onPlanListener) {
    // Fetches a single plan by id; the deserialized body goes straight to the listener.
    wovenService.getPlan(getAuthToken(), planId).enqueue(new Callback<Plan>() {
        @Override
        public void onResponse(Call<Plan> call, Response<Plan> response) {
            final boolean ok = response.isSuccessful() && response.code() == 200;
            if (ok) {
                onPlanListener.onComplete(true, response.body(), null);
            } else {
                onPlanListener.onComplete(false, null, response.message());
            }
        }

        @Override
        public void onFailure(Call<Plan> call, Throwable t) {
            // Transport-level failure: report the localized reason.
            t.printStackTrace();
            onPlanListener.onComplete(false, null, t.getLocalizedMessage());
        }
    });
}
public void editPlan(String planId, Plan plan, final OnPlanListener onPlanListener) {
    // Updates an existing plan; on 200 the server's view of the plan is returned.
    wovenService.editPlan(getAuthToken(), planId, plan).enqueue(new Callback<Plan>() {
        @Override
        public void onResponse(Call<Plan> call, Response<Plan> response) {
            final boolean ok = response.isSuccessful() && response.code() == 200;
            if (ok) {
                onPlanListener.onComplete(true, response.body(), null);
            } else {
                onPlanListener.onComplete(false, null, response.message());
            }
        }

        @Override
        public void onFailure(Call<Plan> call, Throwable t) {
            // Transport-level failure: report the localized reason.
            t.printStackTrace();
            onPlanListener.onComplete(false, null, t.getLocalizedMessage());
        }
    });
}
public void deletePlan(String planId, final OnDeleteListener onDeleteListener) {
    // Deletes a plan by id; success is signalled by HTTP 204 with an empty body.
    wovenService.deletePlan(getAuthToken(), planId).enqueue(new Callback<ResponseBody>() {
        @Override
        public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
            final boolean deleted = response.isSuccessful() && response.code() == 204;
            if (deleted) {
                onDeleteListener.onComplete(true, null);
            } else {
                onDeleteListener.onComplete(false, response.message());
            }
        }

        @Override
        public void onFailure(Call<ResponseBody> call, Throwable t) {
            // Transport-level failure: report the localized reason.
            t.printStackTrace();
            onDeleteListener.onComplete(false, t.getLocalizedMessage());
        }
    });
}
public void createWebhook(final Webhook webhook, final OnWebhookListener onWebhookListener) {
    // Registers a webhook; on HTTP 201 the created webhook is handed to the listener.
    wovenService.createWebhook(getAuthToken(), webhook).enqueue(new Callback<Webhook>() {
        @Override
        public void onResponse(Call<Webhook> call, Response<Webhook> response) {
            if (response.isSuccessful() && response.code() == 201) {
                onWebhookListener.onComplete(true, response.body(), null);
                return;
            }
            onWebhookListener.onComplete(false, null, response.message());
        }

        @Override
        public void onFailure(Call<Webhook> call, Throwable t) {
            // Bug fix: the original invoked t.getLocalizedMessage() and discarded the
            // result (a no-op statement). Log the stack trace like every other callback.
            t.printStackTrace();
            onWebhookListener.onComplete(false, null, t.getLocalizedMessage());
        }
    });
}
public void deleteWebhook(String webhookId, final OnDeleteListener onDeleteListener) {
    // Deletes a webhook by id; success is signalled by HTTP 204 with an empty body.
    wovenService.deleteWebhook(getAuthToken(), webhookId).enqueue(new Callback<ResponseBody>() {
        @Override
        public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
            if (response.isSuccessful() && response.code() == 204) {
                onDeleteListener.onComplete(true, null);
                return;
            }
            onDeleteListener.onComplete(false, response.message());
        }

        @Override
        public void onFailure(Call<ResponseBody> call, Throwable t) {
            // Bug fix: the original invoked t.getLocalizedMessage() and discarded the
            // result (a no-op statement). Log the stack trace like every other callback.
            t.printStackTrace();
            onDeleteListener.onComplete(false, t.getLocalizedMessage());
        }
    });
}
private String getXpayHeader() {
    // Builds the "apiKey:apiSecret" pair used for key-based authentication headers.
    return this.apiKey + ":" + this.apiSecret;
}
private String getAuthToken() {
    // Builds the "Token <jwt>" Authorization header value from the stored token.
    return "Token " + this.token;
}
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/models/Customer.java
package com.wovenpay.wovenpayments.models;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
public class Customer {
@SerializedName("errors")
@Expose
private Errors errors;
@SerializedName("status_code")
@Expose
private Integer statusCode;
@SerializedName("id")
@Expose
private String id;
@SerializedName("metadata")
@Expose
private Object metadata;
@SerializedName("created_at")
@Expose
private String createdAt;
@SerializedName("updated_at")
@Expose
private String updatedAt;
@SerializedName("first_name")
@Expose
private Object firstName;
@SerializedName("last_name")
@Expose
private Object lastName;
@SerializedName("national_id")
@Expose
private Object nationalId;
@SerializedName("phone")
@Expose
private Object phone;
@SerializedName("email")
@Expose
private String email;
@SerializedName("address")
@Expose
private Object address;
@SerializedName("city")
@Expose
private Object city;
@SerializedName("country")
@Expose
private Object country;
@SerializedName("postal_code")
@Expose
private Object postalCode;
public Errors getErrors() {
return errors;
}
public void setErrors(Errors errors) {
this.errors = errors;
}
public Integer getStatusCode() {
return statusCode;
}
public void setStatusCode(Integer statusCode) {
this.statusCode = statusCode;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public Object getMetadata() {
return metadata;
}
public void setMetadata(Object metadata) {
this.metadata = metadata;
}
public String getCreatedAt() {
return createdAt;
}
public void setCreatedAt(String createdAt) {
this.createdAt = createdAt;
}
public String getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(String updatedAt) {
this.updatedAt = updatedAt;
}
public Object getFirstName() {
return firstName;
}
public void setFirstName(Object firstName) {
this.firstName = firstName;
}
public Object getLastName() {
return lastName;
}
public void setLastName(Object lastName) {
this.lastName = lastName;
}
public Object getNationalId() {
return nationalId;
}
public void setNationalId(Object nationalId) {
this.nationalId = nationalId;
}
public Object getPhone() {
return phone;
}
public void setPhone(Object phone) {
this.phone = phone;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public Object getAddress() {
return address;
}
public void setAddress(Object address) {
this.address = address;
}
public Object getCity() {
return city;
}
public void setCity(Object city) {
this.city = city;
}
public Object getCountry() {
return country;
}
public void setCountry(Object country) {
this.country = country;
}
public Object getPostalCode() {
return postalCode;
}
public void setPostalCode(Object postalCode) {
this.postalCode = postalCode;
}
}<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnWebhookListener.java
package com.wovenpay.wovenpayments.interfaces;
import com.wovenpay.wovenpayments.models.Webhook;
/**
 * Callback for webhook create operations.
 * Created by thatmarc_ on 13-Mar-18.
 */
public interface OnWebhookListener {
/**
 * @param success true when the call completed with the expected HTTP status
 * @param webhook the created webhook on success, otherwise null
 * @param message error detail on failure, otherwise null
 */
void onComplete(boolean success, Webhook webhook, String message);
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/models/Code.java
package com.wovenpay.wovenpayments.models;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
public class Code {
@SerializedName("error_code")
@Expose
private String errorCode;
@SerializedName("detail_code")
@Expose
private String detailCode;
@SerializedName("docs")
@Expose
private String docs;
public String getErrorCode() {
return errorCode;
}
public void setErrorCode(String errorCode) {
this.errorCode = errorCode;
}
public String getDetailCode() {
return detailCode;
}
public void setDetailCode(String detailCode) {
this.detailCode = detailCode;
}
public String getDocs() {
return docs;
}
public void setDocs(String docs) {
this.docs = docs;
}
}<file_sep>/settings.gradle
// Gradle modules: the demo application (:app) and the publishable SDK library (:wovenpay).
include ':app', ':wovenpay'
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnDeleteListener.java
package com.wovenpay.wovenpayments.interfaces;
/**
 * Callback for delete operations (customer, plan, webhook).
 * Created by thatmarc_ on 10-Mar-18.
 */
public interface OnDeleteListener {
/**
 * @param success true when the server confirmed the deletion
 * @param message status or error detail; may be null on success
 */
void onComplete(boolean success, String message);
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnPlansListener.java
package com.wovenpay.wovenpayments.interfaces;
import com.wovenpay.wovenpayments.models.Plan;
import java.util.List;
/**
 * Callback for listing plans.
 * Created by thatmarc_ on 12-Mar-18.
 */
public interface OnPlansListener {
/**
 * @param success true when the call completed with the expected HTTP status
 * @param planList the retrieved plans on success, otherwise null
 * @param message error detail on failure, otherwise null
 */
void onComplete(boolean success, List<Plan> planList, String message);
}
<file_sep>/wovenpay/build.gradle
// Build script for the publishable 'wovenpay' Android library module.
apply plugin: 'com.android.library'
// Maven publishing coordinates consumed by the release script applied at the bottom.
ext {
PUBLISH_GROUP_ID = 'com.wovenpay.wovenpayments'
PUBLISH_ARTIFACT_ID = 'wovenpayments'
PUBLISH_VERSION = '1.0.8'
}
android {
compileSdkVersion 26
defaultConfig {
minSdkVersion 14
targetSdkVersion 26
versionCode 9
versionName "1.0.8"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
// Unit-test configuration: stub Android APIs return defaults instead of throwing,
// and resources are available to Robolectric-style tests.
testOptions {
unitTests {
returnDefaultValues= true
includeAndroidResources = true
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'com.squareup.retrofit2:retrofit:2.3.0'
implementation 'com.squareup.retrofit2:converter-gson:2.3.0'
implementation 'com.squareup.okhttp3:logging-interceptor:3.6.0'
testImplementation 'junit:junit:4.12'
testImplementation 'org.mockito:mockito-core:2.7.6'
testImplementation "org.robolectric:robolectric:3.7.1"
testImplementation 'com.squareup.okhttp3:mockwebserver:3.7.0'
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
}
repositories {
mavenCentral()
}
// Pulls in the bintray/maven release tasks for publishing the AAR.
apply from: 'https://raw.githubusercontent.com/blundell/release-android-library/master/android-release-aar.gradle'
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnTransactionsListener.java
package com.wovenpay.wovenpayments.interfaces;
import com.wovenpay.wovenpayments.models.Transaction;
import java.util.List;
/**
 * Callback for listing payment transactions.
 * Created by thatmarc_ on 07-Mar-18.
 */
public interface OnTransactionsListener {
/**
 * @param success true when the call completed with the expected HTTP status
 * @param transactionList the retrieved transactions on success, otherwise null
 * @param message error detail on failure, otherwise null
 */
void onComplete(boolean success, List<Transaction> transactionList, String message);
}
<file_sep>/app/src/main/java/com/wovenpay/wovenpayments/sample/MainActivity.java
package com.wovenpay.wovenpayments.sample;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.google.gson.Gson;
import com.wovenpay.wovenpayments.WovenPay;
import com.wovenpay.wovenpayments.interfaces.AuthComplete;
import com.wovenpay.wovenpayments.interfaces.OnAccountListener;
import com.wovenpay.wovenpayments.interfaces.OnBusinessListListener;
import com.wovenpay.wovenpayments.interfaces.OnBusinessListener;
import com.wovenpay.wovenpayments.interfaces.OnCustomerListener;
import com.wovenpay.wovenpayments.interfaces.OnCustomersListener;
import com.wovenpay.wovenpayments.interfaces.OnDeleteListener;
import com.wovenpay.wovenpayments.interfaces.OnPaymentListener;
import com.wovenpay.wovenpayments.interfaces.OnPlanListener;
import com.wovenpay.wovenpayments.interfaces.OnPlansListener;
import com.wovenpay.wovenpayments.interfaces.OnStatusListener;
import com.wovenpay.wovenpayments.interfaces.OnTokenRefreshListener;
import com.wovenpay.wovenpayments.interfaces.OnTokenVerifyListener;
import com.wovenpay.wovenpayments.interfaces.OnTransactionsListener;
import com.wovenpay.wovenpayments.interfaces.OnWebhookListener;
import com.wovenpay.wovenpayments.models.AccountResponse;
import com.wovenpay.wovenpayments.models.Business;
import com.wovenpay.wovenpayments.models.Customer;
import com.wovenpay.wovenpayments.models.Plan;
import com.wovenpay.wovenpayments.models.Transaction;
import com.wovenpay.wovenpayments.models.Webhook;
import java.util.List;
import java.util.Locale;
/**
 * Demo activity that exercises every WovenPay SDK call from a grid of buttons
 * and renders each outcome (or error message) into the tvToken TextView.
 */
public class MainActivity extends AppCompatActivity {

    // Sandbox credentials and fixture ids used by the demo buttons below.
    final String apikey = "<KEY>";
    final String apisecret = "<KEY>";
    final String email = "<EMAIL>";
    final String password = "<PASSWORD>";
    final String wrongPassword = "<PASSWORD>"; // kept for manual failure testing; not referenced below
    final String testToken = "<KEY>"; // not referenced below
    final String testBusiness = "bus_B6cDFj4uTz4AuFAUFzPFgm";
    final String testPlan = "plan_jpcms2jwkvHbinJHvHygkZ"; // not referenced below
    String testCustomer = "cus_JBkyB88VbjNhux2PQTivmH"; // replaced by the id of a freshly created customer
    String planId = "plan_cskgawXtFdEHkYsfYtL6uH"; // replaced by the id of a freshly created plan

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        final TextView tvAuth = findViewById(R.id.tvToken);
        Button bAuth = findViewById(R.id.bAuth);
        final Button bRefresh = findViewById(R.id.bRefresh);
        Button bVerify = findViewById(R.id.bVerify);
        Button bCharge = findViewById(R.id.bCharge);
        Button bListTransactions = findViewById(R.id.bListTransactions);
        Button bStatus = findViewById(R.id.bStatus);
        Button bAccount = findViewById(R.id.bAccount);
        Button bAllBusinesses = findViewById(R.id.bAllBusinesses);
        Button bGetBusiness = findViewById(R.id.bGetBusiness);
        Button bEditBusiness = findViewById(R.id.bEditBusiness);
        Button bCreateCustomer = findViewById(R.id.bCreateCustomer);
        Button bEditCustomer = findViewById(R.id.bEditCustomer);
        Button bGetCustomer = findViewById(R.id.bGetCustomer);
        Button bGetCustomers = findViewById(R.id.bGetCustomers);
        Button bDeleteCustomer = findViewById(R.id.bDeleteCustomer);
        Button bGetPlans = findViewById(R.id.bGetPlans);
        Button bCreatePlan = findViewById(R.id.bCreatePlan);
        Button bGetPlan = findViewById(R.id.bGetPlan);
        Button bDeletePlan = findViewById(R.id.bDeletePlan);
        Button bEditPlan = findViewById(R.id.bEditPlan);
        Button bCreateWebhook = findViewById(R.id.bCreateWebhook);
        Button bDeleteWebhook = findViewById(R.id.bDeleteWebhook);

        final WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
        wovenPay.setVersion(1);
        wovenPay.getVersion(); // demo getter call; return value intentionally ignored
        wovenPay.setTimeout(5);
        wovenPay.getTimeout(); // demo getter call; return value intentionally ignored

        // Obtain an authorization token and store it on the SDK instance.
        bAuth.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getAuthorizationToken(email, password, new AuthComplete() {
                    @Override
                    public void onComplete(boolean success, String token, String message) {
                        if (success) {
                            wovenPay.setToken(token);
                            // get set token
                            wovenPay.getToken();
                            tvAuth.setText(String.format("Token : %s", token));
                            return;
                        }
                        tvAuth.setText(String.format("Auth failed error %s ", message));
                    }
                });
            }
        });

        // Exchange the current (possibly stale) token for a fresh one.
        bRefresh.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.refreshAuthorizationToken(new OnTokenRefreshListener() {
                    @Override
                    public void onRefresh(boolean success, String token, String message) {
                        if (success) {
                            wovenPay.setToken(token);
                            tvAuth.setText(String.format("Refresh Token : %s", token));
                            return;
                        }
                        tvAuth.setText(String.format("Auth token refresh error %s ", message));
                    }
                });
            }
        });

        // Verify that the stored token is still valid.
        bVerify.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.verifyAuthorizationToken(new OnTokenVerifyListener() {
                    @Override
                    public void onVerify(boolean success, String token, String message) {
                        if (success) {
                            wovenPay.setToken(token);
                            tvAuth.setText(String.format("Verified Token : %s", token));
                            return;
                        }
                        tvAuth.setText(String.format("Auth token verify error %s ", message));
                    }
                });
            }
        });

        // Initiate a mobile-money charge.
        bCharge.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.chargePayment(
                        10,
                        "<EMAIL>",
                        "mobile.mpesa",
                        "+254977777777", // replace this with your mpesa phone number :-)
                        "Order description",
                        "Reference",
                        new OnPaymentListener() {
                            @Override
                            public void onComplete(boolean success, String transactionId, String message) {
                                if (success) {
                                    tvAuth.setText(String.format("Success: %s", transactionId));
                                    return;
                                }
                                tvAuth.setText(String.format("Failed: %s\nMessage: %s", transactionId, message));
                            }
                        });
            }
        });

        // List all payment transactions for the account.
        bListTransactions.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.transactions(new OnTransactionsListener() {
                    @Override
                    public void onComplete(boolean success, List<Transaction> transactionList, String message) {
                        if (success) {
                            tvAuth.setText(String.format(Locale.getDefault(), "Transactions %d", transactionList.size()));
                            return;
                        }
                        tvAuth.setText(String.format("Error: %s", message));
                    }
                });
            }
        });

        // Query the status of a specific transaction id.
        bStatus.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.status("txn_272uXW8ZfepxMW3kHyEj7f", new OnStatusListener() {
                    @Override
                    public void onComplete(boolean success, String status, String paymentId, String error) {
                        if (success) {
                            tvAuth.setText(String.format("Status: %s\nPayment id: %s", status, paymentId));
                            return;
                        }
                        tvAuth.setText(String.format("Error: %s", error));
                    }
                });
            }
        });

        // Fetch details of the authorized account.
        bAccount.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.accountDetails(new OnAccountListener() {
                    @Override
                    public void onComplete(boolean success, AccountResponse accountResponse, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Account: %s", new Gson().toJson(accountResponse)));
                            return;
                        }
                        tvAuth.setText(String.format("Account Details error: %s", message));
                    }
                });
            }
        });

        // List all businesses/apps on the account.
        bAllBusinesses.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getAllBusinesses(new OnBusinessListListener() {
                    @Override
                    public void onComplete(boolean success, List<Business> businesses, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Businesses: %s", new Gson().toJson(businesses)));
                            return;
                        }
                        tvAuth.setText(String.format("Get all business error: %s", message));
                    }
                });
            }
        });

        // Fetch a single business by id.
        bGetBusiness.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getBusiness(testBusiness, new OnBusinessListener() {
                    @Override
                    public void onComplete(boolean success, Business business, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Business: %s", new Gson().toJson(business)));
                            return;
                        }
                        tvAuth.setText(String.format("Get business error: %s", message));
                    }
                });
            }
        });

        // Edit the test business.
        bEditBusiness.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.editBusiness(testBusiness, "business name", "<EMAIL>", 23456789, "KE", new OnBusinessListener() {
                    @Override
                    public void onComplete(boolean success, Business business, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Edited business: %s", new Gson().toJson(business)));
                            return;
                        }
                        tvAuth.setText(String.format("Edit business error: %s", message));
                    }
                });
            }
        });

        // Create a customer and remember its id for the other customer buttons.
        bCreateCustomer.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Customer c = new Customer();
                c.setEmail("<EMAIL>");
                wovenPay.createCustomer(c, new OnCustomerListener() {
                    @Override
                    public void onComplete(boolean success, Customer customerResponse, String message) {
                        if (success) {
                            testCustomer = customerResponse.getId();
                            tvAuth.setText(String.format("Created customer: %s", new Gson().toJson(customerResponse)));
                            return;
                        }
                        tvAuth.setText(String.format("Created customer error: %s", message));
                    }
                });
            }
        });

        // Edit the remembered customer.
        bEditCustomer.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Customer customer = new Customer();
                customer.setEmail("<EMAIL>");
                wovenPay.editCustomer(testCustomer, customer, new OnCustomerListener() {
                    @Override
                    public void onComplete(boolean success, Customer customerResponse, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Edited customer: %s", new Gson().toJson(customerResponse)));
                            return;
                        }
                        tvAuth.setText(String.format("Error editing customer: %s", message));
                    }
                });
            }
        });

        // Fetch the remembered customer.
        bGetCustomer.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getCustomer(testCustomer, new OnCustomerListener() {
                    @Override
                    public void onComplete(boolean success, Customer customerResponse, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Got customer : %s", new Gson().toJson(customerResponse)));
                            return;
                        }
                        tvAuth.setText(String.format("Get customer error: %s", message));
                    }
                });
            }
        });

        // List all customers.
        bGetCustomers.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getCustomers(new OnCustomersListener() {
                    @Override
                    public void onComplete(boolean success, List<Customer> customerResponseList, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Got customers : %s", new Gson().toJson(customerResponseList)));
                            return;
                        }
                        tvAuth.setText(String.format("Get customers error: %s", message));
                    }
                });
            }
        });

        // Delete the remembered customer.
        bDeleteCustomer.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.deleteCustomer(testCustomer, new OnDeleteListener() {
                    @Override
                    public void onComplete(boolean success, String message) {
                        if (success) {
                            tvAuth.setText("Deleted customer ");
                            return;
                        }
                        tvAuth.setText(String.format("delete customer error: %s", message));
                    }
                });
            }
        });

        // List all plans.
        bGetPlans.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getPlans(new OnPlansListener() {
                    @Override
                    public void onComplete(boolean success, List<Plan> planList, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Plans %s", new Gson().toJson(planList)));
                            return;
                        }
                        tvAuth.setText(String.format("Get plans error %s", message));
                    }
                });
            }
        });

        // Create a plan and remember its id for the other plan buttons.
        bCreatePlan.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Plan plan = new Plan();
                plan.setPrice(1200.00);
                plan.setName("Namejgasddkhfj");
                plan.setBusiness(testBusiness);
                wovenPay.createPlan(plan, new OnPlanListener() {
                    @Override
                    public void onComplete(boolean success, Plan plan, String message) {
                        if (success) {
                            planId = plan.getId();
                            Toast.makeText(getApplicationContext(), plan.getId(), Toast.LENGTH_SHORT).show();
                            tvAuth.setText(String.format("Plan created %s", new Gson().toJson(plan)));
                            return;
                        }
                        tvAuth.setText(String.format("Plan creation error %s", message));
                    }
                });
            }
        });

        // Fetch the remembered plan.
        bGetPlan.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.getPlan(planId, new OnPlanListener() {
                    @Override
                    public void onComplete(boolean success, Plan plan, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Plan %s", new Gson().toJson(plan)));
                            return;
                        }
                        tvAuth.setText(String.format("Get plan error %s", message));
                    }
                });
            }
        });

        // Edit the remembered plan.
        bEditPlan.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Plan plan = new Plan();
                plan.setName("Name name");
                plan.setPrice(1_300.0);
                wovenPay.editPlan(planId, plan, new OnPlanListener() {
                    @Override
                    public void onComplete(boolean success, Plan plan, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Edited plan %s", new Gson().toJson(plan)));
                            return;
                        }
                        tvAuth.setText(String.format("Edited plan error %s", message));
                    }
                });
            }
        });

        // Delete the remembered plan.
        bDeletePlan.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.deletePlan(planId, new OnDeleteListener() {
                    @Override
                    public void onComplete(boolean success, String message) {
                        if (success) {
                            tvAuth.setText("Deleted plan");
                            return;
                        }
                        // Bug fix: this handler previously reported "delete customer error"
                        // (copy-paste from the customer handler) for a failed plan deletion.
                        tvAuth.setText(String.format("delete plan error: %s", message));
                    }
                });
            }
        });

        // Register a webhook.
        bCreateWebhook.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Webhook webhook = new Webhook();
                webhook.setEvent("customer.created");
                webhook.setKey("test");
                webhook.setTarget("https://paysstestf.ngrok.com/");
                wovenPay.createWebhook(webhook, new OnWebhookListener() {
                    @Override
                    public void onComplete(boolean success, Webhook webhook, String message) {
                        if (success) {
                            tvAuth.setText(String.format("Created webhook %s", new Gson().toJson(webhook)));
                            return;
                        }
                        tvAuth.setText(String.format("Create webhook error %s", message));
                    }
                });
            }
        });

        // Delete a webhook by id.
        bDeleteWebhook.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                wovenPay.deleteWebhook("80", new OnDeleteListener() {
                    @Override
                    public void onComplete(boolean success, String message) {
                        if (success) {
                            tvAuth.setText("Deleted!");
                            return;
                        }
                        // Bug fix: the format string had no %s placeholder, so the error
                        // message argument was silently dropped.
                        tvAuth.setText(String.format("Delete webhook error %s", message));
                    }
                });
            }
        });
    }
}
<file_sep>/wovenpay/src/main/java/com/wovenpay/wovenpayments/interfaces/OnTokenVerifyListener.java
package com.wovenpay.wovenpayments.interfaces;
/**
 * Callback for token verification.
 * Created by thatmarc_ on 07-Mar-18.
 */
public interface OnTokenVerifyListener {
/**
 * @param success true when the token was verified successfully
 * @param token the verified token on success, otherwise null
 * @param message error detail on failure, otherwise null
 */
void onVerify(boolean success, String token, String message);
}
<file_sep>/README.md
Be one of our first beta testers. Submit your email on [this](https://wovenpay.com) form.
# wovenpay-android
WE ARE STILL IN ALPHA TESTING
Would you like to know when we open for beta testing ? Submit your email on [this](https://wovenpay.com) form
_Anything may change at any time. The public API should not be considered stable._
Woven Payments Android SDK is an abstraction on top of the Woven Payments REST API that helps you integrate Woven effortlessly into your Android application and process payments from it.
If you intend to implement payments only, such as in an e-commerce application you only need to use the [payments](#payments) resource.
## Table of Contents
- [wovenpay-android](#wovenpay-android)
- [Table of Contents](#table-of-contents)
- [Installation](#installation)
- [Create a new Instance of wovenpay](#create-a-new-instance-of-wovenpay)
- [Get token](#get-token)
- [Add token](#add-token)
- [Add request timeout](#add-request-timeout)
- [Change API version](#change-api-version)
- [Refresh token](#refresh-token)
- [Verify token](#verify-token)
- [Account](#account)
- [Business/Apps](#businessesapps)
- [Get all businesses](#get-all-businesses)
- [Get a specific business](#get-specific-business)
- [Edit a business](#edit-a-business)
- [Payments](#payments)
- [Make payments charge](#make-payments-charge)
- [Get list of Payment transactions](#get-list-of-payment-transactions)
- [Transaction status](#transaction-status)
- [Customer](#customer)
- [Create a new customer](#create-a-new-customer)
- [Edit a customer](#edit-a-customer)
- [Delete a customer](#delete-a-customer)
- [Retrieve all customers](#retrieve-all-customers)
- [Retrieve Specific customer](#retrieve-specific-customer)
- [Plan](#plan)
- [Create a new plan](#create-a-new-plan)
- [Retrieve all plans](#retrieve-all-plans)
- [Retrieve Specific plan](#retrieve-specific-plan)
- [Edit a plan](#edit-a-plan)
- [Delete a plan](#delete-a-plan)
- [Webhook](#webhook)
- [Create a new webhook](#create-a-new-webhook)
- [Delete a webhook](#delete-a-webhook)
- [Todo list](#todo)
## Installation
1. __Gradle__
To download the SDK, add it as a dependency to the application level `build.gradle`
```
dependencies {
compile 'com.wovenpay.wovenpay:see.latest.version'
}
```
2. __Maven__
```xml
<dependency>
<groupId>com.wovenpay.wovenpayments</groupId>
<artifactId>wovenpayments</artifactId>
<version>see.latest.version</version>
<type>pom</type>
</dependency>
```
3. __Ivy__
```xml
<dependency org='com.wovenpay.wovenpayments' name='wovenpayments' rev='see.latest.version'>
<artifact name='wovenpayments' ext='pom' ></artifact>
</dependency>
```
<br/>
__latest version__ : [](https://bintray.com/wovenpay/wovenpay-android/wovenpay-android/_latestVersion)
## Create a new Instance of wovenpay
You need to have an instance of woven to interact with the Woven API.
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
}
```
## Get Token
To obtain an Authorization token. An authorization will be needed when making most of the requests. The obtained token becomes invalid after a while. Another token needs to be obtained when the previous one gets invalidated.
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void Authenticate(){
wovenPay.getAuthorizationToken(email, password, new AuthComplete() {
@Override
public void onComplete(boolean success, String token, String message) {
if (success) {
wovenPay.setToken(token);
tvAuth.setText(String.format("Token : %s", token));
return;
}
tvAuth.setText(String.format("Auth failed error %s ", message));
}
});
}
}
```
## Add Token
After you have obtained a token, set it to your instance of woven.
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
void setToken(String token){
wovenPay.setToken(token);
}
//to get the current token
String token = wovenPay.getToken();
}
```
## Add Request Timeout
The Woven SDK defaults to a 30-second timeout for Retrofit's readTimeout & connectTimeout. To use a custom value for both read and connect timeouts, simply set the timeout:
```java
class MainActivity extends AppCompatActivity {
public void setTimeout(){
wovenPay.setTimeout(5000);
//get set timeout
int timeout = wovenPay.getTimeout();
}
}
```
## Change API version
```java
class MainActivity extends AppCompatActivity {
public void setVersion(){
wovenPay.setVersion(1);
//get set version
int version = wovenpay.getVersion();
}
}
```
## Refresh token
Authorization tokens expire after a while. If you have the previous auth token, it can be exchanged for a valid one.
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void refreshToken(){
wovenPay.refreshAuthorizationToken(new OnTokenRefreshListener() {
@Override
public void onRefresh(boolean success, String token, String message) {
if(success){
wovenPay.setToken(token);
tvAuth.setText(String.format("Refresh Token : %s", token));
return;
}
tvAuth.setText(String.format("Auth token refresh error %s ", message));
}
});
}
}
```
## Verify token
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void verifyToken(){
wovenPay.verifyAuthorizationToken(new OnTokenVerifyListener() {
@Override
public void onVerify(boolean success, String token, String message) {
if (success) {
wovenPay.setToken(token);
tvAuth.setText(String.format("Verified Token : %s", token));
return;
}
tvAuth.setText(String.format("Auth token verify error %s ", message));
}
});
}
}
```
## Account
Get details of the currently authorized account
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void getAccountDetails(){
wovenPay.accountDetails(new OnAccountListener() {
@Override
public void onComplete(boolean success, AccountResponse accountResponse, String message) {
if (success) {
tvAuth.setText(String.format("Account: %s", new Gson().toJson(accountResponse)));
return;
}
tvAuth.setText(String.format("Account Details error: %s", message));
}
});
}
}
```
## Businesses/Apps
Businesses/Apps are the client applications you register so that they are accepted as custom clients of the API.
### Get all businesses
Use this resource to get a list of all the businesses that are allowed for your account
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void getAllBusinesses(){
wovenPay.getAllBusinesses(new OnBusinessListListener() {
@Override
public void onComplete(boolean success, List<Business> businesses, String message) {
if (success) {
tvAuth.setText(String.format("Businesses: %s", new Gson().toJson(businesses)));
return;
}
tvAuth.setText(String.format("Get all business error: %s", message));
}
});
}
}
```
### Get specific business
This resource can be used to get details about a specific business
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void getBusiness(){
wovenPay.getBusiness(testBusiness, new OnBusinessListener() {
@Override
public void onComplete(boolean success, Business business, String message) {
if (success) {
tvAuth.setText(String.format("Business: %s", new Gson().toJson(business)));
return;
}
tvAuth.setText(String.format("Get business error: %s", message));
}
});
}
}
```
### Edit a business
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void editBusiness(){
wovenPay.editBusiness(testBusiness, "business name", "<EMAIL>", 23456789, "KE", new OnBusinessListener() {
@Override
public void onComplete(boolean success, Business business, String message) {
if (success) {
tvAuth.setText(String.format("Edited business: %s", new Gson().toJson(business)));
return;
}
tvAuth.setText(String.format("Edit business error: %s", message));
}
});
}
}
```
## Payments
If you only need to implement checkout in your app, this resource is all you need.
### Make Payments Charge
Use this when you want to make a charge against a customer's account for a given amount
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void makePaymentCharge(){
wovenPay.chargePayment(
10,
"<EMAIL>",
"mobile.mpesa",
"+254977777777", // replace this with your mpesa phone number :-)
"Order description",
"Reference",
new OnPaymentListener() {
@Override
public void onComplete(boolean success, String transactionId, String message) {
if (success) {
tvAuth.setText(String.format("Success: %s", transactionId));
return;
}
tvAuth.setText(String.format("Failed: %s\nMessage: %s", transactionId, message));
}
});
}
}
```
### Get list of Payment transactions
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void fetchTransactionList(){
wovenPay.transactions(new OnTransactionsListener() {
@Override
public void onComplete(boolean success, List<Transation> transactionList, String message) {
if (success) {
tvAuth.setText(String.format(Locale.getDefault(), "Transactions %d", transactionList.size()));
return;
}
tvAuth.setText(String.format("Error: %s", message));
}
});
}
}
```
### Transaction Status
Checking for transaction status
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void transactionStatus(){
wovenPay.status("txn_272uXW8ZfepxMW3kHyEj7f", new OnStatusListener() {
@Override
public void onComplete(boolean success, String status, String paymentId, String error) {
if (success) {
tvAuth.setText(String.format("Status: %s\nPayment id: %s", status, paymentId));
return;
}
tvAuth.setText(String.format("Error: %s", error));
}
});
}
}
```
### Customer
Customers are entities who pay for something you intend to sell on your app. Every transaction is linked to a customer, and a customer can have multiple payment transactions
#### Create a new customer
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void createCustomer(){
Customer c = new Customer();
c.setEmail("<EMAIL>");
wovenPay.createCustomer(c, new OnCustomerListener() {
@Override
public void onComplete(boolean success, Customer customerResponse, String message) {
if (success) {
testCustomer = customerResponse.getId();
tvAuth.setText(String.format("Created customer: %s", new Gson().toJson(customerResponse)));
return;
}
tvAuth.setText(String.format("Created customer error: %s", message));
}});
}
}
```
#### Edit a customer
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void createCustomer(){
Customer customer = new Customer();
customer.setEmail("<EMAIL>");
wovenPay.editCustomer(testCustomer, customer, new OnCustomerListener() {
@Override
public void onComplete(boolean success, Customer customerResponse, String message) {
if (success) {
tvAuth.setText(String.format("Edited customer: %s", new Gson().toJson(customerResponse)));
return;
}
tvAuth.setText(String.format("Error editing customer: %s", message));
}
});
}
}
```
#### Delete a customer
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void createCustomer(){
wovenPay.deleteCustomer(testCustomer, new OnDeleteCustomerListener() {
@Override
public void onComplete(boolean success, String message) {
if (success) {
tvAuth.setText("Deleted customer ");
return;
}
tvAuth.setText(String.format("delete customer error: %s", message));
}});
}
}
```
#### Retrieve all customers
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void createCustomer(){
wovenPay.getCustomers(new OnCustomersListener() {
@Override
public void onComplete(boolean success, List<Customer> customerResponseList, String message) {
if (success) {
tvAuth.setText(String.format("Got customers : %s", new Gson().toJson(customerResponseList)));
return;
}
tvAuth.setText(String.format("Get customers error: %s", message));
}
});
}
}
```
#### Retrieve Specific customer
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void createCustomer(){
wovenPay.getCustomer(testCustomer, new OnCustomerListener() {
@Override
public void onComplete(boolean success, Customer customerResponse, String message) {
if (success) {
tvAuth.setText(String.format("Got customer : %s", new Gson().toJson(customerResponse)));
return;
}
tvAuth.setText(String.format("Get customer error: %s", message));
}
});
}
}
```
### Plan
If your app has sale packages, this is the resource you use to create, edit and remove plans.
#### Create a new plan
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
Plan plan = new Plan();
plan.setPrice(1200.00);
plan.setName("Namejgasddkhfj");
plan.setBusiness(testBusiness);
wovenPay.createPlan(plan, new OnPlanListener() {
@Override
public void onComplete(boolean success, Plan plan, String message) {
if (success) {
planId = plan.getId();
Toast.makeText(getApplicationContext(), plan.getId(), Toast.LENGTH_SHORT).show();
tvAuth.setText(String.format("Plan created %s", new Gson().toJson(plan)));
return;
}
tvAuth.setText(String.format("Plan creation error %s", message));
}
});
}
}
```
#### Retrieve all plans
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
Plan plan = new Plan();
plan.setPrice(1200.00);
plan.setName("Namejgasddkhfj");
plan.setBusiness(testBusiness);
wovenPay.createPlan(plan, new OnPlanListener() {
@Override
public void onComplete(boolean success, Plan plan, String message) {
if (success) {
planId = plan.getId();
Toast.makeText(getApplicationContext(), plan.getId(), Toast.LENGTH_SHORT).show();
tvAuth.setText(String.format("Plan created %s", new Gson().toJson(plan)));
return;
}
tvAuth.setText(String.format("Plan creation error %s", message));
}
});
}
}
```
#### Retrieve Specific plan
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
wovenPay.getPlan(planId, new OnPlanListener() {
@Override
public void onComplete(boolean success, Plan plan, String message) {
if (success) {
tvAuth.setText(String.format("Plan %s", new Gson().toJson(plan)));
return;
}
tvAuth.setText(String.format("Get plan error %s", message));
}
});
}
}
```
#### Edit a plan
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
Plan plan = new Plan();
plan.setName("<NAME>");
plan.setPrice(1_300.0);
wovenPay.editPlan(planId, plan, new OnPlanListener() {
@Override
public void onComplete(boolean success, Plan plan, String message) {
if (success) {
tvAuth.setText(String.format("Edited plan %s", new Gson().toJson(plan)));
return;
}
tvAuth.setText(String.format("Edited plan error %s", message));
}
});
}
}
```
#### Delete a plan
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
wovenPay.deletePlan(planId, new OnDeleteListener() {
@Override
public void onComplete(boolean success, String message) {
if (success) {
tvAuth.setText("Deleted plan");
return;
}
tvAuth.setText(String.format("delete customer error: %s", message));
}
});
}
}
```
### Webhook
#### Create a new webhook
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
Webhook webhook = new Webhook();
webhook.setEvent("customer.created");
webhook.setKey("test");
webhook.setTarget("https://paysstestf.ngrok.com/");
wovenPay.createWebhook(webhook, new OnWebhookListener() {
@Override
public void onComplete(boolean success, Webhook webhook, String message) {
if (success) {
tvAuth.setText(String.format("Created webhook %s", new Gson().toJson(webhook)));
return;
}
tvAuth.setText(String.format("Create webhook error %s", message));
}
});
}
}
```
#### Delete a webhook
```java
class MainActivity extends AppCompatActivity {
WovenPay wovenPay = new WovenPay(apikey, apisecret, false);
public void functionName(){
wovenPay.deleteWebhook("80", new OnDeleteListener() {
@Override
public void onComplete(boolean success, String message) {
if (success) {
tvAuth.setText("Deleted!");
return;
}
                    tvAuth.setText(String.format("Delete webhook error %s", message));
}
});
}
}
```
### Supported versions
We support android SDK 14 and above. You can checkout our [API documentation](https://developer.wovenpay.com/) for more information.
### Todo
- [x] Authenticate
- [x] Set token
- [x] Refresh token
- [x] Verify token
- [x] Set timeout
- [x] Set version
- [x] Account
- [x] Business/Apps
- [x] Customer
- [x] Plan
- [ ] Subscription
- [x] Payments
- [x] Webhooks
- [x] Publish AAR to jCenter and Maven Central
| c3c33f47abfec797df6218c524eca775851c5be5 | [
"Markdown",
"Java",
"Gradle"
] | 15 | Java | wovenpay/wovenpay-android | 31e101d516e748fee8552d7a87137ab7dd2699ab | b398c8ff164eda5fa6f84d588a14f63c75ccc3eb |
refs/heads/master | <repo_name>Jack-888/codaline_homeworks<file_sep>/weather/weather.rb
require "net/http"
require "uri"
require "rexml/document"

# Yandex weather export for station 33487.
# Station list: https://pogoda.yandex.ru/static/cities.xml
# http://export.yandex.ru/weather-ng/forecasts/33487.xml
uri = URI.parse("http://export.yandex.ru/weather-ng/forecasts/33487.xml")
response = Net::HTTP.get_response(uri)
# Fix: fail fast on a non-200 answer instead of crashing later while
# parsing an error page as XML.
unless response.is_a?(Net::HTTPSuccess)
  abort "Weather request failed: #{response.code} #{response.message}"
end
doc = REXML::Document.new(response.body)
# Pull the current conditions out of the <fact> element.
city_name = doc.root.attributes['exactname']
time = Time.now
temperature = doc.root.elements['fact/temperature'].text
pogoda = doc.root.elements['fact/weather_type'].text
wind = doc.root.elements['fact/wind_speed'].text
puts "На даний час #{time}, погода в місті #{city_name}"
puts "#{temperature} градус #{pogoda}, вітер #{wind} м/с"
| ef48263c924673aed372cdd1acb5d419cad91a5c | [
"Ruby"
] | 1 | Ruby | Jack-888/codaline_homeworks | 6d48377093c98d45332593b48363d837f7224a5d | 78579192311219dd238aa8b77b1bd0bda6e6b8ab |
refs/heads/master | <repo_name>Havynlam/HavynlamX<file_sep>/wathchMemorandum/src/main/java/watch/app/wear/glassx/cn/watchmemorandum/MainActivity.java
package watch.app.wear.glassx.cn.watchmemorandum;
import android.content.Intent;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.wearable.activity.WearableActivity;
import android.support.wearable.view.BoxInsetLayout;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ImageButton;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.HashMap;
import watch.app.wear.glassx.cn.watchmemorandum.adapter.MyTaskAdapter;
import watch.app.wear.glassx.cn.watchmemorandum.sqlite.DatabaseHelper;
public class MainActivity extends WearableActivity
{
ArrayList<HashMap<String,String>> showlist,list = Utils.getList();
DatabaseHelper dbHelper =new DatabaseHelper(MainActivity.this,"memorandum_db");
private BoxInsetLayout mContainerView;
private ListView mListView;
private ImageButton mImageButton;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setAmbientEnabled();
initLoadDb();//初始化载入数据库的数据
initView();
initData();
}
private void initLoadDb()
{
list = Utils.getList();
if(list.isEmpty())
loadFromDatabase(list); //先检查缓存,若没有数据再从数据库加载
}
private void initView()
{
mContainerView = (BoxInsetLayout) findViewById(R.id.container);
mListView = (ListView) findViewById(R.id.task);
mImageButton = (ImageButton) findViewById(R.id.addTask);
}
private void initData()
{
mListView.setAdapter(new MyTaskAdapter(getApplicationContext()));
mListView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
@Override
public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id)
{
return false;
}
});
mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id)
{
}
});
mImageButton.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
Utils.DateToMillis(list);
Intent intent = new Intent(MainActivity.this, EditActivity.class);
Bundle b = new Bundle();
b.putString("datetime", "");
b.putString("content", "");
b.putString("alerttime","");
intent.putExtra("android.intent.extra.INTENT", b);
startActivity(intent); //启动转到的Activity
}
});
}
private void loadFromDatabase(ArrayList<HashMap<String,String>> list){
SQLiteDatabase db = dbHelper.getReadableDatabase();
Cursor cursor = db.query("user", new String[] { "datetime", "content","alerttime" }, null,
null, null, null,"datetime desc");
while (cursor.moveToNext()) {
for (int i = 0; i < cursor.getCount(); i++) {
cursor.moveToPosition(i);
String datetime = cursor.getString(0);
String content = cursor.getString(1);
String alerttime = cursor.getString(2);
HashMap<String,String> map = new HashMap<String,String>();
map.put("datetime", datetime);
map.put("content", content);
map.put("alerttime", alerttime);
list.add(map);
}
}
}
@Override
public void onEnterAmbient(Bundle ambientDetails)
{
super.onEnterAmbient(ambientDetails);
updateDisplay();
}
@Override
public void onUpdateAmbient()
{
super.onUpdateAmbient();
updateDisplay();
}
@Override
public void onExitAmbient()
{
updateDisplay();
super.onExitAmbient();
}
private void updateDisplay()
{
if (isAmbient())
{
mContainerView.setBackgroundColor(getResources().getColor(android.R.color.black));
} else
{
mContainerView.setBackground(null);
}
}
}
<file_sep>/watchsedentary/src/main/java/cn/glassx/wear/watch/Sedentary/customView/DragSwitch.java
package cn.glassx.wear.watch.Sedentary.customView;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import cn.glassx.wear.watch.Sedentary.R;
/**
* Created by Havynlam on 15/9/28.
*/
/**
 * Two-state slide switch drawn from four bitmaps (a background and a knob
 * for each state).  A tap that starts inside the artwork toggles the state
 * and notifies the registered {@link OnChangedListener}.
 */
public class DragSwitch extends View implements View.OnTouchListener
{
    private Bitmap bg_on;    // background drawn while the switch is on
    private Bitmap bg_off;   // background drawn while the switch is off
    private Bitmap btn_on;   // knob drawn at the right edge when on
    private Bitmap btn_off;  // knob drawn at the left edge when off
    private boolean nowStatus = false; // current on/off state
    private OnChangedListener listener;
    public DragSwitch(Context context) {
        super(context);
        init();
    }
    public DragSwitch(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }
    /** Loads the bitmap resources and registers this view as its own touch listener. */
    public void init() {
        // load bitmap resources
        bg_on = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_gongdiandi);
        bg_off = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_heidiandi);
        btn_on = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_hongdian);
        btn_off = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_heidian);
        setOnTouchListener(this);
    }
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        Matrix matrix = new Matrix();
        Paint paint = new Paint();
        float x = 0;
        // choose the background and knob position from nowStatus (on/off)
        if (nowStatus == false) {
            canvas.drawBitmap(bg_off, matrix, paint); // background for the off state
            x = 0;
            canvas.drawBitmap(btn_off, x, 0, paint); // knob at the left for the off state
        } else {
            canvas.drawBitmap(bg_on, matrix, paint); // background for the on state
            x = bg_on.getWidth() - btn_on.getWidth();
            canvas.drawBitmap(btn_on, x, 0, paint); // knob at the right for the on state
        }
    }
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN: {
                // only claim the gesture when the touch starts inside the artwork
                if (event.getX() > bg_off.getWidth() || event.getY() > bg_off.getHeight()) {
                    return false;
                } else {
                    return true;
                }
            }
            case MotionEvent.ACTION_UP: {
                // toggle on finger-up and tell the listener about the new state
                nowStatus = !nowStatus;
                if (listener != null) {
                    listener.onChanged(DragSwitch.this, nowStatus);
                }
                invalidate(); // redraw with the new state
                break;
            }
            case MotionEvent.ACTION_CANCEL:
                break;
            default:
                break;
        }
        return true;
    }
    /**
     * Registers a listener notified whenever the state toggles; for external callers.
     * @param listener receives the switch and its new state
     */
    public void setOnChangedListener(OnChangedListener listener) {
        this.listener = listener;
    }
    /**
     * Sets the initial state of the slide switch; for external callers.
     */
    public void setChecked(boolean checked) {
        nowStatus = checked;
    }
    // Returns the current on/off state.
    public boolean isChecked()
    {
        return nowStatus;
    }
    // Callback fired after every toggle.
    public interface OnChangedListener {
        void onChanged(DragSwitch dragSwitch, boolean checkState);
    }
}<file_sep>/watchsedentary/src/main/java/cn/glassx/wear/watch/Sedentary/MainActivity.java
package cn.glassx.wear.watch.Sedentary;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.wearable.activity.WearableActivity;
import android.support.wearable.view.BoxInsetLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import cn.glassx.wear.watch.Sedentary.customView.DragSwitch;
import cn.glassx.wear.watch.Sedentary.receiver.AlarmReceiver;
/**
 * Toggle screen for the sedentary reminder.  Persists the on/off state in
 * SharedPreferences and, while enabled, arms an AlarmReceiver broadcast at
 * 9 o'clock (GMT+8) on weekdays.
 */
public class MainActivity extends WearableActivity
{
    private BoxInsetLayout mContainerView;
    private TextView mTextView;
    private DragSwitch mSwitch;
    private RelativeLayout mRelativeLayout;
    private SharedPreferences mSharedPreferences;
    private boolean mToggle = false; // persisted reminder on/off flag

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        setAmbientEnabled();
        initView();
        initData();
    }

    private void initView()
    {
        mContainerView = (BoxInsetLayout) findViewById(R.id.container);
        mTextView = (TextView) findViewById(R.id.text);
        mSwitch = (DragSwitch) findViewById(R.id.sedentarySc);
        mRelativeLayout = (RelativeLayout) findViewById(R.id.sedentaryBg);
    }

    /** Restores the saved switch state and wires the toggle listener. */
    @SuppressWarnings("deprecation")
    private void initData()
    {
        mSharedPreferences = getSharedPreferences("config", MODE_PRIVATE);
        final SharedPreferences.Editor edit = mSharedPreferences.edit();

        boolean enabled = mSharedPreferences.getBoolean("mToggle", mToggle);
        mSwitch.setChecked(enabled);
        if (enabled)
        {
            setStatus(R.color.oBackgroundColor, R.string.oRemind);
            // Fix: apply the weekday filter on restore as well; previously this
            // path called setTime() unconditionally while the toggle handler
            // below only scheduled alarms on weekdays.
            dayOfWeek();
        } else
        {
            setStatus(R.color.cBackgroundColor, R.string.cRemind);
        }

        mSwitch.setOnChangedListener(new DragSwitch.OnChangedListener()
        {
            @Override
            public void onChanged(DragSwitch dragSwitch, boolean checkState)
            {
                mToggle = checkState;
                edit.putBoolean("mToggle", mToggle);
                edit.apply(); // apply() persists asynchronously; commit() blocked the UI thread
                if (checkState)
                {
                    setStatus(R.color.oBackgroundColor, R.string.oRemind);
                    dayOfWeek();
                } else
                {
                    setStatus(R.color.cBackgroundColor, R.string.cRemind);
                }
            }
        });
    }

    /** Applies the given background colour and status text. */
    private void setStatus(int mColor, int mText)
    {
        mRelativeLayout.setBackgroundColor(getResources().getColor(mColor));
        mTextView.setText(mText);
    }

    /** Schedules the reminder only on weekdays (Monday through Friday). */
    private void dayOfWeek()
    {
        Calendar c = Calendar.getInstance();
        c.setTimeZone(TimeZone.getTimeZone("GMT+8"));
        c.setTime(new Date(System.currentTimeMillis()));
        int dayOfWeek = c.get(Calendar.DAY_OF_WEEK);
        // Calendar.MONDAY..Calendar.FRIDAY (2..6) replaces the old 7-case switch.
        if (dayOfWeek >= Calendar.MONDAY && dayOfWeek <= Calendar.FRIDAY)
        {
            setTime();
        }
    }

    /** Registers the reminder alarm(s); currently only the 9 o'clock one. */
    private void setTime()
    {
        sendBroadcast(setClock(9));
        // Additional reminder hours, kept from the original but still disabled:
        // sendBroadcast(setClock(10));
        // sendBroadcast(setClock(11));
        // sendBroadcast(setClock(15));
        // sendBroadcast(setClock(16));
    }

    /**
     * Returns the epoch millis of today's {@code mHour} o'clock in GMT+8.
     * Fix: minutes/seconds/millis are zeroed so the alarm fires on the hour
     * instead of at whatever minute this method happened to run.
     */
    private long setClock(int mHour)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(System.currentTimeMillis());
        calendar.setTimeZone(TimeZone.getTimeZone("GMT+8"));
        calendar.set(Calendar.HOUR_OF_DAY, mHour);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        // NOTE(review): if the hour is already past, AlarmManager.set() fires
        // immediately; confirm whether the alarm should roll to the next day.
        return calendar.getTimeInMillis();
    }

    /** Arms an RTC_WAKEUP alarm that triggers AlarmReceiver at {@code time}. */
    private void sendBroadcast(long time)
    {
        Intent intent = new Intent(MainActivity.this, AlarmReceiver.class);
        PendingIntent sender = PendingIntent.getBroadcast(MainActivity.this, 0, intent, 0);
        AlarmManager manager = (AlarmManager) getSystemService(ALARM_SERVICE);
        manager.set(AlarmManager.RTC_WAKEUP, time, sender);
    }
}
<file_sep>/wathchMemorandum/src/main/java/watch/app/wear/glassx/cn/watchmemorandum/sqlite/SQLiteUtils.java
package watch.app.wear.glassx.cn.watchmemorandum.sqlite;
import android.content.ContentValues;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import watch.app.wear.glassx.cn.watchmemorandum.UserInfo;
/**
* Created by Havynlam on 15/9/7.
*/
/**
 * Thin CRUD helpers around the memorandum "user" table.
 * Column constants mirror the schema created by {@code DatabaseHelper}.
 */
public class SQLiteUtils
{
    public static final String DATABASE_NAME = "memorandum_db";
    public static final String DATETIME = "datetime";
    public static final String CONTENT = "content";
    public static final String ALERTTIME= "alerttime";

    public SQLiteUtils() {
    }

    /** Creates a helper bound to the shared memorandum database. */
    public static DatabaseHelper createDBHelper(Context context) {
        return new DatabaseHelper(context, DATABASE_NAME);
    }

    /** Inserts one memo row built from the given user object. */
    public void insert(DatabaseHelper dbHelper, UserInfo user) {
        ContentValues values = new ContentValues();
        values.put(DATETIME, user.getDatetime());
        values.put(CONTENT, user.getContent());
        values.put(ALERTTIME, user.getAlerttime());
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        try {
            db.insert("user", null, values);
        } finally {
            db.close(); // release the connection even if the insert throws
        }
    }

    public void update(DatabaseHelper dbHelper) {
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        try {
            ContentValues values = new ContentValues();
            values.put("content", "update");
            // NOTE(review): the row id is hard-coded to 1, mirroring the
            // original behaviour — confirm whether that is intentional.
            db.update("user", values, "id=?", new String[]{"1"});
        } finally {
            db.close();
        }
    }

    /**
     * Deletes the memo row whose datetime equals {@code datetime}.
     * Fix: the old code concatenated the value into raw SQL, which broke on
     * unquoted strings and allowed SQL injection; it also used a read-only
     * handle for a write.  A parameterized delete fixes both.
     */
    public void delete(DatabaseHelper dbHelper, String datetime) {
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        try {
            db.delete("user", DATETIME + "=?", new String[]{datetime});
        } finally {
            db.close();
        }
    }
}
<file_sep>/settings.gradle
// NOTE(review): the last entry lacks a leading colon ('wathchMemorandum');
// confirm it resolves to the same module as ':wathchMemorandum' would.
include ':mobile', ':wear', ':watchsedentary', 'wathchMemorandum'
| 342edcf60e5e96145dd0e0e2913c545d128d8f57 | [
"Java",
"Gradle"
] | 5 | Java | Havynlam/HavynlamX | 5cfdcd1ebd020680aadbcb885f9ce9199ca29438 | dbcddb2c3f7569ed7a72452f498e475bfdc3718e |
refs/heads/main | <file_sep>import { Scene } from "phaser";
import { TitleScene } from "../scenes/titleScene";
export class SceneManager {
static scene:Phaser.Scene;
public static init(s:Scene):void{
this.scene=s;
}
public static start(sceneName:string,nowScene :Scene){
this.scene=nowScene;
this.scene.scene.start(sceneName);
}
public static set(sceneName:string,s:Function|Phaser.Scene,nowScene: Scene){
if(this.scene.scene.get(sceneName)==null)
this.scene.scene.add(sceneName,s,false);
this.start(sceneName,nowScene);
}
}
<file_sep># UM2021Summer
## 起動
```npm run dev```
## 環境構築
- https://tech.e3factory.com/programming/2551<file_sep>import { KeyManager } from "../utils/keymanager";
import { SceneManager } from "../utils/sceneManager";
import { TestScene } from "./testScene";
/**
 * Title screen: clicking the "Title" text switches to TestScene; the green
 * ellipse is moved with WASD (remapped onto the UP/DOWN/LEFT/RIGHT names).
 */
export class TitleScene extends Phaser.Scene {
    constructor() {
        // Only registers the scene key with Phaser.
        super({
            key: "TitleScene"
        });
    }
    init():void{
        console.log("init : titleScene");
        SceneManager.init(this);
        KeyManager.init(this);
        // Map WASD onto the arrow-key names queried in update().
        KeyManager.replaceKey("DOWN","S");
        KeyManager.replaceKey("UP","W");
        KeyManager.replaceKey("LEFT","A");
        KeyManager.replaceKey("RIGHT","D");
    }
    // Asset loading (images etc.) would normally happen here.
    preload(): void {
        console.log("Hello Phaser");
    }
    private startText?: Phaser.GameObjects.Text // clickable "Title" label
    private ellipse?: Phaser.GameObjects.Ellipse // player-controlled marker
    private bk_color: string = '0xeeeeee'
    private fontStyle: Phaser.Types.GameObjects.Text.TextStyle = { color: 'red', fontSize: '70px' }
    private static num:integer =0 // visit counter appended to the label
    create():void {
        this.cameras.main.setBackgroundColor(this.bk_color)
        this.startText = this.add.text(parseInt(this.game.config.width.toString())/2, parseInt(this.game.config.height.toString())/2, ('Title'+TitleScene.num++), this.fontStyle)
        this.startText.setOrigin(0.5)
        this.startText.setInteractive()
        // Fix: removed a stray no-op expression statement (`this.startText`)
        // and a dead commented-out scene.start call.
        this.startText.on('pointerdown', () => {
            SceneManager.set("TestScene",TestScene,this);
            console.log("clicked");
        })
        this.ellipse=this.add.ellipse(0,0,100,100,0x00ff00);
    }
    update():void{
        if(KeyManager.isDown("UP")){
            this.ellipse.y-=5;
        }
        if(KeyManager.isDown("DOWN")){
            this.ellipse.y+=5;
        }
        if(KeyManager.isDown("RIGHT")){
            this.ellipse.x+=5;
        }
        if(KeyManager.isDown("LEFT")){
            this.ellipse.x-=5;
        }
    }
}<file_sep>import { Scene } from "phaser";
/**
 * Static registry of Phaser key objects addressed by name, with support for
 * swapping two bindings (e.g. querying "UP" while physically pressing W).
 */
export class KeyManager {
    static keys:Map<string,Phaser.Input.Keyboard.Key>;

    static A:Phaser.Input.Keyboard.Key;
    static B:Phaser.Input.Keyboard.Key;
    static C:Phaser.Input.Keyboard.Key;
    static D:Phaser.Input.Keyboard.Key;
    static E:Phaser.Input.Keyboard.Key;
    static F:Phaser.Input.Keyboard.Key;
    static G:Phaser.Input.Keyboard.Key;
    static H:Phaser.Input.Keyboard.Key;
    static I:Phaser.Input.Keyboard.Key;
    static J:Phaser.Input.Keyboard.Key;
    static K:Phaser.Input.Keyboard.Key;
    static L:Phaser.Input.Keyboard.Key;
    static M:Phaser.Input.Keyboard.Key;
    static N:Phaser.Input.Keyboard.Key;
    static O:Phaser.Input.Keyboard.Key;
    static P:Phaser.Input.Keyboard.Key;
    static Q:Phaser.Input.Keyboard.Key;
    static R:Phaser.Input.Keyboard.Key;
    static S:Phaser.Input.Keyboard.Key;
    static T:Phaser.Input.Keyboard.Key;
    static U:Phaser.Input.Keyboard.Key;
    static V:Phaser.Input.Keyboard.Key;
    static W:Phaser.Input.Keyboard.Key;
    static X:Phaser.Input.Keyboard.Key;
    static Y:Phaser.Input.Keyboard.Key;
    static Z:Phaser.Input.Keyboard.Key;
    static UP:Phaser.Input.Keyboard.Key;
    static DOWN:Phaser.Input.Keyboard.Key;
    static LEFT:Phaser.Input.Keyboard.Key;
    static RIGHT:Phaser.Input.Keyboard.Key;
    static SPACE:Phaser.Input.Keyboard.Key;
    static ESC:Phaser.Input.Keyboard.Key;
    static BACKSPACE:Phaser.Input.Keyboard.Key;
    static SHIFT:Phaser.Input.Keyboard.Key;
    static ENTER:Phaser.Input.Keyboard.Key;

    // Every name registered by init(); each is also a valid key of
    // Phaser.Input.Keyboard.KeyCodes.
    private static readonly KEY_NAMES:string[] = [
        "A","B","C","D","E","F","G","H","I","J","K","L","M",
        "N","O","P","Q","R","S","T","U","V","W","X","Y","Z",
        "UP","DOWN","RIGHT","LEFT",
        "SPACE","ESC","BACKSPACE","SHIFT","ENTER"
    ];

    /**
     * Registers every key in KEY_NAMES with the given scene and fills both
     * the lookup map and the matching static fields (KeyManager.A, ...).
     * Replaces roughly 110 lines of copy-pasted addKey()/set() pairs with
     * one data-driven loop; the registered keys are identical.
     */
    static init(scene : Scene):void{
        console.log("init");
        this.keys=new Map<string,Phaser.Input.Keyboard.Key>();
        for(const name of KeyManager.KEY_NAMES){
            const code=(Phaser.Input.Keyboard.KeyCodes as any)[name];
            const key=scene.input.keyboard.addKey(code);
            (KeyManager as any)[name]=key; // keep the static field in sync with the map
            this.keys.set(name,key);
        }
    }

    /** True while the named (possibly remapped) key is held down. */
    public static isDown(key:string):boolean{
        if(this.keys.has(key))
            return this.keys.get(key).isDown;
        console.log("指定されたキーが見つかりません : "+key);
        return false;
    }

    /** True while the named (possibly remapped) key is released. */
    public static isUp(key:string):boolean{
        if(this.keys.has(key))
            return this.keys.get(key).isUp;
        console.log("指定されたキーが見つかりません : "+key);
        return false;
    }

    /**
     * Swaps the bindings of two logical names so code querying key1 reacts
     * to key2's physical key and vice versa.
     * @returns true when both names exist and were swapped.
     */
    public static replaceKey(key1:string,key2:string):boolean{
        if(this.keys.has(key1)&&this.keys.has(key2)){
            const key01=this.getButton(key1);
            const key02=this.getButton(key2);
            this.keys.set(key1,key02);
            this.keys.set(key2,key01);
            return true;
        }
        return false;
    }

    /** Returns the Phaser key object bound to the given name, or null. */
    public static getButton(key:string):Phaser.Input.Keyboard.Key{
        if(this.keys.has(key))return this.keys.get(key);
        console.log("指定されたキーが見つかりません : "+key);
        return null;
    }
}<file_sep>//必要なimport
import "phaser";
import { TitleScene } from "./scenes/titleScene";
// Basic game configuration.
const config: Phaser.Types.Core.GameConfig = {
    title: "Title", // window title
    version: "0.0.1", // version string
    width: 1280, // canvas width in pixels
    height: 720, // canvas height in pixels
    parent:"game", // id of the parent element in the DOM
    type: Phaser.AUTO, // auto-select canvas or WebGL
    scene: [TitleScene] // Scene classes to register
};
// Main game class.
export class Game extends Phaser.Game{
    constructor(config: Phaser.Types.Core.GameConfig) {
        super(config);
    }
}
// Start the game once the window has finished loading.
window.addEventListener("load", () => {
    var game = new Game(config);
});<file_sep>import { KeyManager } from "../utils/keymanager";
import { SceneManager } from "../utils/sceneManager";
import { TitleScene } from "./titleScene";
export class TestScene extends Phaser.Scene {
constructor() {
//識別ID設定のみ
super({
key: "TestScene"
});
}
init():void{
console.log("init : testScene");
}
//本来はこのメソッドで、画像ファイルなどのロード
preload(): void {
//今回はコンソール表示だけ
console.log("Hello Phaser");
}
private startText?: Phaser.GameObjects.Text // 追加
private ellipse?: Phaser.GameObjects.Ellipse // 追加
private bk_color: string = '0xeeeeee' // 追加
private fontStyle: Phaser.Types.GameObjects.Text.TextStyle = { color: 'red', fontSize: '70px' } //追加
private static num:integer =0
create():void {
this.cameras.main.setBackgroundColor(this.bk_color)
this.startText = this.add.text(parseInt(this.game.config.width.toString())/2, parseInt(this.game.config.height.toString())/2, ('Test'+TestScene.num++), this.fontStyle)
this.startText.setOrigin(0.5)
this.startText.setInteractive()
this.startText
this.startText.on('pointerdown', () => {
SceneManager.set("TitleScene",TitleScene,this);
})
this.ellipse=this.add.ellipse(0,0,100,100,0x00ff00);
}
update():void{
if(KeyManager.isDown("UP")){
this.ellipse.y-=5;
}
if(KeyManager.isDown("DOWN")){
this.ellipse.y+=5;
}
if(KeyManager.isDown("LEFT")){
this.ellipse.x+=5;
}
if(KeyManager.isDown("RIGHT")){
console.log("right");
this.ellipse.x-=5;
}
}
} | 8530ce8f8a7da9453520e42dbd4085d320e68ea9 | [
"Markdown",
"TypeScript"
] | 6 | TypeScript | UenoMizuki/UM2021Summer | 964535732c13313a1749850d053dd0758555a534 | 34ac286d3f19f2ec44a1c16e9d15e084b2de5415 |
refs/heads/main | <repo_name>AndreLeEEEEEE/calculator<file_sep>/README.md
# calculator
It's a calculator.
<file_sep>/script.js
const add = (x, y) => x + y;
const subtract = (x, y) => x - y;
const multiply = (x, y) => x * y;
const divide = function(x, y) {
if (y === 0) {
alert("Error: Don't divide by 0");
clearDisplay();
return;
}
return x / y;
}
function operate(currentOperator) {
if (storedOperator) {
switch(storedOperator) {
case "+":
updateDisplay(add(storedValue, getDisplay()), true);
break;
case "-":
updateDisplay(subtract(storedValue, getDisplay()), true);
break;
case "*":
updateDisplay(multiply(storedValue, getDisplay()), true);
break;
case "/":
updateDisplay(divide(storedValue, getDisplay()), true);
break;
default:
alert("Unreached");
}
// Used for expressions with more than one operator
if (currentOperator !== null) {equalizeValues(currentOperator);}
// Used when the equal button is clicked
else {storedOperator = null;}
}
// Used on the first operator
else {equalizeValues(currentOperator);}
// Ensures that the next number will replace the current display number
isNumber = false;
}
function pressEqual() {
// Make sure equal isn't clicked when the number to operator ratio is wrong
if (storedOperator && isNumber) {
operate();
}
}
function pressNumber(numberKey) {
// If true, build a number, if false, replace the display value first
isNumber ? updateDisplay(numberKey) : updateDisplay(numberKey, true);
}
function clearDisplay() {
updateDisplay("", true);
storedValue = null;
storedOperator = null;
}
function updateDisplay(displayValue, replaceDisplay = false) {
if (!replaceDisplay) {
let tempDisplay = document.querySelector("#display").textContent;
displayValue = tempDisplay.toString() + displayValue.toString();
}
// Allows for a new number to be built after replacement
isNumber = true;
// Round to 2 decimal places if there are any
displayValue = Math.round(displayValue * 100) / 100;
document.querySelector("#display").textContent = displayValue;
}
function getDisplay() {
return Number(document.querySelector("#display").textContent);
}
function equalizeValues(currentOperator) {
storedOperator = currentOperator;
storedValue = getDisplay();
}
// Will be hoisted to the top on file execution
let storedValue = null;
let storedOperator = null;
let isNumber = true; | d707a28ce90c1eb4683e5d2caa424c95641ae880 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | AndreLeEEEEEE/calculator | a126e3018271a6b513433e85a004b3b21e9c50b9 | 33bd2b3786d5479c8bc6b1a7aa6c167728a76deb |
refs/heads/master | <file_sep>exports.up = function(knex) {
try {
const updateQuery = `UPDATE person SET name = alias`
return knex.raw(updateQuery)
} catch(err) {
console.log('err in migration', err)
}
};
exports.down = function() {};<file_sep>exports.up = function(knex) {
return knex.schema.table('person', table => {
table.dropColumns("firstName", "lastName", "alias")
});
};
exports.down = function(knex) {}; | 0b0f5680e76de09940829ffd24dfe0d6b46fb8b3 | [
"JavaScript"
] | 2 | JavaScript | LuigiLegion/meirim | 032e53a776164ad1c234feac65891b565214ce39 | b735778228dedf39567fb9b6722fac8986bd38f1 |
refs/heads/master | <file_sep># Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABCMeta
from numbers import Integral
import numpy as np
import sqlite3
from sqlite3 import Row
import warnings
from logbook import Logger
import pandas as pd
from pandas.tseries.tools import normalize_date
from six import with_metaclass, string_types
from zipline.errors import (
ConsumeAssetMetaDataError,
InvalidAssetType,
MultipleSymbolsFound,
RootSymbolNotFound,
SidAssignmentError,
SidNotFound,
SymbolNotFound,
MapAssetIdentifierIndexError,
)
from zipline.assets._assets import (
Asset, Equity, Future
)
log = Logger('assets.py')
# Superset of metadata keys accepted by consume_metadata / _insert_metadata.
# Keys not in this list are silently dropped during insertion.
ASSET_FIELDS = [
    'sid',
    'asset_type',
    'symbol',
    'root_symbol',
    'asset_name',
    'start_date',
    'end_date',
    'first_traded',
    'exchange',
    'notice_date',
    'expiration_date',
    'contract_multiplier',
    # The following fields are for compatibility with other systems
    'file_name', # Used as symbol
    'company_name', # Used as asset_name
    'start_date_nano', # Used as start_date
    'end_date_nano', # Used as end_date
]
# Columns common to both the 'equities' and 'futures' sqlite tables.
ASSET_TABLE_FIELDS = [
    'sid',
    'symbol',
    'asset_name',
    'start_date',
    'end_date',
    'first_traded',
    'exchange',
]
# Columns of the 'futures' table: the common asset columns plus the
# futures-specific contract fields.
FUTURE_TABLE_FIELDS = ASSET_TABLE_FIELDS + [
    'root_symbol',
    'notice_date',
    'expiration_date',
    'contract_multiplier',
]
# The equity queries select only the common columns.  (The 'equities' table
# also has a 'fuzzy' column, used for lookups but not for building Equities.)
EQUITY_TABLE_FIELDS = ASSET_TABLE_FIELDS
# Create the query once from the fields, so that the join is not done
# repeatedly.
FUTURE_BY_SID_QUERY = 'select {0} from futures where sid=?'.format(
    ", ".join(FUTURE_TABLE_FIELDS))
EQUITY_BY_SID_QUERY = 'select {0} from equities where sid=?'.format(
    ", ".join(EQUITY_TABLE_FIELDS))
class AssetFinder(object):
    def __init__(self,
                 metadata=None,
                 allow_sid_assignment=True,
                 fuzzy_char=None,
                 db_path=':memory:',
                 create_table=True):
        """
        Build an AssetFinder backed by a sqlite database at ``db_path``.

        :param metadata: Optional initial metadata (dict, DataFrame, or
            readable) consumed immediately when ``create_table`` is True.
        :param allow_sid_assignment: If True, entries without a sid are
            assigned one; if False, such entries raise SidAssignmentError.
        :param fuzzy_char: Character stripped from symbols to build the
            'fuzzy' lookup column (see ``lookup_symbol``).
        :param db_path: sqlite database path; defaults to a throwaway
            in-memory database.
        :param create_table: If True, create the schema and consume
            ``metadata``; if False, assume a pre-existing database.
        """
        self.fuzzy_char = fuzzy_char
        # This flag controls if the AssetFinder is allowed to generate its own
        # sids. If False, metadata that does not contain a sid will raise an
        # exception when building assets.
        self.allow_sid_assignment = allow_sid_assignment
        if allow_sid_assignment:
            # Default end_date given to assets whose metadata omits one.
            self.end_date_to_assign = normalize_date(
                pd.Timestamp('now', tz='UTC'))
        self.conn = sqlite3.connect(db_path)
        self.conn.text_factory = str
        self.cursor = self.conn.cursor()
        # The AssetFinder also holds a nested-dict of all metadata for
        # reference when building Assets
        self.metadata_cache = {}
        # Create table and read in metadata.
        # Should we use flags like 'r', 'w', instead?
        # What we need to support is:
        # - A 'throwaway' mode where the metadata is read each run.
        # - A 'write' mode where the data is written to the provided db_path
        # - A 'read' mode where the asset finder uses a prexisting db.
        if create_table:
            self.create_db_tables()
            if metadata is not None:
                self.consume_metadata(metadata)
        # Cache for lookup of assets by sid, the objects in the asset lookup
        # may be shared with the results from equity and future lookup caches.
        #
        # The top level cache exists to minimize lookups on the asset type
        # routing.
        #
        # The caches are read through, i.e. accessing an asset through
        # retrieve_asset, _retrieve_equity etc. will populate the cache on
        # first retrieval.
        self._asset_cache = {}
        self._equity_cache = {}
        self._future_cache = {}
        self._asset_type_cache = {}
        # Populated on first call to `lifetimes`.
        self._asset_lifetimes = None
def create_db_tables(self):
c = self.conn.cursor()
c.execute("""
CREATE TABLE equities(
sid integer,
symbol text,
asset_name text,
start_date integer,
end_date integer,
first_traded integer,
exchange text,
fuzzy text
)""")
c.execute('CREATE INDEX equities_sid on equities(sid)')
c.execute('CREATE INDEX equities_symbol on equities(symbol)')
c.execute('CREATE INDEX equities_fuzzy on equities(fuzzy)')
c.execute("""
CREATE TABLE futures(
sid integer,
symbol text,
asset_name text,
start_date integer,
end_date integer,
first_traded integer,
exchange text,
root_symbol text,
notice_date integer,
expiration_date integer,
contract_multiplier real
)""")
c.execute('CREATE INDEX futures_sid on futures(sid)')
c.execute('CREATE INDEX futures_root_symbol on equities(symbol)')
c.execute("""
CREATE TABLE asset_router
(sid integer,
asset_type text)
""")
c.execute('CREATE INDEX asset_router_sid on asset_router(sid)')
self.conn.commit()
def asset_type_by_sid(self, sid):
try:
return self._asset_type_cache[sid]
except KeyError:
pass
c = self.conn.cursor()
# Python 3 compatibility required forcing to int for sid = 0.
t = (int(sid),)
query = 'select asset_type from asset_router where sid=:sid'
c.execute(query, t)
data = c.fetchone()
if data is None:
return
asset_type = data[0]
self._asset_type_cache[sid] = asset_type
return asset_type
    def retrieve_asset(self, sid, default_none=False):
        """
        Retrieve the Asset object for the given sid.

        :param sid: Integer sid, or an Asset (returned unchanged).
        :param default_none: If True, return None for unknown sids instead
            of raising.
        :raises SidNotFound: If the sid is unknown and default_none is False.
        """
        if isinstance(sid, Asset):
            return sid
        try:
            asset = self._asset_cache[sid]
        except KeyError:
            # Cache miss: route on asset type, then read through the
            # type-specific retriever (which maintains its own cache).
            asset_type = self.asset_type_by_sid(sid)
            if asset_type == 'equity':
                asset = self._retrieve_equity(sid)
            elif asset_type == 'future':
                asset = self._retrieve_futures_contract(sid)
            else:
                asset = None
            # Unknown sids are cached as None so the routing query is not
            # repeated on subsequent calls.
            self._asset_cache[sid] = asset
        if asset is not None:
            return asset
        elif default_none:
            return None
        else:
            raise SidNotFound(sid=sid)
def retrieve_all(self, sids, default_none=False):
return [self.retrieve_asset(sid) for sid in sids]
    def _retrieve_equity(self, sid):
        """
        Build (and cache) an Equity from the 'equities' table row for
        ``sid``.  Caches and returns None when the row is empty.
        """
        try:
            return self._equity_cache[sid]
        except KeyError:
            pass
        c = self.conn.cursor()
        c.row_factory = Row
        t = (int(sid),)
        c.execute(EQUITY_BY_SID_QUERY, t)
        # NOTE(review): fetchone() returns None when no row matches, which
        # would make dict() raise TypeError here -- presumably callers only
        # pass sids already routed via asset_type_by_sid; confirm.
        data = dict(c.fetchone())
        if data:
            # Dates are stored as integer nanos (Timestamp.value); convert
            # back to UTC Timestamps before constructing the Equity.
            if data['start_date']:
                data['start_date'] = pd.Timestamp(data['start_date'], tz='UTC')
            if data['end_date']:
                data['end_date'] = pd.Timestamp(data['end_date'], tz='UTC')
            if data['first_traded']:
                data['first_traded'] = pd.Timestamp(
                    data['first_traded'], tz='UTC')
            equity = Equity(**data)
        else:
            equity = None
        self._equity_cache[sid] = equity
        return equity
    def _retrieve_futures_contract(self, sid):
        """
        Build (and cache) a Future from the 'futures' table row for ``sid``.
        Caches and returns None when the row is empty.
        """
        try:
            return self._future_cache[sid]
        except KeyError:
            pass
        c = self.conn.cursor()
        t = (int(sid),)
        c.row_factory = Row
        c.execute(FUTURE_BY_SID_QUERY, t)
        # NOTE(review): like _retrieve_equity, a missing row makes dict()
        # raise TypeError on the None from fetchone() -- confirm callers
        # pre-validate sids via asset_type_by_sid.
        data = dict(c.fetchone())
        if data:
            # Dates are stored as integer nanos; convert back to UTC
            # Timestamps before constructing the Future.
            if data['start_date']:
                data['start_date'] = pd.Timestamp(data['start_date'], tz='UTC')
            if data['end_date']:
                data['end_date'] = pd.Timestamp(data['end_date'], tz='UTC')
            if data['first_traded']:
                data['first_traded'] = pd.Timestamp(
                    data['first_traded'], tz='UTC')
            if data['notice_date']:
                data['notice_date'] = pd.Timestamp(
                    data['notice_date'], tz='UTC')
            if data['expiration_date']:
                data['expiration_date'] = pd.Timestamp(
                    data['expiration_date'], tz='UTC')
            future = Future(**data)
        else:
            future = None
        self._future_cache[sid] = future
        return future
    def lookup_symbol_resolve_multiple(self, symbol, as_of_date=None):
        """
        Return the Equity matching ``symbol`` in the database.

        With an ``as_of_date``, prefers (in order): the unique sid active on
        that date; otherwise the sid with the highest end_date not after the
        date; otherwise, among multiple active matches, the one with the
        latest start_date (end_date breaking ties).

        Without an ``as_of_date``, requires a unique match.

        :raises MultipleSymbolsFound: if multiple Assets match and
            as_of_date is not set.
        :raises SymbolNotFound: if no Asset matches.
        """
        if as_of_date is not None:
            as_of_date = pd.Timestamp(normalize_date(as_of_date))
        c = self.conn.cursor()
        if as_of_date:
            # If one SID exists for symbol, return that symbol
            t = (symbol, as_of_date.value, as_of_date.value)
            query = ("select sid from equities "
                     "where symbol=? "
                     "and start_date<=? "
                     "and end_date>=?")
            c.execute(query, t)
            candidates = c.fetchall()
            if len(candidates) == 1:
                return self._retrieve_equity(candidates[0][0])
            # If no SID exists for symbol, return SID with the
            # highest-but-not-over end_date
            if len(candidates) == 0:
                t = (symbol, as_of_date.value)
                query = ("select sid from equities "
                         "where symbol=? "
                         "and start_date<=? "
                         "order by end_date desc "
                         "limit 1")
                c.execute(query, t)
                data = c.fetchone()
                if data:
                    return self._retrieve_equity(data[0])
            # If multiple SIDs exist for symbol, return latest start_date with
            # end_date as a tie-breaker
            if len(candidates) > 1:
                t = (symbol, as_of_date.value)
                query = ("select sid from equities "
                         "where symbol=? " +
                         "and start_date<=? " +
                         "order by start_date desc, end_date desc " +
                         "limit 1")
                c.execute(query, t)
                data = c.fetchone()
                if data:
                    return self._retrieve_equity(data[0])
            raise SymbolNotFound(symbol=symbol)
        else:
            # No as_of_date: the symbol must resolve uniquely.
            t = (symbol,)
            query = ("select sid from equities where symbol=?")
            c.execute(query, t)
            data = c.fetchall()
            if len(data) == 1:
                return self._retrieve_equity(data[0][0])
            elif not data:
                raise SymbolNotFound(symbol=symbol)
            else:
                # Surface all candidates so the caller can disambiguate.
                options = []
                for row in data:
                    sid = row[0]
                    asset = self._retrieve_equity(sid)
                    options.append(asset)
                raise MultipleSymbolsFound(symbol=symbol,
                                           options=options)
def lookup_symbol(self, symbol, as_of_date, fuzzy=False):
"""
If a fuzzy string is provided, then we try various symbols based on
the provided symbol. This is to facilitate mapping from a broker's
symbol to ours in cases where mapping to the broker's symbol loses
information. For example, if we have CMCS_A, but a broker has CMCSA,
when the broker provides CMCSA, it can also provide fuzzy='_',
so we can find a match by inserting an underscore.
"""
symbol = symbol.upper()
as_of_date = normalize_date(as_of_date)
if not fuzzy:
try:
return self.lookup_symbol_resolve_multiple(symbol, as_of_date)
except SymbolNotFound:
return None
else:
c = self.conn.cursor()
fuzzy = symbol.replace(self.fuzzy_char, '')
t = (fuzzy, as_of_date.value, as_of_date.value)
query = ("select sid from equities "
"where fuzzy=? " +
"and start_date<=? " +
"and end_date>=?")
c.execute(query, t)
candidates = c.fetchall()
# If one SID exists for symbol, return that symbol
if len(candidates) == 1:
return self._retrieve_equity(candidates[0][0])
# If multiple SIDs exist for symbol, return latest start_date with
# end_date as a tie-breaker
if len(candidates) > 1:
t = (symbol, as_of_date.value)
query = ("select sid from equities "
"where symbol=? " +
"and start_date<=? " +
"order by start_date desc, end_date desc" +
"limit 1")
c.execute(query, t)
data = c.fetchone()
if data:
return self._retrieve_equity(data[0])
    def lookup_future_chain(self, root_symbol, as_of_date, knowledge_date):
        """ Return the futures chain for a given root symbol.

        Parameters
        ----------
        root_symbol : str
            Root symbol of the desired future.
        as_of_date : pd.Timestamp or pd.NaT
            Date at which the chain determination is rooted. I.e. the
            existing contract whose notice date is first after this
            date is the primary contract, etc. If NaT is given, the
            chain is unbounded, and all contracts for this root symbol
            are returned.
        knowledge_date : pd.Timestamp or pd.NaT
            Date for determining which contracts exist for inclusion in
            this chain. Contracts exist only if they have a start_date
            on or before this date. If NaT is given and as_of_date is
            not NaT, the value of as_of_date is used for
            knowledge_date.

        Returns
        -------
        list
            A list of Future objects, the chain for the given
            parameters, ordered by ascending notice_date.

        Raises
        ------
        RootSymbolNotFound
            Raised when a future chain could not be found for the given
            root symbol.
        """
        c = self.conn.cursor()
        if as_of_date is pd.NaT:
            # If the as_of_date is NaT, get all contracts for this
            # root symbol.
            t = {'root_symbol': root_symbol}
            c.execute("""
            select sid from futures
            where root_symbol=:root_symbol
            order by notice_date asc
            """, t)
        else:
            if knowledge_date is pd.NaT:
                # If knowledge_date is NaT, default to using as_of_date
                t = {'root_symbol': root_symbol,
                     'as_of_date': as_of_date.value,
                     'knowledge_date': as_of_date.value}
            else:
                t = {'root_symbol': root_symbol,
                     'as_of_date': as_of_date.value,
                     'knowledge_date': knowledge_date.value}
            # Contracts whose notice date is strictly after as_of_date and
            # which existed (start_date) by knowledge_date.
            c.execute("""
            select sid from futures
            where root_symbol=:root_symbol
            and :as_of_date < notice_date
            and start_date <= :knowledge_date
            order by notice_date asc
            """, t)
        sids = [r[0] for r in c.fetchall()]
        if not sids:
            # Check if root symbol exists.
            c.execute("""
            select count(sid) from futures where root_symbol=:root_symbol
            """, t)
            count = c.fetchone()[0]
            if count == 0:
                raise RootSymbolNotFound(root_symbol=root_symbol)
            else:
                # If symbol exists, return empty future chain.
                return []
        return [self._retrieve_futures_contract(sid) for sid in sids]
@property
def sids(self):
c = self.conn.cursor()
query = 'select sid from asset_router'
c.execute(query)
return [r[0] for r in c.fetchall()]
    def _lookup_generic_scalar(self,
                               asset_convertible,
                               as_of_date,
                               matches,
                               missing):
        """
        Convert asset_convertible to an asset.

        On success, append to matches.
        On failure, append to missing.

        Accepts an Asset (used as-is), an integer sid, or a symbol string;
        always returns None -- results are communicated via the two lists.
        """
        if isinstance(asset_convertible, Asset):
            matches.append(asset_convertible)
        elif isinstance(asset_convertible, Integral):
            try:
                result = self.retrieve_asset(int(asset_convertible))
            except SidNotFound:
                missing.append(asset_convertible)
                return None
            matches.append(result)
        elif isinstance(asset_convertible, string_types):
            try:
                matches.append(
                    self.lookup_symbol_resolve_multiple(
                        asset_convertible,
                        as_of_date,
                    )
                )
            except SymbolNotFound:
                missing.append(asset_convertible)
                return None
        else:
            raise NotAssetConvertible(
                "Input was %s, not AssetConvertible."
                % asset_convertible
            )
    def lookup_generic(self,
                       asset_convertible_or_iterable,
                       as_of_date):
        """
        Convert a AssetConvertible or iterable of AssetConvertibles into
        a list of Asset objects.

        This method exists primarily as a convenience for implementing
        user-facing APIs that can handle multiple kinds of input. It should
        not be used for internal code where we already know the expected types
        of our inputs.

        Returns a pair of objects, the first of which is the result of the
        conversion (a single Asset for scalar input, a list for iterable
        input), and the second of which is a list containing any values
        that couldn't be resolved.
        """
        matches = []
        missing = []
        # Interpret input as scalar.
        if isinstance(asset_convertible_or_iterable, AssetConvertible):
            self._lookup_generic_scalar(
                asset_convertible=asset_convertible_or_iterable,
                as_of_date=as_of_date,
                matches=matches,
                missing=missing,
            )
            try:
                return matches[0], missing
            except IndexError:
                # Nothing matched: re-raise as the appropriate not-found
                # error based on whether the input was int-like or a symbol.
                if hasattr(asset_convertible_or_iterable, '__int__'):
                    raise SidNotFound(sid=asset_convertible_or_iterable)
                else:
                    raise SymbolNotFound(symbol=asset_convertible_or_iterable)
        # Interpret input as iterable.
        try:
            iterator = iter(asset_convertible_or_iterable)
        except TypeError:
            raise NotAssetConvertible(
                "Input was not a AssetConvertible "
                "or iterable of AssetConvertible."
            )
        for obj in iterator:
            self._lookup_generic_scalar(obj, as_of_date, matches, missing)
        return matches, missing
def map_identifier_index_to_sids(self, index, as_of_date):
"""
This method is for use in sanitizing a user's DataFrame or Panel
inputs.
Takes the given index of identifiers, checks their types, builds assets
if necessary, and returns a list of the sids that correspond to the
input index.
Parameters
__________
index : Iterable
An iterable containing ints, strings, or Assets
as_of_date : pandas.Timestamp
A date to be used to resolve any dual-mapped symbols
Returns
_______
List
A list of integer sids corresponding to the input index
"""
# This method assumes that the type of the objects in the index is
# consistent and can, therefore, be taken from the first identifier
first_identifier = index[0]
# Ensure that input is AssetConvertible (integer, string, or Asset)
if not isinstance(first_identifier, AssetConvertible):
raise MapAssetIdentifierIndexError(obj=first_identifier)
# If sids are provided, no mapping is necessary
if isinstance(first_identifier, Integral):
return index
# If symbols or Assets are provided, construction and mapping is
# necessary
self.consume_identifiers(index)
# Look up all Assets for mapping
matches = []
missing = []
for identifier in index:
self._lookup_generic_scalar(identifier, as_of_date,
matches, missing)
# Handle missing assets
if len(missing) > 0:
warnings.warn("Missing assets for identifiers: " + missing)
# Return a list of the sids of the found assets
return [asset.sid for asset in matches]
    def _insert_metadata(self, identifier, **kwargs):
        """
        Inserts the given metadata kwargs to the entry for the given
        identifier. Matching fields in the existing entry will be overwritten.

        Normalizes the raw kwargs (drops unknown/empty/NaN fields, maps
        legacy field names, assigns a sid if allowed, converts dates to UTC
        Timestamps), builds the appropriate Asset, and writes it to the
        equities or futures table plus the asset_router.  Does NOT commit;
        callers are responsible for committing.

        :param identifier: The identifier for which to insert metadata
        :param kwargs: The keyed metadata to insert
        """
        if identifier in self.metadata_cache:
            # Multiple pass insertion no longer supported.
            # This could and probably should raise an Exception, but is
            # currently just a short-circuit for compatibility with existing
            # testing structure in the test_algorithm module which creates
            # multiple sources which all insert redundant metadata.
            return
        entry = {}
        for key, value in kwargs.items():
            # Do not accept invalid fields
            if key not in ASSET_FIELDS:
                continue
            # Do not accept Nones
            if value is None:
                continue
            # Do not accept empty strings
            if value == '':
                continue
            # Do not accept nans from dataframes
            if isinstance(value, float) and np.isnan(value):
                continue
            entry[key] = value
        # Check if the sid is declared
        try:
            entry['sid']
        except KeyError:
            # If the identifier is not a sid, assign one
            if hasattr(identifier, '__int__'):
                entry['sid'] = identifier.__int__()
            else:
                if self.allow_sid_assignment:
                    # Assign the sid the value of its insertion order.
                    # This assumes that we are assigning values to all assets.
                    entry['sid'] = len(self.metadata_cache)
                else:
                    raise SidAssignmentError(identifier=identifier)
        # If the file_name is in the kwargs, it will be used as the symbol
        try:
            entry['symbol'] = entry.pop('file_name')
        except KeyError:
            pass
        # If the identifier coming in was a string and there is no defined
        # symbol yet, set the symbol to the incoming identifier
        try:
            entry['symbol']
            pass
        except KeyError:
            if isinstance(identifier, string_types):
                entry['symbol'] = identifier
        # If the company_name is in the kwargs, it may be the asset_name
        try:
            company_name = entry.pop('company_name')
            try:
                entry['asset_name']
            except KeyError:
                entry['asset_name'] = company_name
        except KeyError:
            pass
        # If dates are given as nanos, pop them
        try:
            entry['start_date'] = entry.pop('start_date_nano')
        except KeyError:
            pass
        try:
            entry['end_date'] = entry.pop('end_date_nano')
        except KeyError:
            pass
        try:
            entry['notice_date'] = entry.pop('notice_date_nano')
        except KeyError:
            pass
        try:
            entry['expiration_date'] = entry.pop('expiration_date_nano')
        except KeyError:
            pass
        # Process dates to Timestamps
        try:
            entry['start_date'] = pd.Timestamp(entry['start_date'], tz='UTC')
        except KeyError:
            # Set a default start_date of the EPOCH, so that all date queries
            # work when a start date is not provided.
            entry['start_date'] = pd.Timestamp(0, tz='UTC')
        try:
            # Set a default end_date of 'now', so that all date queries
            # work when a end date is not provided.
            entry['end_date'] = pd.Timestamp(entry['end_date'], tz='UTC')
        except KeyError:
            # NOTE(review): end_date_to_assign is only set when
            # allow_sid_assignment is True (see __init__) -- confirm this
            # path is unreachable otherwise.
            entry['end_date'] = self.end_date_to_assign
        try:
            entry['notice_date'] = pd.Timestamp(entry['notice_date'],
                                                tz='UTC')
        except KeyError:
            pass
        try:
            entry['expiration_date'] = pd.Timestamp(entry['expiration_date'],
                                                    tz='UTC')
        except KeyError:
            pass
        # Build an Asset of the appropriate type, default to Equity
        asset_type = entry.pop('asset_type', 'equity')
        if asset_type.lower() == 'equity':
            # Precompute the fuzzy-matched symbol column (symbol with the
            # fuzzy character stripped), used by lookup_symbol(fuzzy=True).
            try:
                fuzzy = entry['symbol'].replace(self.fuzzy_char, '') \
                    if self.fuzzy_char else None
            except KeyError:
                fuzzy = None
            asset = Equity(**entry)
            c = self.conn.cursor()
            # Dates are persisted as integer nanos (Timestamp.value).
            t = (asset.sid,
                 asset.symbol,
                 asset.asset_name,
                 asset.start_date.value if asset.start_date else None,
                 asset.end_date.value if asset.end_date else None,
                 asset.first_traded.value if asset.first_traded else None,
                 asset.exchange,
                 fuzzy)
            c.execute("""INSERT INTO equities(
            sid,
            symbol,
            asset_name,
            start_date,
            end_date,
            first_traded,
            exchange,
            fuzzy)
            VALUES(?, ?, ?, ?, ?, ?, ?, ?)""", t)
            t = (asset.sid,
                 'equity')
            c.execute("""INSERT INTO asset_router(sid, asset_type)
            VALUES(?, ?)""", t)
        elif asset_type.lower() == 'future':
            asset = Future(**entry)
            c = self.conn.cursor()
            t = (asset.sid,
                 asset.symbol,
                 asset.asset_name,
                 asset.start_date.value if asset.start_date else None,
                 asset.end_date.value if asset.end_date else None,
                 asset.first_traded.value if asset.first_traded else None,
                 asset.exchange,
                 asset.root_symbol,
                 asset.notice_date.value if asset.notice_date else None,
                 asset.expiration_date.value
                 if asset.expiration_date else None,
                 asset.contract_multiplier)
            c.execute("""INSERT INTO futures(
            sid,
            symbol,
            asset_name,
            start_date,
            end_date,
            first_traded,
            exchange,
            root_symbol,
            notice_date,
            expiration_date,
            contract_multiplier)
            VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", t)
            t = (asset.sid,
                 'future')
            c.execute("""INSERT INTO asset_router(sid, asset_type)
            VALUES(?, ?)""", t)
        else:
            raise InvalidAssetType(asset_type=asset_type)
        self.metadata_cache[identifier] = entry
    def consume_identifiers(self, identifiers):
        """
        Consumes the given identifiers in to the metadata cache of this
        AssetFinder.

        Accepts sids, symbols, or full Asset objects; Assets are flattened
        into (sid, metadata) before insertion.
        """
        for identifier in identifiers:
            # Handle case where full Assets are passed in
            # For example, in the creation of a DataFrameSource, the source's
            # 'sid' args may be full Assets
            if isinstance(identifier, Asset):
                sid = identifier.sid
                metadata = identifier.to_dict()
                # Record the concrete class name ('Equity'/'Future') so the
                # entry is routed to the right table on insertion.
                metadata['asset_type'] = identifier.__class__.__name__
                self.insert_metadata(identifier=sid, **metadata)
            else:
                self.insert_metadata(identifier)
def consume_metadata(self, metadata):
"""
Consumes the provided metadata in to the metadata cache. The
existing values in the cache will be overwritten when there
is a conflict.
:param metadata: The metadata to be consumed
"""
# Handle dicts
if isinstance(metadata, dict):
self._insert_metadata_dict(metadata)
# Handle DataFrames
elif isinstance(metadata, pd.DataFrame):
self._insert_metadata_dataframe(metadata)
# Handle readables
elif hasattr(metadata, 'read'):
self._insert_metadata_readable(metadata)
else:
raise ConsumeAssetMetaDataError(obj=metadata)
def clear_metadata(self):
"""
Used for testing.
"""
self.metadata_cache = {}
self.conn = sqlite3.connect(':memory:')
self.create_db_tables()
    def insert_metadata(self, identifier, **kwargs):
        # Public single-entry insert: delegates to _insert_metadata and
        # commits immediately (the private batch helpers commit once at the
        # end instead).
        self._insert_metadata(identifier, **kwargs)
        self.conn.commit()
    def _insert_metadata_dataframe(self, dataframe):
        # Each index label is an identifier and its row holds the metadata
        # fields.  Commit once after all rows are inserted.
        for identifier, row in dataframe.iterrows():
            self._insert_metadata(identifier, **row)
        self.conn.commit()
def _insert_metadata_dict(self, dict):
for identifier, entry in dict.items():
self._insert_metadata(identifier, **entry)
self.conn.commit()
    def _insert_metadata_readable(self, readable):
        """
        Consume metadata rows from an object exposing ``read()``.  Each row
        must be indexable by the names in ASSET_FIELDS; the identifier is
        taken from 'sid' (preferred) or 'symbol'.
        """
        for row in readable.read():
            # Parse out the row of the readable object
            metadata_dict = {}
            for field in ASSET_FIELDS:
                try:
                    row_value = row[field]
                    # Avoid passing placeholders (falsy values or the
                    # literal string 'None').
                    if row_value and (row_value != 'None'):
                        metadata_dict[field] = row[field]
                except KeyError:
                    continue
                except IndexError:
                    continue
            # Locate the identifier, fail if not found
            if 'sid' in metadata_dict:
                identifier = metadata_dict['sid']
            elif 'symbol' in metadata_dict:
                identifier = metadata_dict['symbol']
            else:
                raise ConsumeAssetMetaDataError(obj=row)
            self._insert_metadata(identifier, **metadata_dict)
        self.conn.commit()
    def _compute_asset_lifetimes(self):
        """
        Compute and cache a recarray of asset lifetimes with fields
        (sid, start, end), one entry per row of the equities table.

        FUTURE OPTIMIZATION: We're looping over a big array, which means this
        probably should be in C/Cython.
        """
        with self.conn as transaction:
            results = transaction.execute(
                'SELECT sid, start_date, end_date from equities'
            ).fetchall()
        lifetimes = np.recarray(
            shape=(len(results),),
            dtype=[('sid', 'i8'), ('start', 'i8'), ('end', 'i8')],
        )
        # TODO: This is **WAY** slower than it could be because we have to
        # check for None everywhere. If we represented "no start date" as
        # 0, and "no end date" as MAX_INT in our metadata, this would be
        # significantly faster.
        NO_START = 0
        NO_END = np.iinfo(int).max
        for idx, (sid, start, end) in enumerate(results):
            lifetimes[idx] = (
                sid,
                start if start is not None else NO_START,
                end if end is not None else NO_END,
            )
        return lifetimes
    def lifetimes(self, dates):
        """
        Compute a DataFrame representing asset lifetimes for the specified date
        range.

        Parameters
        ----------
        dates : pd.DatetimeIndex
            The dates for which to compute lifetimes.

        Returns
        -------
        lifetimes : pd.DataFrame
            A frame of dtype bool with `dates` as index and an Int64Index of
            assets as columns. The value at `lifetimes.loc[date, asset]` will
            be True iff `asset` existed on `date`.

        See Also
        --------
        numpy.putmask
        """
        # This is a less than ideal place to do this, because if someone adds
        # assets to the finder after we've touched lifetimes we won't have
        # those new assets available. Mutability is not my favorite
        # programming feature.
        if self._asset_lifetimes is None:
            self._asset_lifetimes = self._compute_asset_lifetimes()
        lifetimes = self._asset_lifetimes
        # Broadcast dates (as int64 nanos) against the per-asset
        # [start, end] intervals to get a (num_dates x num_assets) mask.
        raw_dates = dates.asi8[:, None]
        mask = (lifetimes.start <= raw_dates) & (raw_dates <= lifetimes.end)
        return pd.DataFrame(mask, index=dates, columns=lifetimes.sid)
class AssetConvertible(with_metaclass(ABCMeta)):
    """
    ABC for types that are convertible to integer-representations of
    Assets.

    Includes Asset, six.string_types, and Integral, which are registered
    as virtual subclasses below.
    """
    pass
# Register the convertible types as virtual subclasses so that
# isinstance(x, AssetConvertible) accepts ints, Assets, and strings.
AssetConvertible.register(Integral)
AssetConvertible.register(Asset)
# Use six.string_types for Python2/3 compatibility
for _type in string_types:
    AssetConvertible.register(_type)
class NotAssetConvertible(ValueError):
    """Raised when a value cannot be interpreted as an Asset."""
    pass
<file_sep>"""
Tests for filter terms.
"""
from unittest import TestCase
from numpy import (
arange,
array,
eye,
float64,
nan,
nanpercentile,
ones_like,
putmask,
)
from numpy.testing import assert_array_equal
from pandas import (
DataFrame,
date_range,
Int64Index,
)
from zipline.errors import BadPercentileBounds
from zipline.modelling.factor import TestingFactor
class SomeFactor(TestingFactor):
    """Trivial factor (no inputs, zero-length window) used as a test stub."""
    inputs = ()
    window_length = 0
class FilterTestCase(TestCase):
    def setUp(self):
        # Shared fixtures: a stub factor, 5 consecutive daily dates, 5
        # assets, and an all-True mask over that (dates x assets) grid.
        self.f = SomeFactor()
        self.dates = date_range('2014-01-01', periods=5, freq='D')
        self.assets = Int64Index(range(5))
        self.mask = DataFrame(True, index=self.dates, columns=self.assets)
    def tearDown(self):
        # No per-test cleanup required.
        pass
    def maskframe(self, array):
        # Wrap a boolean ndarray in a DataFrame shaped for use as a mask:
        # daily dates as the index, integer column labels for assets.
        return DataFrame(
            array,
            index=date_range('2014-01-01', periods=array.shape[0], freq='D'),
            columns=arange(array.shape[1]),
        )
    def test_bad_input(self):
        # percentile_between must reject out-of-range or non-increasing
        # (min, max) percentile pairs.
        f = self.f
        bad_percentiles = [
            (-.1, 10),
            (10, 100.1),
            (20, 10),
            (50, 50),
        ]
        for min_, max_ in bad_percentiles:
            with self.assertRaises(BadPercentileBounds):
                f.percentile_between(min_, max_)
    def test_rank_percentile_nice_partitions(self):
        # Test case with nicely-defined partitions: identity matrices, where
        # each row has four 0s and one 1, split into quintiles.
        eye5 = eye(5, dtype=float64)
        eye6 = eye(6, dtype=float64)
        # Off-diagonal positions to be NaN-ed out / masked out; one per row.
        nanmask = array([[0, 0, 0, 0, 0, 1],
                         [1, 0, 0, 0, 0, 0],
                         [0, 1, 0, 0, 0, 0],
                         [0, 0, 1, 0, 0, 0],
                         [0, 0, 0, 1, 0, 0],
                         [0, 0, 0, 0, 1, 0]], dtype=bool)
        nandata = eye6.copy()
        putmask(nandata, nanmask, nan)
        for quintile in range(5):
            factor = self.f.percentile_between(
                quintile * 20.0,
                (quintile + 1) * 20.0,
            )
            # Test w/o any NaNs
            result = factor.compute_from_arrays(
                [eye5],
                self.maskframe(ones_like(eye5, dtype=bool)),
            )
            # Test with NaNs in the data.
            nandata_result = factor.compute_from_arrays(
                [nandata],
                self.maskframe(ones_like(nandata, dtype=bool)),
            )
            # Test with Falses in the mask.
            nanmask_result = factor.compute_from_arrays(
                [eye6],
                self.maskframe(~nanmask),
            )
            # NaN-ing a cell and masking it out must be equivalent.
            assert_array_equal(nandata_result, nanmask_result)
            if quintile < 4:
                # There are 4 0s and one 1 in each row, so the first 4
                # quintiles should be all the locations with zeros in the input
                # array.
                assert_array_equal(result, ~eye5.astype(bool))
                # Should reject all the ones, plus the nans.
                assert_array_equal(
                    nandata_result,
                    ~(nanmask | eye6.astype(bool))
                )
            else:
                # The last quintile should contain all the 1s.
                assert_array_equal(result, eye(5, dtype=bool))
                # Should accept all the 1s.
                assert_array_equal(nandata_result, eye(6, dtype=bool))
def test_rank_percentile_nasty_partitions(self):
# Test case with nasty partitions: divide up 5 assets into quartiles.
data = arange(25, dtype=float).reshape(5, 5) % 4
nandata = data.copy()
nandata[eye(5, dtype=bool)] = nan
for quartile in range(4):
lower_bound = quartile * 25.0
upper_bound = (quartile + 1) * 25.0
factor = self.f.percentile_between(lower_bound, upper_bound)
# There isn't a nice definition of correct behavior here, so for
# now we guarantee the behavior of numpy.nanpercentile.
result = factor.compute_from_arrays([data], self.mask)
min_value = nanpercentile(data, lower_bound, axis=1, keepdims=True)
max_value = nanpercentile(data, upper_bound, axis=1, keepdims=True)
assert_array_equal(
result,
(min_value <= data) & (data <= max_value),
)
nanresult = factor.compute_from_arrays([nandata], self.mask)
min_value = nanpercentile(
nandata,
lower_bound,
axis=1,
keepdims=True,
)
max_value = nanpercentile(
nandata,
upper_bound,
axis=1,
keepdims=True,
)
assert_array_equal(
nanresult,
(min_value <= nandata) & (nandata <= max_value),
)
def test_sequenced_filter(self):
first = SomeFactor() < 1
first_input = eye(5)
first_result = first.compute_from_arrays([first_input], self.mask)
assert_array_equal(first_result, ~eye(5, dtype=bool))
# Second should pick out the fourth column.
second = SomeFactor().eq(3.0)
second_input = arange(25, dtype=float).reshape(5, 5) % 5
sequenced = first.then(second)
result = sequenced.compute_from_arrays(
[first_result, second_input],
self.mask,
)
expected_result = (first_result & (second_input == 3.0))
assert_array_equal(result, expected_result)
def test_sequenced_filter_order_dependent(self):
f = SomeFactor() < 1
f_input = eye(5)
f_result = f.compute_from_arrays([f_input], self.mask)
assert_array_equal(f_result, ~eye(5, dtype=bool))
g = SomeFactor().percentile_between(80, 100)
g_input = arange(25, dtype=float).reshape(5, 5) % 5
g_result = g.compute_from_arrays([g_input], self.mask)
assert_array_equal(g_result, g_input == 4)
result = f.then(g).compute_from_arrays(
[f_result, g_input],
self.mask,
)
# Input data is strictly increasing, so the result should be the top
# value not filtered by first.
expected_result = array(
[[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 1, 0]],
dtype=bool,
)
assert_array_equal(result, expected_result)
result = g.then(f).compute_from_arrays(
[g_result, f_input],
self.mask,
)
# Percentile calculated first, then diagonal is removed.
expected_result = array(
[[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 0]],
dtype=bool,
)
assert_array_equal(result, expected_result)
<file_sep>"""
Tests for Algorithms running the full FFC stack.
"""
from unittest import TestCase
from os.path import (
dirname,
join,
realpath,
)
from numpy import (
array,
full_like,
nan,
)
from numpy.testing import assert_almost_equal
from pandas import (
concat,
DataFrame,
DatetimeIndex,
Panel,
read_csv,
Series,
Timestamp,
)
from six import iteritems
from testfixtures import TempDirectory
from zipline.algorithm import TradingAlgorithm
from zipline.api import (
# add_filter,
add_factor,
get_datetime,
)
from zipline.assets import AssetFinder
# from zipline.data.equities import USEquityPricing
from zipline.data.ffc.loaders.us_equity_pricing import (
BcolzDailyBarReader,
DailyBarWriterFromCSVs,
SQLiteAdjustmentReader,
SQLiteAdjustmentWriter,
USEquityPricingLoader,
)
# from zipline.modelling.factor import CustomFactor
from zipline.modelling.factor.technical import VWAP
from zipline.utils.test_utils import (
make_simple_asset_info,
str_to_seconds,
)
from zipline.utils.tradingcalendar import trading_days
# Absolute path to the on-disk test fixtures:
# <repo>/tests/resources/modelling_inputs
TEST_RESOURCE_PATH = join(
    dirname(dirname(realpath(__file__))),  # zipline_repo/tests
    'resources',
    'modelling_inputs',
)
def rolling_vwap(df, length):
    """
    Simple rolling VWAP reference implementation for testing.

    For each trailing window of ``length`` rows, computes the volume-weighted
    average of ``df['close']``. Positions before the first complete window
    are left as NaN.
    """
    prices = df['close'].values
    volumes = df['volume'].values
    weighted = prices * volumes
    result = full_like(prices, nan)
    for end in range(length, len(prices) + 1):
        window = slice(end - length, end)
        result[end - 1] = weighted[window].sum() / volumes[window].sum()
    return Series(result, index=df.index)
class FFCAlgorithmTestCase(TestCase):
    """
    End-to-end test of the FFC stack: VWAP factors computed through a
    TradingAlgorithm against bcolz-backed daily bars and a sqlite-backed
    adjustment database containing a single AAPL 1:7 split.
    """
    @classmethod
    def setUpClass(cls):
        # Fixed sids used throughout the fixture data.
        cls.AAPL = 1
        cls.MSFT = 2
        cls.BRK_A = 3
        cls.assets = [cls.AAPL, cls.MSFT, cls.BRK_A]
        asset_info = make_simple_asset_info(
            cls.assets,
            Timestamp('2014'),
            Timestamp('2015'),
            ['AAPL', 'MSFT', 'BRK_A'],
        )
        cls.asset_finder = AssetFinder(asset_info)
        cls.tempdir = tempdir = TempDirectory()
        tempdir.create()
        try:
            cls.raw_data, cls.bar_reader = cls.create_bar_reader(tempdir)
            cls.adj_reader = cls.create_adjustment_reader(tempdir)
            cls.ffc_loader = USEquityPricingLoader(
                cls.bar_reader, cls.adj_reader
            )
        except:
            # If fixture construction fails partway through, release the
            # tempdir before re-raising so it doesn't leak.
            cls.tempdir.cleanup()
            raise
        cls.dates = cls.raw_data[cls.AAPL].index.tz_localize('UTC')
    @classmethod
    def create_bar_reader(cls, tempdir):
        # Read per-asset CSV fixtures into DataFrames AND write them into a
        # bcolz table; return both so tests can compare reader output against
        # the raw frames.
        resources = {
            cls.AAPL: join(TEST_RESOURCE_PATH, 'AAPL.csv'),
            cls.MSFT: join(TEST_RESOURCE_PATH, 'MSFT.csv'),
            cls.BRK_A: join(TEST_RESOURCE_PATH, 'BRK-A.csv'),
        }
        raw_data = {
            asset: read_csv(path, parse_dates=['day']).set_index('day')
            for asset, path in iteritems(resources)
        }
        # Add 'price' column as an alias because all kinds of stuff in zipline
        # depends on it being present. :/
        for frame in raw_data.values():
            frame['price'] = frame['close']
        writer = DailyBarWriterFromCSVs(resources)
        data_path = tempdir.getpath('testdata.bcolz')
        table = writer.write(data_path, trading_days, cls.assets)
        return raw_data, BcolzDailyBarReader(table)
    @classmethod
    def create_adjustment_reader(cls, tempdir):
        # Write one AAPL 1:7 split plus empty mergers/dividends tables and
        # return a reader over the resulting sqlite database.
        dbpath = tempdir.getpath('adjustments.sqlite')
        writer = SQLiteAdjustmentWriter(dbpath)
        splits = DataFrame.from_records([
            {
                'effective_date': str_to_seconds('2014-06-09'),
                'ratio': (1 / 7.0),
                'sid': cls.AAPL,
            }
        ])
        mergers = dividends = DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': array([], dtype=int),
                'ratio': array([], dtype=float),
                'sid': array([], dtype=int),
            },
            index=DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        writer.write(splits, mergers, dividends)
        return SQLiteAdjustmentReader(dbpath)
    @classmethod
    def tearDownClass(cls):
        cls.tempdir.cleanup()
    def make_source(self):
        # Panel of the raw frames, used as the algorithm's pricing source.
        return Panel(self.raw_data).tz_localize('UTC', axis=1)
    def test_handle_adjustment(self):
        AAPL, MSFT, BRK_A = assets = self.AAPL, self.MSFT, self.BRK_A
        raw_data = self.raw_data
        adjusted_data = {k: v.copy() for k, v in iteritems(raw_data)}
        AAPL_split_date = Timestamp("2014-06-09", tz='UTC')
        split_loc = raw_data[AAPL].index.get_loc(AAPL_split_date)
        # Our view of AAPL's history changes after the split.
        ohlc = ['open', 'high', 'low', 'close']
        adjusted_data[AAPL].ix[:split_loc, ohlc] /= 7.0
        adjusted_data[AAPL].ix[:split_loc, ['volume']] *= 7.0
        window_lengths = [1, 2, 5, 10]
        # length -> asset -> expected vwap
        vwaps = {length: {} for length in window_lengths}
        vwap_keys = {}
        for length in window_lengths:
            vwap_keys[length] = "vwap_%d" % length
            for asset in AAPL, MSFT, BRK_A:
                raw = rolling_vwap(raw_data[asset], length)
                adj = rolling_vwap(adjusted_data[asset], length)
                # Expected series: unadjusted values before the split date,
                # split-adjusted values from the split date onward.
                vwaps[length][asset] = concat(
                    [
                        raw[:split_loc],
                        adj[split_loc:]
                    ]
                )
        def initialize(context):
            # Register one VWAP factor per window length.
            context.vwaps = []
            for length, key in iteritems(vwap_keys):
                context.vwaps.append(VWAP(window_length=length))
                add_factor(context.vwaps[-1], name=key)
        def handle_data(context, data):
            today = get_datetime()
            factors = data.factors
            for length, key in iteritems(vwap_keys):
                for asset in assets:
                    computed = factors.loc[asset, key]
                    expected = vwaps[length][asset].loc[today]
                    # Only having two places of precision here is a bit
                    # unfortunate.
                    assert_almost_equal(computed, expected, decimal=2)
        # Do the same checks in before_trading_start
        before_trading_start = handle_data
        algo = TradingAlgorithm(
            initialize=initialize,
            handle_data=handle_data,
            before_trading_start=before_trading_start,
            data_frequency='daily',
            ffc_loader=self.ffc_loader,
            asset_finder=self.asset_finder,
            start=self.dates[max(window_lengths)],
            end=self.dates[-1],
        )
        algo.run(
            source=self.make_source(),
            # Yes, I really do want to use the start and end dates I passed to
            # TradingAlgorithm.
            overwrite_sim_params=False,
        )
<file_sep>from numpy import (
float64,
uint32,
)
from zipline.data.dataset import (
Column,
DataSet,
)
class USEquityPricing(DataSet):
    # Daily US equity pricing dataset. OHLC price fields are float64;
    # volume is uint32.
    open = Column(float64)
    high = Column(float64)
    low = Column(float64)
    close = Column(float64)
    volume = Column(uint32)
<file_sep>from .factor import (
Factor,
TestingFactor,
CustomFactor,
)
# Names re-exported as the public interface of this package.
__all__ = [
    'Factor',
    'TestingFactor',
    'CustomFactor',
]
<file_sep>"""
classifier.py
"""
from zipline.modelling.term import Term
class Classifier(Term):
    """
    Placeholder Term subclass for classifiers; currently adds no behavior
    beyond Term.
    """
    pass
<file_sep># Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import (
ABCMeta,
abstractmethod,
)
from contextlib import contextmanager
from errno import ENOENT
from os import remove
from os.path import exists
from bcolz import (
carray,
ctable,
)
from click import progressbar
from numpy import (
array,
array_equal,
float64,
floating,
full,
iinfo,
integer,
issubdtype,
uint32,
)
from pandas import (
DatetimeIndex,
read_csv,
Timestamp,
)
from six import (
iteritems,
string_types,
with_metaclass,
)
import sqlite3
from zipline.data.ffc.base import FFCLoader
from zipline.data.ffc.loaders._us_equity_pricing import (
_compute_row_slices,
_read_bcolz_data,
load_adjustments_from_sqlite,
)
from zipline.lib.adjusted_array import (
adjusted_array,
)
from zipline.errors import NoFurtherDataError
# Price columns whose values are scaled by 1000 when stored (see to_uint32).
OHLC = frozenset(['open', 'high', 'low', 'close'])
# Column layout of the bcolz daily pricing table.
US_EQUITY_PRICING_BCOLZ_COLUMNS = [
    'open', 'high', 'low', 'close', 'volume', 'day', 'id'
]
DAILY_US_EQUITY_PRICING_DEFAULT_FILENAME = 'daily_us_equity_pricing.bcolz'
# Required columns/dtypes for adjustment frames passed to
# SQLiteAdjustmentWriter.write_frame.
SQLITE_ADJUSTMENT_COLUMNS = frozenset(['effective_date', 'ratio', 'sid'])
SQLITE_ADJUSTMENT_COLUMN_DTYPES = {
    'effective_date': integer,
    'ratio': floating,
    'sid': integer,
}
SQLITE_ADJUSTMENT_TABLENAMES = frozenset(['splits', 'dividends', 'mergers'])
# Largest value representable as uint32; used for overflow checks on write.
UINT32_MAX = iinfo(uint32).max
class passthrough(object):
    """
    Minimal context manager that yields ``obj`` unchanged on entry and does
    nothing on exit. Useful where a context manager is required syntactically
    but no setup/teardown is needed.
    """
    def __init__(self, obj):
        self._obj = obj

    def __enter__(self):
        return self._obj

    def __exit__(self, exc_type, exc_value, traceback):
        # Never suppress exceptions.
        return False
class BcolzDailyBarWriter(with_metaclass(ABCMeta)):
    """
    Class capable of writing daily OHLCV data to disk in a format that can be
    read efficiently by BcolzDailyOHLCVReader.
    See Also
    --------
    BcolzDailyBarReader : Consumer of the data written by this class.
    """
    @abstractmethod
    def gen_tables(self, assets):
        """
        Return an iterator of pairs of (asset_id, bcolz.ctable).
        """
        raise NotImplementedError()
    @abstractmethod
    def to_uint32(self, array, colname):
        """
        Convert raw column values produced by gen_tables into uint32 values.
        Parameters
        ----------
        array : np.array
            An array of raw values.
        colname : str, {'open', 'high', 'low', 'close', 'volume', 'day'}
            The name of the column being loaded.
        For output being read by the default BcolzOHLCVReader, data should be
        stored in the following manner:
        - Pricing columns (Open, High, Low, Close) should be stored as 1000 *
          as-traded dollar value.
        - Volume should be the as-traded volume.
        - Dates should be stored as seconds since midnight UTC, Jan 1, 1970.
        """
        raise NotImplementedError()
    def write(self, filename, calendar, assets, show_progress=False):
        """
        Parameters
        ----------
        filename : str
            The location at which we should write our output.
        calendar : pandas.DatetimeIndex
            Calendar to use to compute asset calendar offsets.
        assets : pandas.Int64Index
            The assets for which to write data.
        show_progress : bool
            Whether or not to show a progress bar while writing.
        Returns
        -------
        table : bcolz.ctable
            The newly-written table.
        """
        _iterator = self.gen_tables(assets)
        if show_progress:
            # Wrap the table iterator in a click progress bar; otherwise the
            # same iterator is consumed directly.
            pbar = progressbar(
                _iterator,
                length=len(assets),
                item_show_func=lambda i: i if i is None else str(i[0]),
                label="Merging asset files:",
            )
            with pbar as pbar_iterator:
                return self._write_internal(filename, calendar, pbar_iterator)
        return self._write_internal(filename, calendar, _iterator)
    def _write_internal(self, filename, calendar, iterator):
        """
        Internal implementation of write.
        `iterator` should be an iterator yielding pairs of (asset, ctable).
        """
        total_rows = 0
        first_row = {}
        last_row = {}
        calendar_offset = {}
        # Maps column name -> output carray.
        columns = {
            k: carray(array([], dtype=uint32))
            for k in US_EQUITY_PRICING_BCOLZ_COLUMNS
        }
        for asset_id, table in iterator:
            nrows = len(table)
            for column_name in columns:
                if column_name == 'id':
                    # We know what the content of this column is, so don't
                    # bother reading it.
                    columns['id'].append(full((nrows,), asset_id))
                    continue
                # Each per-asset table is appended onto one long column,
                # converted to uint32 per the storage conventions above.
                columns[column_name].append(
                    self.to_uint32(table[column_name][:], column_name)
                )
            # Bcolz doesn't support ints as keys in `attrs`, so convert
            # assets to strings for use as attr keys.
            asset_key = str(asset_id)
            # Calculate the index into the array of the first and last row
            # for this asset. This allows us to efficiently load single
            # assets when querying the data back out of the table.
            first_row[asset_key] = total_rows
            last_row[asset_key] = total_rows + nrows - 1
            total_rows += nrows
            # Calculate the number of trading days between the first date
            # in the stored data and the first date of **this** asset. This
            # offset used for output alignment by the reader.
            # HACK: Index with a list so that we get back an array we can pass
            # to self.to_uint32. We could try to extract this in the loop
            # above, but that makes the logic a lot messier.
            asset_first_day = self.to_uint32(table['day'][[0]], 'day')[0]
            calendar_offset[asset_key] = calendar.get_loc(
                Timestamp(asset_first_day, unit='s', tz='UTC'),
            )
        # This writes the table to disk.
        full_table = ctable(
            columns=[
                columns[colname]
                for colname in US_EQUITY_PRICING_BCOLZ_COLUMNS
            ],
            names=US_EQUITY_PRICING_BCOLZ_COLUMNS,
            rootdir=filename,
            mode='w',
        )
        # Persist the per-asset row ranges and calendar metadata needed by
        # BcolzDailyBarReader to slice the flat columns back apart.
        full_table.attrs['first_row'] = first_row
        full_table.attrs['last_row'] = last_row
        full_table.attrs['calendar_offset'] = calendar_offset
        full_table.attrs['calendar'] = calendar.asi8.tolist()
        return full_table
class DailyBarWriterFromCSVs(BcolzDailyBarWriter):
    """
    BcolzDailyBarWriter constructed from a map from csvs to assets.
    Parameters
    ----------
    asset_map : dict
        A map from asset_id -> path to csv with data for that asset.
    CSVs should have the following columns:
        day : datetime64
        open : float64
        high : float64
        low : float64
        close : float64
        volume : int64
    """
    _csv_dtypes = {
        'open': float64,
        'high': float64,
        'low': float64,
        'close': float64,
        'volume': float64,
    }
    def __init__(self, asset_map):
        self._asset_map = asset_map
    def gen_tables(self, assets):
        """
        Read CSVs as DataFrames from our asset map.
        Raises KeyError if an asset has no path registered.
        """
        dtypes = self._csv_dtypes
        for asset in assets:
            path = self._asset_map.get(asset)
            if path is None:
                raise KeyError("No path supplied for asset %s" % asset)
            data = read_csv(path, parse_dates=['day'], dtype=dtypes)
            yield asset, ctable.fromdataframe(data)
    def to_uint32(self, array, colname):
        """
        Convert a raw column to the uint32 storage representation:
        prices scaled by 1000, volume unchanged, and days converted from
        datetime64[ns] to seconds since the epoch.

        Raises
        ------
        ValueError
            If a value would overflow uint32, or if `colname` is not a
            recognized column.
        """
        arrmax = array.max()
        if colname in OHLC:
            # NOTE(review): astype truncates rather than rounds the scaled
            # prices -- confirm that the reader expects truncation.
            self.check_uint_safe(arrmax * 1000, colname)
            return (array * 1000).astype(uint32)
        elif colname == 'volume':
            self.check_uint_safe(arrmax, colname)
            return array.astype(uint32)
        elif colname == 'day':
            nanos_per_second = (1000 * 1000 * 1000)
            # BUG FIX: use integer floor division instead of float true
            # division so the ns -> s conversion stays exact for large
            # timestamps (float64 cannot represent all int64 nanosecond
            # values exactly).
            self.check_uint_safe(arrmax.view(int) // nanos_per_second, colname)
            return (array.view(int) // nanos_per_second).astype(uint32)
        else:
            # BUG FIX: previously an unknown column name fell off the end of
            # the if-chain and silently returned None.
            raise ValueError("Unknown column name: '%s'" % colname)
    @staticmethod
    def check_uint_safe(value, colname):
        # Guard against silent overflow when narrowing to uint32.
        if value >= UINT32_MAX:
            raise ValueError(
                "Value %s from column '%s' is too large" % (value, colname)
            )
class BcolzDailyBarReader(object):
    """
    Reader for raw pricing data written by BcolzDailyOHLCVWriter.
    A Bcolz CTable is comprised of Columns and Attributes.
    Columns
    -------
    The table with which this loader interacts contains the following columns:
    ['open', 'high', 'low', 'close', 'volume', 'day', 'id'].
    The data in these columns is interpreted as follows:
    - Price columns ('open', 'high', 'low', 'close') are interpreted as 1000 *
      as-traded dollar value.
    - Volume is interpreted as as-traded volume.
    - Day is interpreted as seconds since midnight UTC, Jan 1, 1970.
    - Id is the asset id of the row.
    The data in each column is grouped by asset and then sorted by day within
    each asset block.
    The table is built to represent a long time range of data, e.g. ten years
    of equity data, so the lengths of each asset block is not equal to each
    other. The blocks are clipped to the known start and end date of each asset
    to cut down on the number of empty values that would need to be included to
    make a regular/cubic dataset.
    When read across the open, high, low, close, and volume with the same
    index should represent the same asset and day.
    Attributes
    ----------
    The table with which this loader interacts contains the following
    attributes:
    first_row : dict
        Map from asset_id -> index of first row in the dataset with that id.
    last_row : dict
        Map from asset_id -> index of last row in the dataset with that id.
    calendar_offset : dict
        Map from asset_id -> calendar index of first row.
    calendar : list[int64]
        Calendar used to compute offsets, in asi8 format (ns since EPOCH).
    We use first_row and last_row together to quickly find ranges of rows to
    load when reading an asset's data into memory.
    We use calendar_offset and calendar to orient loaded blocks within a
    range of queried dates.
    """
    def __init__(self, table):
        # Accept either an open ctable or a path to one on disk.
        if isinstance(table, string_types):
            table = ctable(rootdir=table, mode='r')
        self._table = table
        self._calendar = DatetimeIndex(table.attrs['calendar'], tz='UTC')
        # Bcolz attrs store keys as strings; convert back to int asset ids.
        self._first_rows = {
            int(asset_id): start_index
            for asset_id, start_index in iteritems(table.attrs['first_row'])
        }
        self._last_rows = {
            int(asset_id): end_index
            for asset_id, end_index in iteritems(table.attrs['last_row'])
        }
        self._calendar_offsets = {
            int(id_): offset
            for id_, offset in iteritems(table.attrs['calendar_offset'])
        }
    def _slice_locs(self, start_date, end_date):
        """
        Resolve (start_date, end_date) to integer positions in our calendar.

        Raises NoFurtherDataError if a date falls outside the calendar's
        range, or ValueError if it falls inside the range but is not a
        calendar entry.
        """
        try:
            start = self._calendar.get_loc(start_date)
        except KeyError:
            if start_date < self._calendar[0]:
                raise NoFurtherDataError(
                    msg=(
                        "FFC Query requesting data starting on {query_start}, "
                        "but first known date is {calendar_start}"
                    ).format(
                        query_start=str(start_date),
                        calendar_start=str(self._calendar[0]),
                    )
                )
            else:
                raise ValueError("Query start %s not in calendar" % start_date)
        try:
            stop = self._calendar.get_loc(end_date)
        except KeyError:
            # BUG FIX: this was previously a bare ``except:``, which would
            # also swallow unrelated errors (including KeyboardInterrupt).
            # Only a failed calendar lookup should be handled here, matching
            # the start-date branch above.
            if end_date > self._calendar[-1]:
                raise NoFurtherDataError(
                    msg=(
                        "FFC Query requesting data up to {query_end}, "
                        "but last known date is {calendar_end}"
                    ).format(
                        query_end=end_date,
                        calendar_end=self._calendar[-1],
                    )
                )
            else:
                raise ValueError("Query end %s not in calendar" % end_date)
        return start, stop
    def _compute_slices(self, dates, assets):
        """
        Compute the raw row indices to load for each asset on a query for the
        given dates.
        Parameters
        ----------
        dates : pandas.DatetimeIndex
            Dates of the query on which we want to compute row indices.
        assets : pandas.Int64Index
            Assets for which we want to compute row indices
        Returns
        -------
        A 3-tuple of (first_rows, last_rows, offsets):
        first_rows : np.array[intp]
            Array with length == len(assets) containing the index of the first
            row to load for each asset in `assets`.
        last_rows : np.array[intp]
            Array with length == len(assets) containing the index of the last
            row to load for each asset in `assets`.
        offset : np.array[intp]
            Array with length == (len(asset) containing the index in a buffer
            of length `dates` corresponding to the first row of each asset.
            The value of offset[i] will be 0 if asset[i] existed at the start
            of a query.  Otherwise, offset[i] will be equal to the number of
            entries in `dates` for which the asset did not yet exist.
        """
        start, stop = self._slice_locs(dates[0], dates[-1])
        # Sanity check that the requested date range matches our calendar.
        # This could be removed in the future if it's materially affecting
        # performance.
        query_dates = self._calendar[start:stop + 1]
        if not array_equal(query_dates.values, dates.values):
            raise ValueError("Incompatible calendars!")
        # The core implementation of the logic here is implemented in Cython
        # for efficiency.
        return _compute_row_slices(
            self._first_rows,
            self._last_rows,
            self._calendar_offsets,
            start,
            stop,
            assets,
        )
    def load_raw_arrays(self, columns, dates, assets):
        """
        Load a (len(dates), len(assets)) array of raw values for each of
        `columns`, aligned on our calendar.
        """
        first_rows, last_rows, offsets = self._compute_slices(dates, assets)
        return _read_bcolz_data(
            self._table,
            (len(dates), len(assets)),
            [column.name for column in columns],
            first_rows,
            last_rows,
            offsets,
        )
class SQLiteAdjustmentWriter(object):
    """
    Writer for data to be read by SQLiteAdjustmentReader
    Parameters
    ----------
    conn_or_path : str or sqlite3.Connection
        A handle to the target sqlite database.
    overwrite : bool, optional, default=False
        If True and conn_or_path is a string, remove any existing files at the
        given path before connecting.
    See Also
    --------
    SQLiteAdjustmentReader
    """
    def __init__(self, conn_or_path, overwrite=False):
        if isinstance(conn_or_path, sqlite3.Connection):
            self.conn = conn_or_path
        elif isinstance(conn_or_path, str):
            if overwrite and exists(conn_or_path):
                try:
                    remove(conn_or_path)
                except OSError as e:
                    # The file disappearing between the exists() check and
                    # the remove is fine; anything else is re-raised.
                    if e.errno != ENOENT:
                        raise
            self.conn = sqlite3.connect(conn_or_path)
        else:
            raise TypeError("Unknown connection type %s" % type(conn_or_path))
    def write_frame(self, tablename, frame):
        """
        Validate and write a single adjustment frame to table `tablename`.

        Raises ValueError for unexpected columns or table names, and
        TypeError for mismatched column dtypes.
        """
        if frozenset(frame.columns) != SQLITE_ADJUSTMENT_COLUMNS:
            raise ValueError(
                "Unexpected frame columns:\n"
                "Expected Columns: %s\n"
                "Received Columns: %s" % (
                    SQLITE_ADJUSTMENT_COLUMNS,
                    frame.columns.tolist(),
                )
            )
        elif tablename not in SQLITE_ADJUSTMENT_TABLENAMES:
            raise ValueError(
                "Adjustment table %s not in %s" % (
                    tablename, SQLITE_ADJUSTMENT_TABLENAMES
                )
            )
        expected_dtypes = SQLITE_ADJUSTMENT_COLUMN_DTYPES
        actual_dtypes = frame.dtypes
        for colname, expected in iteritems(expected_dtypes):
            actual = actual_dtypes[colname]
            if not issubdtype(actual, expected):
                raise TypeError(
                    "Expected data of type {expected} for column '{colname}', "
                    "but got {actual}.".format(
                        expected=expected,
                        colname=colname,
                        actual=actual,
                    )
                )
        return frame.to_sql(tablename, self.conn)
    def write(self, splits, mergers, dividends):
        """
        Writes data to a SQLite file to be read by SQLiteAdjustmentReader.
        Parameters
        ----------
        splits : pandas.DataFrame
            Dataframe containing split data.
        mergers : pandas.DataFrame
            DataFrame containing merger data.
        dividends : pandas.DataFrame
            DataFrame containing dividend data.
        Notes
        -----
        DataFrame input (`splits`, `mergers`, and `dividends`) should all have
        the following columns:
        effective_date : int
            The date, represented as seconds since Unix epoch, on which the
            adjustment should be applied.
        ratio : float
            A value to apply to all data earlier than the effective date.
        sid : int
            The asset id associated with this adjustment.
        The ratio column is interpreted as follows:
        - For all adjustment types, multiply price fields ('open', 'high',
          'low', and 'close') by the ratio.
        - For **splits only**, **divide** volume by the adjustment ratio.
        Dividend ratios should be calculated as
        1.0 - (dividend_value / "close on day prior to dividend ex_date").
        Returns
        -------
        None
        See Also
        --------
        SQLiteAdjustmentReader : Consumer for the data written by this class
        """
        self.write_frame('splits', splits)
        self.write_frame('mergers', mergers)
        self.write_frame('dividends', dividends)
        # Index each table on sid and effective_date so the reader can query
        # adjustments efficiently by asset and by date.
        self.conn.execute(
            "CREATE INDEX splits_sids "
            "ON splits(sid)"
        )
        self.conn.execute(
            "CREATE INDEX splits_effective_date "
            "ON splits(effective_date)"
        )
        self.conn.execute(
            "CREATE INDEX mergers_sids "
            "ON mergers(sid)"
        )
        self.conn.execute(
            "CREATE INDEX mergers_effective_date "
            "ON mergers(effective_date)"
        )
        self.conn.execute(
            "CREATE INDEX dividends_sid "
            "ON dividends(sid)"
        )
        self.conn.execute(
            "CREATE INDEX dividends_effective_date "
            "ON dividends(effective_date)"
        )
    def close(self):
        self.conn.close()
class SQLiteAdjustmentReader(object):
    """
    Loads adjustments based on corporate actions from a SQLite database.

    Expects data written in the format output by `SQLiteAdjustmentWriter`.

    Parameters
    ----------
    conn : str or sqlite3.Connection
        Either an already-open connection, or a path from which to open one.
    """
    def __init__(self, conn):
        # Accept either a path string or an existing connection.
        self.conn = sqlite3.connect(conn) if isinstance(conn, str) else conn

    def load_adjustments(self, columns, dates, assets):
        column_names = [column.name for column in columns]
        return load_adjustments_from_sqlite(
            self.conn,
            column_names,
            dates,
            assets,
        )
class USEquityPricingLoader(FFCLoader):
    """
    FFCLoader for US equity pricing.

    Delegates baseline loads to ``raw_price_loader`` and adjustment loads to
    ``adjustments_loader``, then pairs the per-column results into adjusted
    arrays.
    """
    def __init__(self, raw_price_loader, adjustments_loader):
        self.raw_price_loader = raw_price_loader
        self.adjustments_loader = adjustments_loader

    def load_adjusted_array(self, columns, mask):
        dates = mask.index
        assets = mask.columns
        # Both loaders return one entry per requested column, in order.
        raw_arrays = self.raw_price_loader.load_raw_arrays(
            columns,
            dates,
            assets,
        )
        adjustments = self.adjustments_loader.load_adjustments(
            columns,
            dates,
            assets,
        )
        out = []
        for raw_array, col_adjustments in zip(raw_arrays, adjustments):
            out.append(adjusted_array(raw_array, mask.values, col_adjustments))
        return out
<file_sep>"""
Compute Engine for FFC API
"""
from abc import (
ABCMeta,
abstractmethod,
)
from operator import and_
from six import (
iteritems,
with_metaclass,
)
from six.moves import (
reduce,
zip,
zip_longest,
)
from networkx import (
DiGraph,
get_node_attributes,
topological_sort,
)
from numpy import (
add,
empty_like,
)
from pandas import (
DataFrame,
date_range,
MultiIndex,
)
from zipline.lib.adjusted_array import ensure_ndarray
from zipline.errors import NoFurtherDataError
from zipline.modelling.factor import Factor
from zipline.modelling.filter import Filter
# TODO: Move this somewhere else.
class CyclicDependency(Exception):
    """
    Raised by _add_to_graph when a term is encountered twice on the same
    traversal path, indicating a cycle in the term dependency graph.
    """
    pass
def build_dependency_graph(terms):
    """
    Build a dependency graph containing the given terms and their dependencies.

    Parameters
    ----------
    terms : iterable
        An iterable of zipline.modelling.term.Term.

    Returns
    -------
    dependencies : networkx.DiGraph
        A directed graph representing the dependencies of the desired inputs.
        Each node in the graph has an `extra_rows` attribute, indicating how
        many, if any, extra rows we should compute for the node. Extra rows
        are most often needed when a term is an input to a rolling window
        computation. For example, if we compute a 30 day moving average of
        price from day X to day Y, we need to load price data for the range
        from day (X - 29) to day Y.
    """
    graph = DiGraph()
    in_progress = set()
    for term in terms:
        # `in_progress` tracks the current traversal path for cycle
        # detection inside the recursive helper.
        _add_to_graph(term, graph, in_progress, extra_rows=0)
    # Every recursive call removes its term on the way out, so nothing
    # should remain once all top-level terms have been added.
    assert not in_progress
    return graph
def _add_to_graph(term,
                  dependencies,
                  parents,
                  extra_rows):
    """
    Add the term and all its inputs to dependencies.

    Parameters
    ----------
    term : zipline.modelling.term.Term
        Term to add (recursively, with its inputs).
    dependencies : networkx.DiGraph
        Graph being built; mutated in place.
    parents : set
        Terms on the current traversal path; used for cycle detection.
    extra_rows : int
        Number of extra rows of `term` required by the caller.
    """
    # If we've seen this node already as a parent of the current traversal,
    # it means we have an unsatisifiable dependency. This should only be
    # possible if the term's inputs are mutated after construction.
    if term in parents:
        raise CyclicDependency(term)
    parents.add(term)
    try:
        existing = dependencies.node[term]
    except KeyError:
        # We're not yet in the graph: add the term with the specified number of
        # extra rows.
        dependencies.add_node(term, extra_rows=extra_rows)
    else:
        # We're already in the graph because we've been traversed by
        # another parent. Ensure that we have enough extra rows to satisfy
        # all of our parents.
        existing['extra_rows'] = max(extra_rows, existing['extra_rows'])
    for subterm in term.inputs:
        # Inputs need our own requirement plus whatever trailing window this
        # term demands of its inputs.
        _add_to_graph(
            subterm,
            dependencies,
            parents,
            extra_rows=extra_rows + term.extra_input_rows,
        )
        dependencies.add_edge(subterm, term)
    parents.remove(term)
class FFCEngine(with_metaclass(ABCMeta)):
    """
    Abstract interface for engines that compute factor matrices.
    """
    @abstractmethod
    def factor_matrix(self, terms, start_date, end_date):
        """
        Compute values for `terms` between `start_date` and `end_date`.
        Returns a DataFrame with a MultiIndex of (date, asset) pairs on the
        index. On each date, we return a row for each asset that passed all
        instances of `Filter` in `terms, and the columns of the returned frame
        will be the keys in `terms` whose values are instances of `Factor`.
        Parameters
        ----------
        terms : dict
            Map from str -> zipline.modelling.term.Term.
        start_date : datetime
            The first date of the matrix.
        end_date : datetime
            The last date of the matrix.
        Returns
        -------
        matrix : pd.DataFrame
            A matrix of factors
        """
        raise NotImplementedError("factor_matrix")
class NoOpFFCEngine(FFCEngine):
    """
    FFCEngine that doesn't do anything: always returns a frame with the
    requested columns but zero (date, asset) rows.
    """
    def factor_matrix(self, terms, start, end):
        # Product of the full date range with an empty asset tuple yields an
        # empty MultiIndex, so the frame has no rows.
        empty_index = MultiIndex.from_product(
            [date_range(start=start, end=end, freq='D'), ()],
        )
        return DataFrame(
            index=empty_index,
            columns=sorted(terms.keys())
        )
class SimpleFFCEngine(object):
"""
FFC Engine class that computes each term independently.
Parameters
----------
loader : FFCLoader
A loader to use to retrieve raw data for atomic terms.
calendar : DatetimeIndex
Array of dates to consider as trading days when computing a range
between a fixed start and end.
asset_finder : zipline.assets.AssetFinder
An AssetFinder instance. We depend on the AssetFinder to determine
which assets are in the top-level universe at any point in time.
"""
__slots__ = [
'_loader',
'_calendar',
'_finder',
'__weakref__',
]
    def __init__(self, loader, calendar, asset_finder):
        # Parameter semantics are documented in the class docstring; stored
        # privately (see __slots__).
        self._loader = loader
        self._calendar = calendar
        self._finder = asset_finder
    def factor_matrix(self, terms, start_date, end_date):
        """
        Compute a factor matrix.
        Parameters
        ----------
        terms : dict[str -> zipline.modelling.term.Term]
            Dict mapping term names to instances.  The supplied names are used
            as column names in our output frame.
        start_date : pd.Timestamp
            Start date of the computed matrix.
        end_date : pd.Timestamp
            End date of the computed matrix.
        The algorithm implemented here can be broken down into the following
        stages:
        0. Build a dependency graph of all terms in `terms`.  Topologically
        sort the graph to determine an order in which we can compute the terms.
        1. Ask our AssetFinder for a "lifetimes matrix", which should contain,
        for each date between start_date and end_date, a boolean value for each
        known asset indicating whether the asset existed on that date.
        2. Compute each term in the dependency order determined in (0), caching
        the results in a a dictionary to that they can be fed into future
        terms.
        3. For each date, determine the number of assets passing **all**
        filters. The sum, N, of all these values is the total number of rows in
        our output frame, so we pre-allocate an output array of length N for
        each factor in `terms`.
        4. Fill in the arrays allocated in (3) by copying computed values from
        our output cache into the corresponding rows.
        5. Stick the values computed in (4) into a DataFrame and return it.
        Step 0 is performed in `build_dependency_graph`.
        Step 1 is performed in `self.build_lifetimes_matrix`.
        Step 2 is performed in `self.compute_chunk`.
        Steps 3, 4, and 5 are performed in self._format_factor_matrix.
        See Also
        --------
        FFCEngine.factor_matrix
        """
        if end_date <= start_date:
            raise ValueError(
                "start_date must be before end_date \n"
                "start_date=%s, end_date=%s" % (start_date, end_date)
            )
        graph = build_dependency_graph(terms.values())
        ordered_terms = topological_sort(graph)
        extra_row_counts = get_node_attributes(graph, 'extra_rows')
        max_extra_rows = max(extra_row_counts.values())
        lifetimes = self.build_lifetimes_matrix(
            start_date,
            end_date,
            max_extra_rows,
        )
        # Rows before `max_extra_rows` exist only to warm up trailing
        # windows; the output matrix covers only [start_date, end_date].
        lifetimes_between_dates = lifetimes[max_extra_rows:]
        dates = lifetimes_between_dates.index.values
        assets = lifetimes_between_dates.columns.values
        raw_outputs = self.compute_chunk(
            ordered_terms,
            extra_row_counts,
            lifetimes,
        )
        # We only need filters and factors to compute the final output matrix.
        # The lifetimes matrix itself acts as an implicit existence filter.
        raw_filters = [lifetimes_between_dates.values]
        raw_factors = []
        factor_names = []
        for name, term in iteritems(terms):
            # Trim each term's warm-up rows before formatting the output.
            extra = extra_row_counts[term]
            if isinstance(term, Factor):
                factor_names.append(name)
                raw_factors.append(raw_outputs[term][extra:])
            elif isinstance(term, Filter):
                raw_filters.append(raw_outputs[term][extra:])
        return self._format_factor_matrix(
            dates,
            assets,
            raw_filters,
            raw_factors,
            factor_names,
        )
def build_lifetimes_matrix(self, start_date, end_date, extra_rows):
    """
    Compute a lifetimes matrix from our AssetFinder, then drop columns that
    didn't exist at all during the query dates.

    Parameters
    ----------
    start_date : pd.Timestamp
        Base start date for the matrix.
    end_date : pd.Timestamp
        End date for the matrix.
    extra_rows : int
        Number of rows prior to `start_date` to include.
        Extra rows are needed by terms like moving averages that require a
        trailing window of data to compute.

    Returns
    -------
    lifetimes : pd.DataFrame
        Frame of dtype `bool` containing dates from `extra_rows` days
        before `start_date`, continuing through to `end_date`.  The
        returned frame contains as columns all assets in our AssetFinder
        that existed for at least one day between `start_date` and
        `end_date`.

    Raises
    ------
    NoFurtherDataError
        If the calendar does not extend `extra_rows` days before
        `start_date`.
    """
    calendar = self._calendar
    finder = self._finder
    start_idx, end_idx = self._calendar.slice_locs(start_date, end_date)
    if start_idx < extra_rows:
        raise NoFurtherDataError(
            msg="Insufficient data to compute FFC Matrix: "
                "start date was %s, "
                "earliest known date was %s, "
                "and %d extra rows were requested." % (
                    start_date, calendar[0], extra_rows,
                ),
        )
    # Build lifetimes matrix reaching back as far as start_date minus
    # extra_rows.
    lifetimes = finder.lifetimes(
        calendar[start_idx - extra_rows:end_idx]
    )
    assert lifetimes.index[extra_rows] == start_date
    assert lifetimes.index[-1] == end_date
    # BUG FIX: `Index.unique` is a *method*, so `not lifetimes.columns.unique`
    # was always False and the duplicate-sid check could never fire.  Use the
    # `is_unique` property instead.
    if not lifetimes.columns.is_unique:
        columns = lifetimes.columns
        duplicated = columns[columns.duplicated()].unique()
        # BUG FIX: `%d` cannot format an array of sids; use `%s`.
        raise AssertionError("Duplicated sids: %s" % duplicated)
    # Filter out columns that didn't exist between the requested start and
    # end dates.
    existed = lifetimes.iloc[extra_rows:].any()
    return lifetimes.loc[:, existed]
def _inputs_for_term(self, term, workspace, extra_row_counts):
    """
    Look up, and trim to the correct length, the stored inputs for `term`.

    Each input is stored with as many extra leading rows as the most
    demanding consumer requires, so inputs for less-demanding terms must
    have their surplus leading rows removed before use.  E.g. if Factor A
    needs 5 extra rows of price and Factor B needs 3, we drop 2 leading
    rows from the stored prices before handing them to Factor B.
    """
    # Per-input count of surplus leading rows to skip for this term.
    offsets = {
        input_: extra_row_counts[input_] - term.extra_input_rows
        for input_ in term.inputs
    }
    if term.windowed:
        # Windowed terms consume window-traversal objects.
        return [
            workspace[input_].traverse(
                term.window_length,
                offset=offsets[input_],
            )
            for input_ in term.inputs
        ]
    # Non-windowed terms consume plain ndarrays with surplus rows sliced off.
    return [
        ensure_ndarray(workspace[input_][offsets[input_]:])
        for input_ in term.inputs
    ]
def compute_chunk(self, ordered_terms, extra_row_counts, base_mask):
    """
    Compute the FFC terms in the graph based on the assets and dates
    defined by base_mask.

    `ordered_terms` must be topologically sorted, so that every term's
    inputs appear (and are therefore computed) before the term itself.
    `extra_row_counts` maps each term to the number of extra leading rows
    it requires; `base_mask` includes the maximum such count.

    Returns a dictionary mapping terms to computed arrays.
    """
    loader = self._loader
    max_extra_rows = max(extra_row_counts.values())
    # Pre-seed the workspace so every term has an entry before computation.
    workspace = {term: None for term in ordered_terms}
    for term in ordered_terms:
        # Trim the mask down to just the extra rows this term needs.
        base_mask_for_term = base_mask.iloc[
            max_extra_rows - extra_row_counts[term]:
        ]
        if term.atomic:
            # Atomic terms are loaded from the data loader rather than
            # computed from other terms.
            # FUTURE OPTIMIZATION: Scan the resolution order for terms in
            # the same dataset and load them here as well.
            to_load = [term]
            loaded = loader.load_adjusted_array(
                to_load,
                base_mask_for_term,
            )
            # NOTE(review): zip_longest (rather than zip) presumably guards
            # against a loader returning the wrong number of arrays --
            # extras would pair with None.  Confirm against loader contract.
            for loaded_term, adj_array in zip_longest(to_load, loaded):
                workspace[loaded_term] = adj_array
        else:
            # Derived terms are computed from their already-computed inputs
            # using the windowed or array-based API as appropriate.
            if term.windowed:
                compute = term.compute_from_windows
            else:
                compute = term.compute_from_arrays
            workspace[term] = compute(
                self._inputs_for_term(term, workspace, extra_row_counts),
                base_mask_for_term,
            )
    return workspace
def _format_factor_matrix(self,
                          dates,
                          assets,
                          filter_data,
                          factor_data,
                          factor_names):
    """
    Convert raw computed filters/factors into a DataFrame for public APIs.

    Parameters
    ----------
    dates : np.array[datetime64]
        Index for raw data in filter_data/factor_data.
    assets : np.array[int64]
        Column labels for raw data in filter_data/factor_data.
    filter_data : list[ndarray[bool]]
        Raw filters data.
    factor_data : list[ndarray]
        Raw factor data.
    factor_names : list[str]
        Names of factors to use as keys.

    Returns
    -------
    factor_matrix : pd.DataFrame
        A DataFrame with the following indices:

        index : two-tiered MultiIndex of (date, asset). For each date, we
            return a row for each asset that passed all filters on that
            date.
        columns : keys from `factor_data`

    Each date/asset/factor triple contains the computed value of the given
    factor on the given date for the given asset.
    """
    # FUTURE OPTIMIZATION: Cythonize all of this.

    # Boolean mask of values that passed all filters.
    # NOTE: despite the name, `unioned` is the logical AND (intersection)
    # of all filters, not their union.
    unioned = reduce(and_, filter_data)

    # Parallel arrays of (x,y) coords for all date/asset pairs that passed
    # all filters. Each entry here will correspond to a row in our output
    # frame.
    nonzero_xs, nonzero_ys = unioned.nonzero()

    # Raw per-row (date, asset) labels; these become the output index.
    raw_dates_index = empty_like(nonzero_xs, dtype='datetime64[ns]')
    raw_assets_index = empty_like(nonzero_xs, dtype=int)
    # One flat output array per requested factor.
    factor_outputs = [
        empty_like(nonzero_xs, dtype=factor.dtype)
        for factor in factor_data
    ]

    # This is tricky.
    # unioned.sum(axis=1) gives us an array of the same size as `dates`
    # containing, for each date, the number of assets that passed our
    # filters on that date.
    # Running this through add.accumulate gives us an array containing, for
    # each date, the running total of the number of assets that passed our
    # filters on or before that date.
    # This means that (bounds[i - 1], bounds[i]) gives us the slice bounds
    # of rows in our output DataFrame corresponding to each date.
    dt_start = 0
    bounds = add.accumulate(unioned.sum(axis=1))
    for dt_idx, dt_end in enumerate(bounds):
        # Fill in every output row belonging to this date.
        row_bounds = slice(dt_start, dt_end)
        column_indices = nonzero_ys[row_bounds]
        raw_dates_index[row_bounds] = dates[dt_idx]
        raw_assets_index[row_bounds] = assets[column_indices]
        for computed, output in zip(factor_data, factor_outputs):
            output[row_bounds] = computed[dt_idx, column_indices]

        # Upper bound of current row becomes lower bound for next row.
        dt_start = dt_end

    return DataFrame(
        dict(zip(factor_names, factor_outputs)),
        index=MultiIndex.from_arrays(
            [
                raw_dates_index,
                # FUTURE OPTIMIZATION:
                # Avoid duplicate lookups by grouping and only looking up
                # each unique sid once.
                self._finder.retrieve_all(raw_assets_index),
            ],
        )
    ).tz_localize('UTC', level=0)
<file_sep>from __future__ import print_function
import sys
import doctest
from unittest import TestCase
from zipline.lib import adjustment
from zipline.modelling import (
engine,
expression,
)
from zipline.utils import (
lazyval,
test_utils,
)
class DoctestTestCase(TestCase):
@classmethod
def setUpClass(cls):
import pdb
# Workaround for the issue addressed by this (unmerged) PR to pdbpp:
# https://bitbucket.org/antocuni/pdb/pull-request/40/fix-ensure_file_can_write_unicode/diff # noqa
if '_pdbpp_path_hack' in pdb.__file__:
cls._skip = True
else:
cls._skip = False
def _check_docs(self, module):
if self._skip:
# Printing this directly to __stdout__ so that it doesn't get
# captured by nose.
print("Warning: Skipping doctests for %s because "
"pdbpp is installed." % module.__name__, file=sys.__stdout__)
return
try:
doctest.testmod(module, verbose=True, raise_on_error=True)
except doctest.UnexpectedException as e:
raise e.exc_info[1]
def test_adjustment_docs(self):
self._check_docs(adjustment)
def test_expression_docs(self):
self._check_docs(expression)
def test_engine_docs(self):
self._check_docs(engine)
def test_lazyval_docs(self):
self._check_docs(lazyval)
def test_test_utils_docs(self):
self._check_docs(test_utils)
<file_sep>"""
Tests for Factor terms.
"""
from unittest import TestCase
from numpy import (
array,
)
from numpy.testing import assert_array_equal
from pandas import (
DataFrame,
date_range,
Int64Index,
)
from six import iteritems
from zipline.errors import UnknownRankMethod
from zipline.modelling.factor import TestingFactor
class F(TestingFactor):
    # Trivial factor with no inputs and no trailing window, used purely as
    # a host for exercising Factor methods such as `rank` in the tests.
    inputs = ()
    window_length = 0
class FactorTestCase(TestCase):
    """
    Tests for Factor methods, using the trivial TestingFactor subclass `F`.
    """

    def setUp(self):
        self.f = F()
        self.dates = date_range('2014-01-01', periods=5, freq='D')
        self.assets = Int64Index(range(5))
        self.mask = DataFrame(True, index=self.dates, columns=self.assets)

    def tearDown(self):
        pass

    def test_bad_input(self):
        # Unrecognized rank methods should be rejected up front.
        with self.assertRaises(UnknownRankMethod):
            self.f.rank("not a real rank method")

    def test_rank(self):
        # Generated with:
        # data = arange(25).reshape(5, 5).transpose() % 4
        data = array([[0, 1, 2, 3, 0],
                      [1, 2, 3, 0, 1],
                      [2, 3, 0, 1, 2],
                      [3, 0, 1, 2, 3],
                      [0, 1, 2, 3, 0]])

        # Expected row-wise ranks for each scipy-style tie-break method.
        expected_ranks = {
            'ordinal': array([[1., 3., 4., 5., 2.],
                              [2., 4., 5., 1., 3.],
                              [3., 5., 1., 2., 4.],
                              [4., 1., 2., 3., 5.],
                              [1., 3., 4., 5., 2.]]),
            'average': array([[1.5, 3., 4., 5., 1.5],
                              [2.5, 4., 5., 1., 2.5],
                              [3.5, 5., 1., 2., 3.5],
                              [4.5, 1., 2., 3., 4.5],
                              [1.5, 3., 4., 5., 1.5]]),
            'min': array([[1., 3., 4., 5., 1.],
                          [2., 4., 5., 1., 2.],
                          [3., 5., 1., 2., 3.],
                          [4., 1., 2., 3., 4.],
                          [1., 3., 4., 5., 1.]]),
            'max': array([[2., 3., 4., 5., 2.],
                          [3., 4., 5., 1., 3.],
                          [4., 5., 1., 2., 4.],
                          [5., 1., 2., 3., 5.],
                          [2., 3., 4., 5., 2.]]),
            'dense': array([[1., 2., 3., 4., 1.],
                            [2., 3., 4., 1., 2.],
                            [3., 4., 1., 2., 3.],
                            [4., 1., 2., 3., 4.],
                            [1., 2., 3., 4., 1.]]),
        }

        # Test with the default, which should be 'ordinal'.
        default_result = self.f.rank().compute_from_arrays([data], self.mask)
        assert_array_equal(default_result, expected_ranks['ordinal'])

        # Test with each method passed explicitly.
        for method, expected_result in iteritems(expected_ranks):
            result = self.f.rank(method=method).compute_from_arrays(
                [data],
                self.mask,
            )
            # FIX: compare against the loop variable `expected_result`,
            # which was previously unused (the assertion re-indexed
            # `expected_ranks[method]` instead).
            assert_array_equal(result, expected_result)
<file_sep>"""
Base class for FFC data loaders.
"""
from abc import (
ABCMeta,
abstractmethod,
)
from six import with_metaclass
class FFCLoader(with_metaclass(ABCMeta)):
    """
    ABC for classes that can load data for use with zipline.modelling pipeline.
    """
    @abstractmethod
    def load_adjusted_array(self, columns, mask):
        """
        Load raw data for `columns` restricted to the dates/assets in `mask`.

        Parameters
        ----------
        columns : list
            Atomic terms (dataset columns) to load.
        mask : pd.DataFrame
            Boolean frame of (date, asset) rows to load data for.

        Implementations return an iterable of adjusted arrays, one entry
        per element of `columns`, in the same order (the engine zips the
        result against `columns`).
        """
        pass
<file_sep>"""
An immutable, lazily loaded value descriptor.
"""
from weakref import WeakKeyDictionary
class lazyval(object):
    """
    Descriptor/decorator for attributes that should be computed on first
    access and memoized thereafter.  The cache keys instances weakly, so
    memoized values do not keep their owners alive.

    Example
    -------

    >>> from zipline.utils.lazyval import lazyval
    >>> class C(object):
    ...     def __init__(self):
    ...         self.count = 0
    ...     @lazyval
    ...     def val(self):
    ...         self.count += 1
    ...         return "val"
    ...
    >>> c = C()
    >>> c.count
    0
    >>> c.val, c.count
    ('val', 1)
    >>> c.val, c.count
    ('val', 1)
    """

    # Sentinel distinguishing "not cached yet" from a cached None.
    _missing = object()

    def __init__(self, get):
        self._get = get
        self._cache = WeakKeyDictionary()

    def __get__(self, instance, owner):
        # Class-level access (Owner.attr) returns the descriptor itself.
        if instance is None:
            return self
        cached = self._cache.get(instance, self._missing)
        if cached is self._missing:
            cached = self._cache[instance] = self._get(instance)
        return cached
<file_sep>"""
Tests for SimpleFFCEngine
"""
from __future__ import division
from unittest import TestCase
from itertools import product
from numpy import (
full,
isnan,
nan,
)
from numpy.testing import assert_array_equal
from pandas import (
DataFrame,
date_range,
Int64Index,
MultiIndex,
rolling_mean,
Series,
Timestamp,
)
from pandas.util.testing import assert_frame_equal
from testfixtures import TempDirectory
from zipline.assets import AssetFinder
from zipline.data.equities import USEquityPricing
from zipline.data.ffc.synthetic import (
ConstantLoader,
MultiColumnLoader,
NullAdjustmentReader,
SyntheticDailyBarWriter,
)
from zipline.data.ffc.frame import (
DataFrameFFCLoader,
MULTIPLY,
)
from zipline.data.ffc.loaders.us_equity_pricing import (
BcolzDailyBarReader,
USEquityPricingLoader,
)
from zipline.finance.trading import TradingEnvironment
from zipline.modelling.engine import SimpleFFCEngine
from zipline.modelling.factor import TestingFactor
from zipline.modelling.factor.technical import (
MaxDrawdown,
SimpleMovingAverage,
)
from zipline.utils.lazyval import lazyval
from zipline.utils.test_utils import (
make_rotating_asset_info,
make_simple_asset_info,
product_upper_triangle,
check_arrays,
)
class RollingSumDifference(TestingFactor):
    # Factor computing, per asset, the sum of (open - close) over the
    # trailing `window_length` days.
    window_length = 3
    inputs = [USEquityPricing.open, USEquityPricing.close]

    def from_windows(self, open, close):
        # `open`/`close` are trailing windows (presumably window x assets --
        # axis 0 is the window dimension, which the sum collapses).
        return (open - close).sum(axis=0)
def assert_product(case, index, *levels):
    """Assert that a MultiIndex contains the product of `*levels`."""
    message = "%s is not a MultiIndex" % index
    case.assertIsInstance(index, MultiIndex, message)
    expected = set(product(*levels))
    case.assertEqual(set(index), expected)
class ConstantInputTestCase(TestCase):
    """
    Engine tests against a ConstantLoader, so every pricing column yields
    the same known value for every date/asset and expected factor outputs
    can be computed by hand.
    """

    def setUp(self):
        self.constants = {
            # Every day, assume every stock starts at 2, goes down to 1,
            # goes up to 4, and finishes at 3.
            USEquityPricing.low: 1,
            USEquityPricing.open: 2,
            USEquityPricing.close: 3,
            USEquityPricing.high: 4,
        }
        self.assets = [1, 2, 3]
        self.dates = date_range('2014-01-01', '2014-02-01', freq='D', tz='UTC')
        self.loader = ConstantLoader(
            constants=self.constants,
            dates=self.dates,
            assets=self.assets,
        )
        self.asset_info = make_simple_asset_info(
            self.assets,
            start_date=self.dates[0],
            end_date=self.dates[-1],
        )
        self.asset_finder = AssetFinder(self.asset_info)

    def test_bad_dates(self):
        # Empty and inverted date ranges should both be rejected.
        loader = self.loader
        engine = SimpleFFCEngine(loader, self.dates, self.asset_finder)

        msg = "start_date must be before end_date .*"
        with self.assertRaisesRegexp(ValueError, msg):
            engine.factor_matrix({}, self.dates[2], self.dates[1])
        with self.assertRaisesRegexp(ValueError, msg):
            engine.factor_matrix({}, self.dates[2], self.dates[2])

    def test_single_factor(self):
        loader = self.loader
        finder = self.asset_finder
        assets = self.assets
        engine = SimpleFFCEngine(loader, self.dates, self.asset_finder)
        result_shape = (num_dates, num_assets) = (5, len(assets))
        dates = self.dates[10:10 + num_dates]

        factor = RollingSumDifference()

        result = engine.factor_matrix({'f': factor}, dates[0], dates[-1])
        self.assertEqual(set(result.columns), {'f'})

        assert_product(self, result.index, dates, finder.retrieve_all(assets))

        # Each window sums (open - close) = (2 - 3) = -1 over
        # window_length days, so every output is -window_length.
        assert_array_equal(
            result['f'].unstack().values,
            full(result_shape, -factor.window_length),
        )

    def test_multiple_rolling_factors(self):
        loader = self.loader
        finder = self.asset_finder
        assets = self.assets
        engine = SimpleFFCEngine(loader, self.dates, self.asset_finder)
        shape = num_dates, num_assets = (5, len(assets))
        dates = self.dates[10:10 + num_dates]

        short_factor = RollingSumDifference(window_length=3)
        long_factor = RollingSumDifference(window_length=5)
        high_factor = RollingSumDifference(
            window_length=3,
            inputs=[USEquityPricing.open, USEquityPricing.high],
        )

        results = engine.factor_matrix(
            {'short': short_factor, 'long': long_factor, 'high': high_factor},
            dates[0],
            dates[-1],
        )

        self.assertEqual(set(results.columns), {'short', 'high', 'long'})
        assert_product(self, results.index, dates, finder.retrieve_all(assets))

        # row-wise sum over an array whose values are all (2 - 3)
        assert_array_equal(
            results['short'].unstack().values,
            full(shape, -short_factor.window_length),
        )
        assert_array_equal(
            results['long'].unstack().values,
            full(shape, -long_factor.window_length),
        )
        # row-wise sum over an array whose values are all (2 - 4)
        assert_array_equal(
            results['high'].unstack().values,
            full(shape, -2 * high_factor.window_length),
        )

    def test_numeric_factor(self):
        # Exercises arithmetic on factors (addition and division).
        constants = self.constants
        loader = self.loader
        engine = SimpleFFCEngine(loader, self.dates, self.asset_finder)
        num_dates = 5
        dates = self.dates[10:10 + num_dates]
        high, low = USEquityPricing.high, USEquityPricing.low
        open, close = USEquityPricing.open, USEquityPricing.close

        high_minus_low = RollingSumDifference(inputs=[high, low])
        open_minus_close = RollingSumDifference(inputs=[open, close])
        avg = (high_minus_low + open_minus_close) / 2

        results = engine.factor_matrix(
            {
                'high_low': high_minus_low,
                'open_close': open_minus_close,
                'avg': avg,
            },
            dates[0],
            dates[-1],
        )

        high_low_result = results['high_low'].unstack()
        expected_high_low = 3.0 * (constants[high] - constants[low])
        assert_frame_equal(
            high_low_result,
            DataFrame(
                expected_high_low,
                index=dates,
                columns=self.assets,
            )
        )

        open_close_result = results['open_close'].unstack()
        expected_open_close = 3.0 * (constants[open] - constants[close])
        assert_frame_equal(
            open_close_result,
            DataFrame(
                expected_open_close,
                index=dates,
                columns=self.assets,
            )
        )

        avg_result = results['avg'].unstack()
        expected_avg = (expected_high_low + expected_open_close) / 2.0
        assert_frame_equal(
            avg_result,
            DataFrame(
                expected_avg,
                index=dates,
                columns=self.assets,
            )
        )
class FrameInputTestCase(TestCase):
    """
    Engine tests against DataFrameFFCLoader, verifying that adjustments
    applied by the loader reproduce a pre-adjusted baseline frame.
    """

    def setUp(self):
        env = TradingEnvironment.instance()
        day = env.trading_day

        self.assets = Int64Index([1, 2, 3])
        self.dates = date_range(
            '2015-01-01',
            '2015-01-31',
            freq=day,
            tz='UTC',
        )

        asset_info = make_simple_asset_info(
            self.assets,
            start_date=self.dates[0],
            end_date=self.dates[-1],
        )
        self.asset_finder = AssetFinder(asset_info)

    @lazyval
    def base_mask(self):
        # All assets exist on all dates.
        return self.make_frame(True)

    def make_frame(self, data):
        # Frame of `data` broadcast over our full date/asset grid.
        return DataFrame(data, columns=self.assets, index=self.dates)

    def test_compute_with_adjustments(self):
        dates, assets = self.dates, self.assets
        low, high = USEquityPricing.low, USEquityPricing.high
        apply_idxs = [3, 10, 16]

        def apply_date(idx, offset=0):
            return dates[apply_idxs[idx] + offset]

        # Three multiplicative adjustments on asset[1], each effective from
        # its apply_date backwards.
        adjustments = DataFrame.from_records(
            [
                dict(
                    kind=MULTIPLY,
                    sid=assets[1],
                    value=2.0,
                    start_date=None,
                    end_date=apply_date(0, offset=-1),
                    apply_date=apply_date(0),
                ),
                dict(
                    kind=MULTIPLY,
                    sid=assets[1],
                    value=3.0,
                    start_date=None,
                    end_date=apply_date(1, offset=-1),
                    apply_date=apply_date(1),
                ),
                dict(
                    kind=MULTIPLY,
                    sid=assets[1],
                    value=5.0,
                    start_date=None,
                    end_date=apply_date(2, offset=-1),
                    apply_date=apply_date(2),
                ),
            ]
        )
        low_base = DataFrame(self.make_frame(30.0))
        low_loader = DataFrameFFCLoader(low, low_base.copy(), adjustments=None)

        # Pre-apply inverse of adjustments to the baseline.
        high_base = DataFrame(self.make_frame(30.0))
        high_base.iloc[:apply_idxs[0], 1] /= 2.0
        high_base.iloc[:apply_idxs[1], 1] /= 3.0
        high_base.iloc[:apply_idxs[2], 1] /= 5.0

        high_loader = DataFrameFFCLoader(high, high_base, adjustments)
        loader = MultiColumnLoader({low: low_loader, high: high_loader})

        engine = SimpleFFCEngine(loader, self.dates, self.asset_finder)

        # Check every (start, stop) window for a few window lengths; the
        # adjusted 'high' series should round-trip back to 30.0 everywhere,
        # matching the unadjusted 'low' baseline behavior.
        for window_length in range(1, 4):
            low_mavg = SimpleMovingAverage(
                inputs=[USEquityPricing.low],
                window_length=window_length,
            )
            high_mavg = SimpleMovingAverage(
                inputs=[USEquityPricing.high],
                window_length=window_length,
            )
            bounds = product_upper_triangle(range(window_length, len(dates)))
            for start, stop in bounds:
                results = engine.factor_matrix(
                    {'low': low_mavg, 'high': high_mavg},
                    dates[start],
                    dates[stop],
                )
                self.assertEqual(set(results.columns), {'low', 'high'})
                iloc_bounds = slice(start, stop + 1)  # +1 to include end date

                low_results = results.unstack()['low']
                assert_frame_equal(low_results, low_base.iloc[iloc_bounds])

                high_results = results.unstack()['high']
                assert_frame_equal(high_results, high_base.iloc[iloc_bounds])
class SyntheticBcolzTestCase(TestCase):
    """
    Engine tests running against synthetic daily-bar data written to a
    temporary bcolz store and read back through USEquityPricingLoader.
    """

    @classmethod
    def setUpClass(cls):
        cls.first_asset_start = Timestamp('2015-04-01', tz='UTC')
        cls.env = TradingEnvironment.instance()
        cls.trading_day = cls.env.trading_day

        # Six assets with staggered, overlapping lifetimes.
        cls.asset_info = make_rotating_asset_info(
            num_assets=6,
            first_start=cls.first_asset_start,
            frequency=cls.trading_day,
            periods_between_starts=4,
            asset_lifetime=8,
        )
        cls.all_assets = cls.asset_info.index
        cls.all_dates = date_range(
            start=cls.first_asset_start,
            end=cls.asset_info['end_date'].max(),
            freq=cls.trading_day,
        )

        cls.finder = AssetFinder(cls.asset_info)

        cls.temp_dir = TempDirectory()
        cls.temp_dir.create()

        # Write synthetic bars to disk and wire up a loader reading them
        # back with no adjustments.
        cls.writer = SyntheticDailyBarWriter(
            asset_info=cls.asset_info[['start_date', 'end_date']],
            calendar=cls.all_dates,
        )
        table = cls.writer.write(
            cls.temp_dir.getpath('testdata.bcolz'),
            cls.all_dates,
            cls.all_assets,
        )

        cls.ffc_loader = USEquityPricingLoader(
            BcolzDailyBarReader(table),
            NullAdjustmentReader(),
        )

    @classmethod
    def tearDownClass(cls):
        cls.temp_dir.cleanup()

    def test_SMA(self):
        engine = SimpleFFCEngine(
            self.ffc_loader,
            self.env.trading_days,
            self.finder,
        )
        dates, assets = self.all_dates, self.all_assets
        window_length = 5
        SMA = SimpleMovingAverage(
            inputs=(USEquityPricing.close,),
            window_length=window_length,
        )

        results = engine.factor_matrix(
            {'sma': SMA},
            dates[window_length],
            dates[-1],
        )
        raw_closes = self.writer.expected_values_2d(dates, assets, 'close')
        expected_sma_result = rolling_mean(
            raw_closes,
            window_length,
            min_periods=1,
        )
        # NaN out dates where the asset didn't exist, then drop the warmup
        # rows so the expectation lines up with the engine's output range.
        expected_sma_result[isnan(raw_closes)] = nan
        expected_sma_result = expected_sma_result[window_length:]

        sma_result = results['sma'].unstack()
        assert_frame_equal(
            sma_result,
            DataFrame(
                expected_sma_result,
                index=dates[window_length:],
                columns=assets,
            ),
        )

    def test_drawdown(self):
        # The monotonically-increasing data produced by SyntheticDailyBarWriter
        # exercises two pathological cases for MaxDrawdown. The actual
        # computed results are pretty much useless (everything is either NaN
        # or zero), but verifying we correctly handle those corner cases is
        # valuable.
        engine = SimpleFFCEngine(
            self.ffc_loader,
            self.env.trading_days,
            self.finder,
        )
        dates, assets = self.all_dates, self.all_assets
        window_length = 5
        drawdown = MaxDrawdown(
            inputs=(USEquityPricing.close,),
            window_length=window_length,
        )

        results = engine.factor_matrix(
            {'drawdown': drawdown},
            dates[window_length],
            dates[-1],
        )

        dd_result = results['drawdown']

        # We expect NaNs when the asset was undefined, otherwise 0 everywhere,
        # since the input is always increasing.
        expected = self.writer.expected_values_2d(dates, assets, 'close')
        expected[~isnan(expected)] = 0
        expected = expected[window_length:]

        assert_frame_equal(
            dd_result.unstack(),
            DataFrame(
                expected,
                index=dates[window_length:],
                columns=assets,
            ),
        )
class MultiColumnLoaderTestCase(TestCase):
    """
    Verifies that the engine works when each column is routed to its own
    underlying loader via MultiColumnLoader.
    """

    def setUp(self):
        self.assets = [1, 2, 3]
        self.dates = date_range('2014-01-01', '2014-02-01', freq='D', tz='UTC')

        asset_info = make_simple_asset_info(
            self.assets,
            start_date=self.dates[0],
            end_date=self.dates[-1],
        )
        self.asset_finder = AssetFinder(asset_info)

    def test_engine_with_multicolumn_loader(self):
        open_, close = USEquityPricing.open, USEquityPricing.close

        # One ConstantLoader per column, each with a distinct value.
        loader = MultiColumnLoader({
            open_: ConstantLoader(dates=self.dates,
                                  assets=self.assets,
                                  constants={open_: 1}),
            close: ConstantLoader(dates=self.dates,
                                  assets=self.assets,
                                  constants={close: 2})
        })

        engine = SimpleFFCEngine(loader, self.dates, self.asset_finder)

        factor = RollingSumDifference()

        result = engine.factor_matrix({'f': factor},
                                      self.dates[2],
                                      self.dates[-1])
        self.assertIsNotNone(result)
        self.assertEqual({'f'}, set(result.columns))

        # (open - close) * window = (1 - 2) * 3 = -3
        # skipped 2 from the start, so that the window is full
        check_arrays(result['f'],
                     Series([-3] * len(self.assets) * (len(self.dates) - 2)))
| a940ce3d4a41df1964055851943def7d5ab1dc1d | [
"Python"
] | 13 | Python | joequant/zipline | dd1f9221f937909b4d99be3e44ba0e4f8c19662e | 865ed2241a9c5650866a03532a576e70c71a980e |
refs/heads/master | <repo_name>SudolSzymon/SzyomnPongApp<file_sep>/MyPong/app/src/main/java/com/example/szymon/mypong/Bat.java
package com.example.szymon.mypong;
import android.graphics.RectF;
/**
* Created by Szymon on 26/03/2018.
*/
public class Bat {

    /** Collision/draw rectangle for the bat. */
    private RectF rect;

    /** Bat dimensions, derived from the screen size. */
    private float length;
    private float height;

    /** Horizontal speed in pixels per second (one screen width per second). */
    private int batSpeed;

    /** Top-left corner of the bat. */
    private float xCoord;
    private float yCoord;

    /** Movement states accepted by {@link #setMovementState(int)}. */
    public final int STOPPED = 0;
    public final int LEFT = 1;
    public final int RIGHT = 2;

    /** Current movement state; starts stationary. */
    private int batMoving = STOPPED;

    /** Cached screen dimensions. */
    private int screenX;
    private int screenY;

    /**
     * Creates a bat sized relative to the screen and positioned
     * horizontally centered, {@code bar} pixels above the bottom edge.
     */
    public Bat(int x, int y, int bar) {
        screenX = x;
        screenY = y;
        length = screenX / 8;
        height = screenY / 50;
        xCoord = screenX / 2;
        yCoord = screenY - bar;
        rect = new RectF(xCoord, yCoord, xCoord + length, yCoord + height);
        batSpeed = screenX;
    }

    /** Returns the bat's current rectangle. */
    public RectF getRect() {
        return rect;
    }

    /** Sets the movement state to STOPPED, LEFT, or RIGHT. */
    public void setMovementState(int state) {
        batMoving = state;
    }

    /**
     * Advances the bat one frame, moving by speed/fps pixels and clamping
     * the bat to the screen bounds.
     */
    public void update(long fps) {
        if (batMoving == LEFT) {
            xCoord = xCoord - batSpeed / fps;
        } else if (batMoving == RIGHT) {
            xCoord = xCoord + batSpeed / fps;
        }

        // Clamp the bat inside the screen.
        if (rect.left < 0) {
            xCoord = 0;
        }
        if (rect.right > screenX) {
            xCoord = screenX - length;
        }

        rect.left = xCoord;
        rect.right = xCoord + length;
    }
}
| b06dd802f40dfec8b5c686ef8f8885471bb9e961 | [
"Java"
] | 1 | Java | SudolSzymon/SzyomnPongApp | faeec9f867240407011560f8ed62e5ae3d2edcb5 | d6df7806f3670e9511a10d0d96e6e10bcdc6b68a |
refs/heads/master | <file_sep>package com.mapache.scorebasketdatabinding
import android.arch.lifecycle.ViewModelProviders
import android.databinding.DataBindingUtil
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.view.View
import com.mapache.scorebasketdatabinding.databinding.ActivityMainBinding
import com.mapache.scorebasketdatabinding.pojos.Score
class MainActivity : AppCompatActivity() {

    lateinit var score: Score

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // FIX: the layout is inflated exactly once, via DataBindingUtil.
        // Previously a plain setContentView(R.layout.activity_main) ran
        // first, inflating the layout a second time and discarding the
        // bound copy's predecessor.
        score = ViewModelProviders.of(this).get(Score::class.java)
        if (score.scoreA.get() == null) score.scoreA.set("0")
        if (score.scoreB.get() == null) score.scoreB.set("0")
        val binding: ActivityMainBinding = DataBindingUtil.setContentView(this, R.layout.activity_main)
        binding.score = score
    }

    /** Adds the tapped button's point value to the corresponding team. */
    fun scorePlus(view: View) {
        when (view.id) {
            R.id.teamA_1 -> addToA(1)
            R.id.teamA_2 -> addToA(2)
            R.id.teamA_3 -> addToA(3)
            R.id.teamB_1 -> addToB(1)
            R.id.teamB_2 -> addToB(2)
            R.id.teamB_3 -> addToB(3)
        }
    }

    /** Resets both teams' scores to zero. */
    fun resetScore(view: View) {
        score.scoreA.set("0")
        score.scoreB.set("0")
    }

    // Helpers factoring out the repeated get/parse/add/set pattern.
    private fun addToA(points: Int) {
        score.scoreA.set((score.scoreA.get()!!.toInt() + points).toString())
    }

    private fun addToB(points: Int) {
        score.scoreB.set((score.scoreB.get()!!.toInt() + points).toString())
    }
}
<file_sep>package com.mapache.scorebasketdatabinding.pojos
import android.arch.lifecycle.ViewModel
import android.databinding.ObservableField
class Score : ViewModel() {
    // Per-team scores, stored as strings so they can be bound directly to
    // TextViews through data binding.  Being a ViewModel, the values
    // survive configuration changes (e.g. rotation).
    var scoreA = ObservableField<String>()
    var scoreB = ObservableField<String>()
}
"Kotlin"
] | 2 | Kotlin | MaPache07/Score_Basket_Databinding | f0148e3d000341cbdd0880b20e4e9fea9caf48c0 | 82d7a6cf01d0c67daffa3817bedb746202585d33 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Drawing;
namespace DAT_Unpacker
{
static class Extension
{
    // Reads a little-endian 32-bit integer from `bytes` starting at `index`.
    public static int extractInt32(this byte[] bytes, int index = 0)
    {
        return (bytes[index + 3] << 24) + (bytes[index + 2] << 16) + (bytes[index + 1] << 8) + bytes[index + 0];
    }

    // Reads `length` bytes from the stream.
    // NOTE(review): the `offset` parameter is never used -- reading starts
    // at the stream's current position, or at `changeOffset` when >= 0.
    // Existing callers pass 0 for `offset`; confirm before changing.
    public static byte[] extractPiece(this FileStream ms, int offset, int length, int changeOffset = -1)
    {
        if (changeOffset > -1) ms.Position = changeOffset;
        byte[] data = new byte[length];
        ms.Read(data, 0, length);
        return data;
    }

    // MemoryStream counterpart of extractPiece above; same `offset` caveat.
    public static byte[] extractPiece(this MemoryStream ms, int offset, int length, int changeOffset = -1)
    {
        if (changeOffset > -1) ms.Position = changeOffset;
        byte[] data = new byte[length];
        ms.Read(data, 0, length);
        return data;
    }

    // Writes `data` (optionally a sub-range) to a new file at `path`,
    // overwriting any existing file.
    public static void Save(this byte[] data, string path, int offset = -1, int length = -1)
    {
        int _offset = (offset > -1) ? offset : 0;
        int _length = (length > -1) ? length : data.Length;
        using (FileStream fs = File.Create(path))
        {
            fs.Write(data, _offset, _length);
        }
    }

    // Serializes an int into 4 little-endian bytes.
    public static byte[] int32ToByteArray(this int value)
    {
        byte[] result = new byte[4];
        for (int i = 0; i < 4; i++)
        {
            result[i] = (byte)((value >> i * 8) & 0xFF);
        }
        return result;
    }

    // Reads a little-endian 16-bit integer from `bytes` at `index`.
    public static Int16 extractInt16(this byte[] bytes, int index = 0)
    {
        return (short)((bytes[index + 1] << 8) + bytes[index + 0]);
    }

    // Serializes a short into 2 little-endian bytes.
    public static byte[] int16ToByteArray(this short value)
    {
        byte[] result = new byte[2];
        for (int i = 0; i < 2; i++)
        {
            result[i] = (byte)((value >> i * 8) & 0xFF);
        }
        return result;
    }

    // Copies bytes from `data` into `self`, reading from `copyOffset` and
    // writing from `destinyOffset`.
    // NOTE(review): the loop condition is `i < length`, so only
    // (length - copyOffset) bytes are copied when copyOffset > 0 --
    // possibly a bug; verify against callers before changing.
    public static byte[] copyFrom(this byte[] self, byte[] data, int copyOffset, int length, int destinyOffset = 0)
    {
        for (int i = copyOffset; i < length; i++)
        {
            self[destinyOffset + (i - copyOffset)] = data[i];
        }
        return self;
    }

    // Scales a bitmap to fit inside (maxWidth, maxHeight) while preserving
    // its aspect ratio, using nearest-neighbor interpolation (keeps pixel
    // art sharp).
    public static Bitmap ProportionallyResizeBitmap(this Bitmap sourceBitmap, int maxWidth, int maxHeight)
    {
        Size size = sourceBitmap.Size;
        // Initial fit to maxWidth.  NOTE(review): this computation is
        // duplicated by the `size.Width > maxWidth` branch below.
        int height = (int)((double)(maxWidth) / ((double)sourceBitmap.Width / (double)sourceBitmap.Height));
        size = new Size((maxWidth), height);
        if (size.Width > maxWidth)
        {
            height = (int)((double)(maxWidth) / ((double)sourceBitmap.Width / (double)sourceBitmap.Height));
            size = new Size((maxWidth), height);
        }
        // If fitting the width made it too tall, fit to maxHeight instead.
        if (size.Height > maxHeight)
        {
            int width = (int)((double)(maxHeight) * ((double)sourceBitmap.Width / (double)sourceBitmap.Height));
            size = new Size(width, (maxHeight));
        }

        Bitmap resizedBitmap = new Bitmap((int)size.Width, (int)size.Height);
        using (Graphics g = Graphics.FromImage((System.Drawing.Image)resizedBitmap))
        {
            g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.NearestNeighbor;
            g.DrawImage(sourceBitmap, 0, 0, (int)size.Width, (int)size.Height);
        }

        return resizedBitmap;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.IO;
namespace DAT_Unpacker
{
public partial class Main : Form
{
public Main()
{
InitializeComponent();
}
bool codeChange = false;
string _FILEPATH;
ImageList il = new ImageList();
List<Indexes> _INDEX = new List<Indexes>();
Dictionary<int, int> bitpp = new Dictionary<int, int>() {{4, 0}, {8,1},{16,2}};
Dictionary<int, int> bitpp2 = new Dictionary<int, int>() { { 0, 4 }, { 1, 8 }, { 2, 16 } };
TIM tim;
private ColorPallet cp;
private void Unpack()
{
if (_FILEPATH == null) return;
using (FileStream fs = new FileStream(_FILEPATH, FileMode.Open))
{
int fileNum = fs.extractPiece(0, 4, 4).extractInt32() + 1;
int headerSize = fs.extractPiece(0, 4).extractInt32() * 0x800;
int[] offsets = new int[fileNum];
string fileName = Path.GetFileNameWithoutExtension(_FILEPATH);
byte[] data;
MemoryStream header = new MemoryStream(fs.extractPiece(0, headerSize, 0));
header.Position = 8;
for (int i = 0; i < fileNum; i++)
{
offsets[i] = header.extractPiece(0, 4).extractInt32() * 0x800;
_INDEX.Add(new Indexes(offsets[i]));
}
string basePath = Path.Combine(Path.GetDirectoryName(_FILEPATH), fileName);
for (int i = 0; i < fileNum - 1; i++)
{
List<TreeNode> tn = new List<TreeNode>();
data = fs.extractPiece(0, offsets[i + 1] - offsets[i]);
if (data[3] == 0x01 && data[2] < 0x10)
{
int timNum = data.extractInt32(4);
int[] timOffsets = new int[timNum + 1];
for (int x = 0; x < timNum; x++)
{
timOffsets[x] = (data.extractInt32((4 * x) + 8) * 4) + 4;
_INDEX[i].subOffsets.Add(timOffsets[x]);
}
_INDEX[i].subOffsets.Add(data.Length);
timOffsets[timOffsets.Length - 1] = data.Length;
for (int x = 0; x < timOffsets.Length - 1; x++)
{
string timPath = Path.Combine(basePath, String.Format("{0}_{1}", fileName, i));
string ext = (data[timOffsets[x]] == 0x10) ? "TIM" : "BIN";
TreeNode tree = new TreeNode(String.Format("{0}_{1}_{2}.{3}", fileName, i, x, ext));
if (ext == "TIM")
{
tree.ImageIndex = tree.SelectedImageIndex = 1;
tree.Tag = x;
tn.Add(tree);
}
}
}
if (tn != null && tn.Count > 0)
{
TreeNode tree = new TreeNode(String.Format("{0}_{1}.BIN", fileName, i), tn.ToArray());
tree.Tag = i;
treeView1.Nodes.Add(tree);
}
}
}
}
private void btnSelect_Click(object sender, EventArgs e)
{
using (OpenFileDialog ofd = new OpenFileDialog())
{
ofd.Filter = "DAT Files|*.DAT";
if (ofd.ShowDialog() == DialogResult.OK)
{
txtPath.Text = _FILEPATH = ofd.FileName;
Unpack();
}
}
}
void CreateTIM()
{
if (_INDEX.Count == 0) return;
if (treeView1.SelectedNode != null && treeView1.SelectedNode.Parent != null)
{
if (treeView1.SelectedNode.SelectedImageIndex == 0) return;
using (FileStream fs = new FileStream(_FILEPATH, FileMode.Open))
{
if (_INDEX.Count == 0) return;
int parentIndex = (int)treeView1.SelectedNode.Parent.Tag;
int childrenIndex = (int)treeView1.SelectedNode.Tag;
int size = _INDEX[parentIndex].subOffsets[childrenIndex + 1] - _INDEX[parentIndex].subOffsets[childrenIndex];
int offset = _INDEX[parentIndex].offset + _INDEX[parentIndex].subOffsets[childrenIndex];
tim = new TIM(fs.extractPiece(0, size, offset));
changeBPP(bitpp[tim.bpp]);
}
}
ShowTIM();
}
void ShowTIM()
{
if (tim == null) return;
tim.Transparency = checkBox2.Checked;
tim.paletteIndex = (int)nudPallete.Value;
tim.bpp = bitpp2[comboBox1.SelectedIndex];
tim.clutIndex = (int)nudClut.Value;
nudPallete.Maximum = tim.maxPalleteIndex;
nudClut.Maximum = tim.clutNumber > 0 ? tim.clutNumber - 1 : 0;
if(pictureBox1.IsDisposed) pictureBox1.Image.Dispose();
if (checkBox1.Checked)
pictureBox1.Image = tim.CreateUnsafeBitmap().ProportionallyResizeBitmap(pictureBox1.Width, pictureBox1.Height);
else
pictureBox1.Image = tim.CreateUnsafeBitmap();//tim.CreateBitmap();
}
// Re-render when the "resize to fit" checkbox toggles.
private void checkBox1_CheckedChanged(object sender, EventArgs e)
{
ShowTIM();
}
// Palette spinner changed; codeChange suppresses re-entrant renders while
// values are being reset programmatically.
private void numericUpDown1_ValueChanged(object sender, EventArgs e)
{
if (codeChange) return;
ShowTIM();
}
// Selecting a tree node resets the spinners and decodes that node's TIM.
private void treeView1_AfterSelect(object sender, TreeViewEventArgs e)
{
ResetNUDValue();
CreateTIM();
}
// Zeroes both spinners without triggering their ValueChanged handlers.
void ResetNUDValue()
{
codeChange = true;
nudPallete.Value = 0;
nudClut.Value = 0;
codeChange = false;
}
// Sets the bpp combo without triggering its SelectedIndexChanged handler.
void changeBPP(int index)
{
codeChange = true;
comboBox1.SelectedIndex = index;
codeChange = false;
}
// Form load: attach the node icons and start with no selection image.
private void DUP_Load(object sender, EventArgs e)
{
treeView1.ImageList = imageList1;
treeView1.SelectedImageIndex = -1;
}
// Re-render when the transparency checkbox toggles.
private void checkBox2_CheckedChanged(object sender, EventArgs e)
{
ShowTIM();
}
// bpp combo changed by the user: reset spinners and re-render.
private void comboBox1_SelectedIndexChanged(object sender, EventArgs e)
{
if (codeChange) return;
ResetNUDValue();
ShowTIM();
}
// CLUT spinner changed by the user: re-render.
private void nudClut_ValueChanged(object sender, EventArgs e)
{
if (codeChange) return;
ShowTIM();
}
// Opens the palette popup window, owned by this form so it can call
// GetPallete() back on us.
private void button1_Click(object sender, EventArgs e)
{
cp = new ColorPallet();
cp.StartPosition = this.StartPosition;
cp.Show(this);
}
// Exposes the current image's active palette to the ColorPallet window.
public Color[] GetPallete()
{
return tim.GetActualPalleteInCollors();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Linq;
using System.Text;
using System.Windows.Forms;
namespace DAT_Unpacker
{
// A 16x16 grid of ColorSquare swatches filled with generated colors.
public partial class Pallete : UserControl
{
public Pallete()
{
InitializeComponent();
}
// Intentionally empty: suppresses the base control's default painting.
// NOTE(review): confirm this override is deliberate and not leftover.
protected override void OnPaint(PaintEventArgs e)
{
}
private void Pallete_Load(object sender, EventArgs e)
{
// Populate 256 swatches, 11px apart, with a procedurally generated color
// gradient (the actual palette viewer lives in ColorPallet).
int i = 0;
for (int y = 0; y < 16; y++)
{
for (int x = 0; x < 16; x++, i++)
{
this.Controls.Add(new ColorSquare(Color.FromArgb(x * 16, x * 16, (x + y) * 8), i));
this.Controls[i].Location = new Point(x * 11, y * 11);
// ColorSquare's setter updates BackColor, so edits raise this event.
this.Controls[i].BackColorChanged += new EventHandler(Teste_Event);
}
}
}
// Placeholder handler for swatch color changes; currently does nothing.
private void Teste_Event(object sender, EventArgs e)
{
}
}
}
<file_sep>TIMViwer
========
Reads the PSX .TIM image format.
This version analyses the PROT.DAT container from the game Legend of Legaia and shows its images.
The algorithm for reading and displaying .TIM files is included in the source.<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Drawing;
using System.Drawing.Imaging;
namespace DAT_Unpacker
{
// Decoder for the PlayStation .TIM image format.
// Parses the header and optional CLUT (color lookup table) block, then
// renders the pixel data to a System.Drawing.Bitmap. Supports 4/8-bit
// paletted and 16-bit direct-color modes.
class TIM
{
// Backing field for the bpp property (bits per pixel: 4, 8, 16 or 24).
public int _bpp;
// Width/height in pixels. "Heigth" spelling kept - it is part of the
// public surface used elsewhere in the project.
public int Width;
public int Heigth;
// Width as stored in the file (in 16-bit words, not pixels).
private int rawWidth;
// Number of CLUTs in the file and which CLUT / sub-palette is active.
public short clutNumber = 0;
public int clutIndex = 0;
public int paletteIndex = 0;
public int maxPalleteIndex = 0;
// When true, black pixels with STP=0 are rendered fully transparent.
public bool Transparency = false;
// Number of color entries per CLUT as stored in the file.
private short colorNumber = 0;
// One 512-byte buffer per CLUT (256 entries x 2 bytes).
private byte[][] clut;// = new byte[10,512];
// Stream position where pixel data begins (right after the header/CLUT).
private long imageDataPosition;
private MemoryStream data;
// Bytes occupied by one sub-palette for each bpp (16/24bpp have no CLUT).
private Dictionary<int, int> palleteSize = new Dictionary<int, int>() { { 4, 32 }, { 8, 512 }, {16, 0}, {24, 0} };
// Maps the TIM header flag value to bits-per-pixel.
private Dictionary<int, int> bpptoInt = new Dictionary<int, int>() { { 8, 4 }, { 9, 8 }, {2, 16}, {3, 24} };
// Setting bpp recomputes the highest usable sub-palette index and the
// real pixel width (the file stores width in 16-bit words).
public int bpp
{
set
{
_bpp = value;
if(value < 16) this.maxPalleteIndex = (int)Math.Floor(((double)(colorNumber) / (double)palleteSize[value]));
this.Width = GetRealWidth();
}
get { return _bpp; }
}
// Wraps the raw TIM payload and parses its header immediately.
public TIM(byte[] data)
{
this.data = new MemoryStream(data);
readHeader();
}
// Parses the TIM header: flag byte at 0x4, then (for paletted modes)
// the CLUT block starting at 0x10, then the image dimensions.
private void readHeader()
{
// Flag value: 8/9 = 4/8bpp paletted, 2/3 = 16/24bpp direct color.
int tmp = this.data.extractPiece(0, 1, 0x4)[0];
this.data.Position = 0x10;
if (tmp != 2 && tmp != 3)
{
// Paletted image: read colors-per-CLUT and CLUT count, then copy
// each CLUT (2 bytes per color) into its own 512-byte buffer.
this.colorNumber = this.data.extractPiece(0, 2, 0x10).extractInt16();
this.clutNumber = this.data.extractPiece(0, 2).extractInt16();
clut = new byte[clutNumber][];
for (int i = 0; i < clutNumber; i++)
{
clut[i] = new byte[512];
clut[i].copyFrom(this.data.extractPiece(0, colorNumber * 2), 0, colorNumber * 2);
}
//this.clut.copyFrom(this.data.extractPiece(0, colorNumber * clutNumber * 2), 0, colorNumber * clutNumber * 2);
// Skip the image-block length and x/y origin words.
this.data.Position += 8;
}
this.rawWidth = this.data.extractPiece(0, 2).extractInt16();
this.Heigth = this.data.extractPiece(0, 2).extractInt16();
// Unknown flag values fall back to 4bpp.
this.bpp = bpptoInt.ContainsKey(tmp) ? bpptoInt[tmp] : 4;
this.imageDataPosition = this.data.Position;
}
// Fast renderer: writes BGRA bytes directly into a locked 32bppArgb
// bitmap via pointers. One source byte yields two pixels at 4bpp
// (low nibble first), one pixel at 8bpp, and combines with a second
// byte (little-endian) at 16bpp.
public unsafe Bitmap CreateUnsafeBitmap()
{
this.data.Position = this.imageDataPosition;
Bitmap bmp = new Bitmap(this.Width, this.Heigth, PixelFormat.Format32bppArgb);
BitmapData _bmd = bmp.LockBits(new Rectangle(0, 0, this.Width, this.Heigth), ImageLockMode.ReadWrite, bmp.PixelFormat);
int _pixelSize = 4;
byte* _current = (byte*)(void*)_bmd.Scan0;
for (int y = 0; y < this.Heigth; y++)
{
for (int x = 0; x < this.Width; x++)
{
// NOTE(review): (x * _pixelSize) % _pixelSize is always 0, so this
// condition is always true - looks like vestigial code. Confirm.
if ((x * _pixelSize) % _pixelSize == 0 || x == 0)
{
byte t = (byte)this.data.ReadByte();
if (_bpp == 4)
{
Color color = CLUTColor(t & 0x0F);
SetPixel(_current, color);
_current += _pixelSize;
color = CLUTColor((t & 0xF0) >> 4);
SetPixel(_current, color);
x++;
}
else if (_bpp == 8)
{
Color color = CLUTColor(t);
SetPixel(_current, color);
}
else if (_bpp == 16)
{
// 16-bit little-endian BGR555 value.
ushort color = (ushort)(t + (data.ReadByte() << 8));
SetPixel(_current, Get16bitColor(color));
}
_current += _pixelSize;
}
}
}
bmp.UnlockBits(_bmd);
return bmp;
}
// Writes one ARGB color at the given pixel address (BGRA byte order,
// as used by Format32bppArgb).
private unsafe void SetPixel(byte* pixel, Color color)
{
pixel[2] = color.R;
pixel[1] = color.G;
pixel[0] = color.B;
pixel[3] = color.A;
}
// Slow reference renderer using Bitmap.SetPixel; handles only the
// paletted (4/8bpp) modes. Kept for comparison/debugging.
public Bitmap CreateBitmap()
{
this.data.Position = this.imageDataPosition;
Bitmap bmp = new Bitmap(this.Width, this.Heigth);
for (int y = 0; y < this.Heigth; y++)
{
for (int x = 0; x < this.Width; x++)
{
byte t = (byte)this.data.ReadByte();
if (_bpp == 4)
{
bmp.SetPixel(x, y, CLUTColor((t & 0x0F)));
bmp.SetPixel(x + 1, y, CLUTColor(((t & 0xF0) >> 4)));
x += 1;
}
else if (_bpp == 8)
{
bmp.SetPixel(x, y, CLUTColor(t));
}
}
}
return bmp;
}
// Resolves a palette index to a color using the active CLUT and
// sub-palette. Files without a CLUT get a random one so they still render.
private Color CLUTColor(int index)
{
if (clut == null) GenerateRandonCLUT();
// 2 bytes per entry, offset by the selected sub-palette.
index *= 2;
index += (int)paletteIndex * palleteSize[_bpp];
//index = Math.Abs(index);
//if (index > 512) index -= 512;
ushort color = (ushort)(clut[clutIndex][index] + (clut[clutIndex][index + 1] << 8));
return Get16bitColor(color);
}
// Expands a PSX 15-bit color (5 bits per channel + STP bit) to ARGB.
// Pure black with STP=0 is treated as transparent when Transparency is on.
private Color Get16bitColor(ushort color)
{
int R = (color & 0x1F) * 8;
int G = ((color & 0x3E0) >> 5) * 8;
int B = ((color & 0x7C00) >> 10) * 8;
int STP = ((color & 0x8000) >> 15);
int A = (R == 0 && G == 0 && B == 0 && STP == 0 && Transparency) ? 0 : 255;
return Color.FromArgb(A, R, G, B);
}
// Converts the stored width (16-bit words) to pixels for the current bpp.
private int GetRealWidth()
{
switch (_bpp)
{
case 4:
return rawWidth * 4;
case 8:
return rawWidth * 2;
case 16:
return rawWidth;
case 24:
return rawWidth / 2;
default:
return 1;
}
}
// Fallback CLUT of random bytes for images that ship without one.
private void GenerateRandonCLUT()
{
clut = new byte[1][];
clut[0] = new byte[512];
Random random = new Random();
for (int i = 0; i < 512; i++)
{
clut[0][i] = (byte)random.Next(0, 255);
}
}
// Raw bytes of the active sub-palette (first palleteSize[_bpp] bytes of
// the active CLUT buffer).
public byte[] GetActualPallete()
{
byte[] pallete = new byte[palleteSize[_bpp]];
for (int i = 0; i < palleteSize[_bpp]; i++)
{
pallete[i] = clut[clutIndex][i];
}
return pallete;
}
// The active sub-palette decoded to Colors (one Color per 2-byte entry).
public Color[] GetActualPalleteInCollors()
{
Color[] c = new Color[palleteSize[_bpp] / 2];
for (int i = 0; i < c.Length; i++)
{
c[i] = CLUTColor(i);
}
return c;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Linq;
using System.Text;
using System.Windows.Forms;
namespace DAT_Unpacker
{
/// <summary>
/// A small clickable swatch that displays a single palette color.
/// Double-clicking opens a color picker to change the swatch.
/// </summary>
public partial class ColorSquare : UserControl
{
    public ColorSquare()
    {
        InitializeComponent();
    }

    private Color _color = SystemColors.Control;

    /// <summary>Index of this swatch inside its parent palette grid.</summary>
    public int ID;

    /// <summary>
    /// The swatch color. Setting it also updates BackColor, which is what
    /// listeners of the BackColorChanged event observe.
    /// </summary>
    public Color color
    {
        get { return _color; }
        set
        {
            _color = value;
            this.BackColor = value;
        }
    }

    /// <summary>Creates a swatch preset to the given color and grid index.</summary>
    public ColorSquare(Color color, int id)
    {
        InitializeComponent();
        this.color = color;
        this.ID = id;
    }

    private void ColorSquare_Load(object sender, EventArgs e)
    {
    }

    // Intentionally empty: suppresses the base single-click behavior.
    protected override void OnClick(EventArgs e)
    {
    }

    private void ColorSquare_DoubleClick(object sender, EventArgs e)
    {
        // ColorDialog wraps native dialog resources; dispose it
        // deterministically instead of leaking it to the finalizer.
        using (ColorDialog cd = new ColorDialog())
        {
            if (cd.ShowDialog() == DialogResult.OK)
            {
                color = cd.Color;
            }
        }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace DAT_Unpacker
{
// One entry of the .DAT container index: the entry's absolute offset in
// the file plus the list of sub-offsets (relative to that entry) of the
// pieces packed inside it.
class Indexes
{
public int offset;
public List<int> subOffsets = new List<int>();
public Indexes(int offset)
{
this.offset = offset;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
namespace DAT_Unpacker
{
/// <summary>
/// Popup window that shows the active palette (CLUT) of the TIM currently
/// displayed by the owning Main form, as a grid of ColorSquare swatches.
/// </summary>
public partial class ColorPallet : Form
{
    public ColorPallet()
    {
        InitializeComponent();
    }

    private void ColorPallet_Load(object sender, EventArgs e)
    {
        // Palette of the image currently shown by the owner form.
        Color[] c = ((Main)this.Owner).GetPallete();
        int i = 0;
        // Lay the swatches out on a 16x16 grid, 11px apart. The palette can
        // hold fewer than 256 entries (e.g. 16 colors for 4bpp images), so
        // both loops stop once every color has been placed. The original
        // code indexed c[i] before checking the bound and only broke out of
        // the inner loop, which threw IndexOutOfRangeException for any
        // palette smaller than 256 entries.
        for (int y = 0; y < 16 && i < c.Length; y++)
        {
            for (int x = 0; x < 16 && i < c.Length; x++, i++)
            {
                this.Controls.Add(new ColorSquare(c[i], i));
                this.Controls[i].Location = new Point(x * 11, y * 11);
            }
        }
    }

    public void Teste()
    {
        MessageBox.Show("A");
    }
}
}
| fbdc4be8c0ab59980898390c9ab304f71de737f6 | [
"Markdown",
"C#"
] | 8 | C# | NetoRpg/TIMViwer | 8cc9cb781a475522506b769722c63337392ddcd7 | 990edc419977ab9dee73fa431763e5778c10be7e |
refs/heads/master | <file_sep>spring.profiles.active=@spring.profiles.active@
server.port=8081
server.servlet.context-path=/api
spring.application.name=order-service
spring.banner.location=classpath:banner.txt<file_sep>package dev.lucasdeabreu.orderservice.service;
import dev.lucasdeabreu.orderservice.domain.Order;
/**
 * Application service for persisting {@link Order} aggregates.
 */
public interface OrderService {
/** Persists the given order and returns the saved instance. */
Order save(Order order);
}
<file_sep>FROM openjdk:11
ADD ["target/order-service-0.0.1-SNAPSHOT.jar", "app.jar"]
EXPOSE 8081
RUN sh -c 'touch /app.jar'
ENTRYPOINT [ "sh", "-c", "java -Djava.security.egd=file:/dev/./urandom -Dspring.profiles.active=docker -jar /app.jar" ]<file_sep>##### The idea of this project is use eventual consistency to decoupling services between each other.
In this example `payment-service` depend on `Order` information to
register a payment, instead of call `order-service` to request such
information instead `order-service` will send events to notify when
an `Order` is creating and then `payment-service` use this events
to keep a local copy of `Order`, doing this `payment-service` does
not need request any info to `order-service`.
Those events are sent via queue using RabbitMQ.
##### Trying out
Compile
```
mvn clean package
```
Run
```
docker-compose up
```
Create an Order
```
curl -X POST \
http://localhost:8081/api/orders \
-H 'Accept: */*' \
-H 'Content-Type: application/json' \
-d '{
"address": "Rua seila",
"confirmationDate": "2016-01-25T21:34:55",
"status": "NEW",
"items": [
{
"description": "Produto 1",
"unitPrice": 2.0,
"quantity": 1
}
]
}'
```
Pay the Order
```
curl -X POST \
http://localhost:8082/api/payments \
-H 'Accept: */*' \
-H 'Content-Type: application/json' \
-d '{
"creditCard": "1121212111112",
"value": 2,
"orderId": 5
}'
```<file_sep>package dev.lucasdeabreu.paymentservice.domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.math.BigDecimal;
import java.util.List;
import java.util.Objects;
/**
 * Local copy of an order kept by payment-service (see the project README:
 * it is populated from order-created events rather than by calling
 * order-service). Mapped to the "orders" table; the item list is transient
 * and only used to compute the total value.
 */
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Data
@Entity
@Table(name = "orders")
public class Order {
// Id is assigned externally (copied from the originating order), not generated.
@Id
private Long id;
@NotNull
@Column(nullable = false)
private BigDecimal value;
@NotNull
@Enumerated(EnumType.STRING)
@Column(nullable = false)
private OrderStatus status;
// Not persisted; carried along only so calculateValue() can derive the total.
@Transient
private List<OrderItem> items;
/**
 * Recomputes {@code value} as the sum of unitPrice * quantity over all
 * items. Leaves {@code value} untouched when {@code items} is null.
 */
public void calculateValue() {
if (Objects.nonNull(items)) {
value = items.stream()
.map(i -> BigDecimal.valueOf(i.getUnitPrice()).multiply(BigDecimal.valueOf(i.getQuantity())))
.reduce(BigDecimal.ZERO, BigDecimal::add);
}
}
}
<file_sep>spring.h2.console.enabled=true
spring.h2.console.path=/h2
spring.jpa.show-sql=true
spring.datasource.url=jdbc:h2:file:./target/payment-service
spring.datasource.username=sa
spring.datasource.password=
spring.rabbitmq.host=localhost
spring.rabbitmq.port=5672
spring.rabbitmq.username=guest
spring.rabbitmq.password=<PASSWORD>
queue.name.order.create=LOCAL_ORDER_CREATE<file_sep>package dev.lucasdeabreu.paymentservice.domain;
/**
 * Lifecycle states of a payment. NOTE(review): "PAYED" is a misspelling of
 * "PAID", but the name is serialized/persisted as a string elsewhere, so
 * renaming it would be a breaking change - left as is.
 */
public enum PaymentStatus {
PENDING, PAYED, REJECTED;
}
<file_sep>package dev.lucasdeabreu.paymentservice.domain;
/** Lifecycle states of an order as mirrored by payment-service. */
public enum OrderStatus {
NEW, PROCESSING, COMPLETED, REFUND
}
<file_sep>spring.profiles.active=@spring.profiles.active@
server.port=8082
server.servlet.context-path=/api
spring.application.name=payment-service
spring.banner.location=classpath:banner.txt
<file_sep>package dev.lucasdeabreu.orderservice.controller.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.validation.constraints.DecimalMin;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import java.math.BigDecimal;
/**
 * Request/response payload for a single order line item.
 * Bean Validation constraints document the accepted ranges; they are only
 * enforced where the enclosing DTO is validated (e.g. via {@code @Valid}).
 */
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Data
public class OrderItemDTO {
private Long id;
@NotEmpty
private String description;
// Non-negative monetary amount; BigDecimal avoids floating-point rounding.
@NotNull
@DecimalMin("0.00")
private BigDecimal unitPrice;
// At least one unit per line item.
@Min(1)
@NotNull
private Integer quantity;
}
<file_sep>package dev.lucasdeabreu.paymentservice.repository;
import dev.lucasdeabreu.paymentservice.domain.Order;
import org.springframework.data.jpa.repository.JpaRepository;
/** Spring Data repository for the local {@link Order} read model (no custom queries). */
public interface OrderRepository extends JpaRepository<Order, Long> {
}
<file_sep>package dev.lucasdeabreu.orderservice.controller;
import dev.lucasdeabreu.orderservice.controller.dto.OrderDTO;
import dev.lucasdeabreu.orderservice.controller.dto.mapper.OrderDTOMapper;
import dev.lucasdeabreu.orderservice.service.OrderService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoint for creating orders. DTO <-> entity conversion is delegated
 * to {@link OrderDTOMapper}; persistence to {@link OrderService}.
 */
@RestController
@RequestMapping("/orders")
public class OrderController {
private final OrderService orderService;
private final OrderDTOMapper mapper;
// Constructor injection (no @Autowired needed on a single constructor).
public OrderController(OrderService orderService, OrderDTOMapper mapper) {
this.orderService = orderService;
this.mapper = mapper;
}
/**
 * Creates a new order and returns 201 Created with the saved resource.
 * NOTE(review): OrderDTO's fields carry Bean Validation constraints, but the
 * parameter lacks {@code @Valid}, so they are never enforced here - confirm
 * whether validation was intended. Also consider parameterizing the raw
 * {@code ResponseEntity} return type.
 */
@PostMapping
public ResponseEntity createOrder(@RequestBody OrderDTO orderDTO) {
return ResponseEntity.status(HttpStatus.CREATED)
.body(mapper.orderToDto(orderService.save(mapper.dtoToOrder(orderDTO))));
}
}
<file_sep>package dev.lucasdeabreu.paymentservice.controller;
import dev.lucasdeabreu.paymentservice.controller.dto.PaymentDTO;
import dev.lucasdeabreu.paymentservice.controller.dto.mapper.PaymentDTOMapper;
import dev.lucasdeabreu.paymentservice.service.PaymentService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoint for paying orders. DTO <-> entity conversion is delegated
 * to {@link PaymentDTOMapper}; processing to {@link PaymentService}.
 */
@RestController
@RequestMapping("/payments")
public class PaymentController {
private final PaymentService paymentService;
private final PaymentDTOMapper mapper;
// Constructor injection (no @Autowired needed on a single constructor).
public PaymentController(PaymentService paymentService, PaymentDTOMapper mapper) {
this.paymentService = paymentService;
this.mapper = mapper;
}
/**
 * Registers a payment for an order and returns 201 Created with the saved
 * resource. NOTE(review): the request body is not annotated with
 * {@code @Valid}; if PaymentDTO declares validation constraints they are
 * not enforced here. Consider parameterizing the raw {@code ResponseEntity}.
 */
@PostMapping
public ResponseEntity payOrder(@RequestBody PaymentDTO paymentDTO) {
return ResponseEntity.status(HttpStatus.CREATED)
.body(mapper.paymentToDto(paymentService.save(mapper.dtoToPayment(paymentDTO))));
}
}
<file_sep>FROM openjdk:11
ADD ["target/payment-service-0.0.1-SNAPSHOT.jar", "app.jar"]
EXPOSE 8082
# Touch the jar so its timestamp is current inside the image.
RUN sh -c 'touch /app.jar'
# Fix: the entropy source was misspelled "/dev/./udockerandom" (compare the
# order-service image, which uses "/dev/./urandom"); point java.security.egd
# at the real non-blocking urandom device.
ENTRYPOINT [ "sh", "-c", "java -Djava.security.egd=file:/dev/./urandom -Dspring.profiles.active=docker -jar /app.jar" ]
"Markdown",
"Java",
"Dockerfile",
"INI"
] | 14 | INI | lucasdeabreu/eventual-consistency-example | 300537432c230f274e8da8ca6e0ddb3b5fc3e3e5 | a8b7c3569144e6c768b807a95d54e3105ad4b1ff |
refs/heads/master | <file_sep>This is PeiMing's industrial design portfolio
<file_sep>var BrowserDetect = {
init: function () {
this.browser = this.searchString(this.dataBrowser) || "An unknown browser";
this.version = this.searchVersion(navigator.userAgent)
|| this.searchVersion(navigator.appVersion)
|| "an unknown version";
this.OS = this.searchString(this.dataOS) || "an unknown OS";
},
searchString: function (data) {
for (var i=0;i<data.length;i++) {
var dataString = data[i].string;
var dataProp = data[i].prop;
this.versionSearchString = data[i].versionSearch || data[i].identity;
if (dataString) {
if (dataString.indexOf(data[i].subString) != -1)
return data[i].identity;
}
else if (dataProp)
return data[i].identity;
}
},
searchVersion: function (dataString) {
var index = dataString.indexOf(this.versionSearchString);
if (index == -1) return;
return parseFloat(dataString.substring(index+this.versionSearchString.length+1));
},
dataBrowser: [
{
string: navigator.userAgent,
subString: "Chrome",
identity: "Chrome"
},
{ string: navigator.userAgent,
subString: "OmniWeb",
versionSearch: "OmniWeb/",
identity: "OmniWeb"
},
{
string: navigator.vendor,
subString: "Apple",
identity: "Safari",
versionSearch: "Version"
},
{
prop: window.opera,
identity: "Opera",
versionSearch: "Version"
},
{
string: navigator.vendor,
subString: "iCab",
identity: "iCab"
},
{
string: navigator.vendor,
subString: "KDE",
identity: "Konqueror"
},
{
string: navigator.userAgent,
subString: "Firefox",
identity: "Firefox"
},
{
string: navigator.vendor,
subString: "Camino",
identity: "Camino"
},
{ // for newer Netscapes (6+)
string: navigator.userAgent,
subString: "Netscape",
identity: "Netscape"
},
{
string: navigator.userAgent,
subString: "MSIE",
identity: "Explorer",
versionSearch: "MSIE"
},
{
string: navigator.userAgent,
subString: "Gecko",
identity: "Mozilla",
versionSearch: "rv"
},
{ // for older Netscapes (4-)
string: navigator.userAgent,
subString: "Mozilla",
identity: "Netscape",
versionSearch: "Mozilla"
}
],
dataOS : [
{
string: navigator.platform,
subString: "Win",
identity: "Windows"
},
{
string: navigator.platform,
subString: "Mac",
identity: "Mac"
},
{
string: navigator.userAgent,
subString: "iPhone",
identity: "iPhone/iPod"
},
{
string: navigator.platform,
subString: "Linux",
identity: "Linux"
}
]
};
BrowserDetect.init();
var browser = BrowserDetect.browser;
// DOM-ready entry point for the whole page.
$(function () {
// Preload each project's image strip into its section
// (directory, filename prefix, image count, target element).
preload('img/roku-content-pic', 'ID_Roku_', 14, '#roku');
preload('img/amc-content-pic', 'AMC_', 26, '#amc');
preload('img/thesis-content-pic', 'Thesis_L_', 36, '#thesis');
preload('img/skillset-content-pic', 'SkillSet_', 19, '#skill');
// Initializes the slitslider-based project carousel: wires the prev/next
// arrows and the dot navigation, keeping the active dot in sync with the
// current slide.
function projectSlideFn() {
var Page = (function() {
var $navArrows = $( '#nav-arrows' ),
$nav = $( '#nav-dots > span' ),
slitslider = $( '#slider' ).slitslider( {
// Highlight the dot of the slide we are moving to.
onBeforeChange : function( slide, pos ) {
$nav.removeClass( 'nav-dot-current' );
$nav.eq( pos ).addClass( 'nav-dot-current' );
}
} ),
init = function() {
initEvents();
},
initEvents = function() {
// add navigation events
$navArrows.children( ':last' ).on( 'click', function() {
slitslider.next();
return false;
} );
$navArrows.children( ':first' ).on( 'click', function() {
slitslider.previous();
return false;
} );
// Dot i jumps straight to slide i+1 (slides are 1-indexed).
$nav.each( function( i ) {
$( this ).on( 'click', function( event ) {
var $dot = $( this );
if( !slitslider.isActive() ) {
$nav.removeClass( 'nav-dot-current' );
$dot.addClass( 'nav-dot-current' );
}
slitslider.jump( i + 1 );
return false;
} );
} );
};
return { init : init };
})();
Page.init();
/**
 * Notes:
 *
 * example how to add items:
 */
/*
var $items = $('<div class="sl-slide sl-slide-color-2" data-orientation="horizontal" data-slice1-rotation="-5" data-slice2-rotation="10" data-slice1-scale="2" data-slice2-scale="1"><div class="sl-slide-inner bg-1"><div class="sl-deco" data-icon="t"></div><h2>some text</h2><blockquote><p>bla bla</p><cite><NAME></cite></blockquote></div></div>');
// call the plugin's add method
ss.add($items);
*/
}
// Top-menu navigation: swap the visible .main-content section for the
// About page or the landing page.
function menuBehavior() {
$('#about-link').on('click', function () {
$('.main-content').hide();
$('#about').fadeIn();
});
$('#home-link').on('click', function () {
$('.main-content').hide();
$('#landing').fadeIn();
});
}
// Clicking the landing image opens the project slide deck.
function landingPageBehavior() {
var landingImg = $('.landing').find('img');
landingImg.on('click', function () {
$('.main-content').hide();
$('#project-slide').fadeIn();
});
}
// Wires each project link on the slide deck to its portfolio section.
// All four links share identical behavior, differing only in the section
// they reveal, so a single helper replaces the four copy-pasted handlers.
function projectSlideLink() {
    // Opens the given portfolio section and turns the footer into a
    // "back to projects" button while the section is visible.
    function bindProjectLink(linkId, sectionId) {
        $(linkId).on('click', function () {
            $('.body-bg').addClass('landing');
            if (!$('#main').hasClass('portfolio_main')) $('#main').addClass('portfolio_main');
            $('.main-content').hide();
            // Trigger 'resize' so the section's image strip width is recomputed.
            $(sectionId).fadeIn().trigger('resize');
            $('#footer').addClass('project-show').on('click', function () {
                $('.main-content').hide();
                $('#project-slide').fadeIn();
            });
        });
    }
    bindProjectLink('#roku-link', '#roku');
    bindProjectLink('#thesis-link', '#thesis');
    bindProjectLink('#amc-link', '#amc');
    bindProjectLink('#skill-link', '#skill');
}
// Preloads a numbered sequence of JPGs into a section and grows the
// section's width as each image finishes loading.
//   picDir   - directory under ./img containing the sequence
//   picName  - filename prefix (e.g. 'AMC_' -> AMC_01.jpg, AMC_02.jpg, ...)
//   numOfPic - how many images to load
//   goalID   - selector of the destination container
function preload(picDir, picName, numOfPic, goalID) {
// Build the path list; indices 1-9 are zero-padded to two digits.
var arrayOfImages = []
for (var i = 0; i < numOfPic; i++) {
if (i >= 9) {
arrayOfImages.push('./' + picDir + '/' + picName + (i + 1).toString() + '.jpg');
}
else {
arrayOfImages.push('./' + picDir + '/' + picName + '0' + (i + 1).toString() + '.jpg');
}
}
var totalWidth = 0;
// Append each <img> and widen the container as loads complete; the
// container width is the running sum of loaded image widths.
$(arrayOfImages).each(function () {
var newImg = $('<img />').attr({
'class' : 'portfolio-pic',
'src' : this
}).appendTo($(goalID));
newImg.load(function () {
totalWidth += $(this).width();
// Firefox renders the strip slightly wider; compensate by 24px.
if (browser === 'Firefox') {
$(goalID).css({
'width' : totalWidth - 24.0
});
} else {
$(goalID).css({
'width' : totalWidth
});
}
});
});
}
// Picks the page's base font size from the current viewport width:
// >=967px -> 14px, >=845px -> 12px, otherwise 10px.
function scaleFont() {
    var width = $(window).width();
    var size;
    if (width >= 967) {
        size = '14px';
    } else if (width >= 845) {
        size = '12px';
    } else {
        size = '10px';
    }
    $('body').css({ 'font-size': size });
}
// Recomputes layout after a window resize: re-sums each portfolio strip's
// image widths, re-centers the landing image, and re-tucks the menu.
function resizeWidthAdjust () {
$('.portfolio').each(function () {
var totalWidth = 0;
// console.log('width: ', firstImg[0].width, ', height: ', firstImg[0].height);
// var firstImg = $(this).find('img').eq(0);
// console.log('first ratio: ', firstImg[0].width / firstImg[0].height);
$(this).find('img').each(function () {
totalWidth += $(this).width();
// var widthToHeightRaito = this.width / this.height;
// totalWidth += $(this).height() * widthToHeightRaito;
});
// Same Firefox width quirk handled in preload(); here the widths match.
if (browser === 'Firefox') { $(this).css({'width' : (totalWidth)}); }
else { $(this).css({'width' : totalWidth}); }
});
$('#c-height').text($('#main').height());
// Vertically center the landing image by offsetting half its height.
var landingImg = $('.landing').find('img');
landingImg.css('margin-top', landingImg.height() / -2.0);
// Keep the menu tucked off-screen by 3.2x the first item's width.
var menu = $('#menu');
menu.css('margin-right', -menu.find('li').eq(0).outerWidth()*3.2);
}
// Debug helper: dumps the viewport dimensions into the footer.
// NOTE(review): not called anywhere in this file - likely dev leftover.
function heightExamine () {
var windowsHeight = $(window).height();
var windowsWidth = $(window).width();
$('#footer').text('Height: ' + windowsHeight + ', Width: ' + windowsWidth);
}
// --- one-time initialization (still inside the DOM-ready handler) ---
// Firefox-only: collapse the 2px gaps between strip images.
if (browser === 'Firefox') { $('.portfolio-pic').css('margin', '0 -1px'); }
$(window).resize(function () {
resizeWidthAdjust();
scaleFont();
console.log('resized!');
});
// Center the landing image once it has loaded (height unknown before then).
var landingImg = $('.landing').find('img');
landingImg.load(function () {
landingImg.css('margin-top', landingImg.height() / -2.0);
});
scaleFont();
landingPageBehavior();
projectSlideFn();
menuBehavior();
// Start with the menu tucked away; the first item toggles it open/closed.
var menu = $('#menu');
menu.css('margin-right', -menu.find('li').eq(0).outerWidth()*3.2);
menu.find('li').eq(0).on('click', function() {
if ($(this).hasClass('menuShow')) {
$(this).removeClass('menuShow');
menu.animate({
'margin-right': '0'}, 700);
}
else {
$(this).addClass('menuShow');
menu.animate({
'margin-right': -menu.find('li').eq(0).outerWidth()*3.2}, 700);
}
});
$('#c-height').text($('#main').height());
projectSlideLink();
});
| a596c42535ecc7825b1e2e717733eed75ba541af | [
"Markdown",
"JavaScript"
] | 2 | Markdown | peiming/peiming.github.io | 16ca0f8fb69fb4973f494bed4666871f46d7ce5f | 689516d3a2bebe790c0342e5a8f0b9dc44ef4edc |
refs/heads/master | <file_sep>
// Loads the Sell.Do tracker script and registers this page's project id.
document.write('<script src="//trkr.scdn1.secure.raxcdn.com/t/5657f9703bb2f89d12000215.js"></script>');
// NOTE(review): _selldo is declared twice; the second declaration discards
// the [{_async:true}] value above it - confirm which one was intended.
var _selldo = [{_async:true}];
var _selldo = [];
_selldo.push({project_id : "56659cb03bb2f8f46900001b"});
// Sell.Do callback fired after a lead form submits; forwards the event to
// Google Tag Manager's dataLayer (swallowing errors if GTM is absent).
window.sell_do_form_successfully_submitted = function(data, event){
try{
dataLayer.push({
'event' : 'selldo_form_submitted'
});
}catch(err){}
};
$(document).ready(function(){
$(function(){
// Show the back-to-top button only after scrolling 100px down.
$(document).on( 'scroll', function(){
if ($(window).scrollTop() > 100) {
$('.scroll-top-wrapper').fadeIn();
} else {
$('.scroll-top-wrapper').fadeOut();
}
});
$('.scroll-top-wrapper').on('click', scrollToTop);
});
// Smooth-scrolls back to the top of the page over 500ms.
// NOTE(review): verticalOffset/element/offset/offsetTop are assigned
// without var and leak as globals - likely unintentional.
function scrollToTop() {
verticalOffset = typeof(verticalOffset) != 'undefined' ? verticalOffset : 0;
element = $('body');
offset = element.offset();
offsetTop = offset.top;
$('html, body').animate({scrollTop: offsetTop}, 500, 'linear');
}
// debugger
// $(".header-section").load("header.html");
// $(".footer-content").load("footer.html");
});
| 1bb65f3553997f881c9bfa4bf1d79f670ea485a0 | [
"JavaScript"
] | 1 | JavaScript | Baskarbss/hira-clean-up | bc8c9f11cb82cf67cc20c3d67129f0beea12e40c | 2b0fb774806db148616e99a2453f883c2441b759 |
refs/heads/master | <repo_name>xwang985/wave_guitar_shop<file_sep>/client/src/actions/product_actions.js
import axios from "axios";
import {
GET_PRODUCT_BY_ARRIVAL,
GET_PRODUCT_BY_SELL,
GET_WOODS,
GET_BRANDS,
GET_PRODUCTS_TO_SHOP,
ADD_PRODUCT,
ADD_BRAND,
ADD_WOOD,
GET_GUITAR_BY_ID,
CLEAR_PRODUCT_DETAIL,
UPDATE_PRODUCT
} from "./types";
import { PRODUCT_SERVER } from "../components/utils/misc";
// Redux action creator: top 4 best-selling guitars.
// GET /guitars?sortBy=sold&order=desc&limit=4
export function getProductBySell() {
const request = axios
.get(`${PRODUCT_SERVER}/guitars?sortBy=sold&order=desc&limit=4`)
.then(res => res.data);
return { type: GET_PRODUCT_BY_SELL, payload: request };
}
// Redux action creator: 4 most recently added guitars.
export function getProductByArrival() {
const request = axios
.get(`${PRODUCT_SERVER}/guitars?sortBy=createdAt&order=desc&limit=4`)
.then(res => res.data);
return { type: GET_PRODUCT_BY_ARRIVAL, payload: request };
}
// Fetches all brands for the shop filters / admin forms.
export function getBrands() {
const request = axios.get(`${PRODUCT_SERVER}/brands`).then(res => res.data);
return { type: GET_BRANDS, payload: request };
}
// Creates a brand and returns the previous list with the new brand appended,
// so the reducer can replace state without refetching.
export function addBrand(dataToSubmit, previousBrands) {
const request = axios
.post(`${PRODUCT_SERVER}/brand`, dataToSubmit)
.then(res => {
let brands = [...previousBrands, res.data.brand];
return { success: res.data.success, brands };
});
return { type: ADD_BRAND, payload: request };
}
// Fetches all wood types for the shop filters / admin forms.
export function getWoods() {
const request = axios.get(`${PRODUCT_SERVER}/woods`).then(res => res.data);
return { type: GET_WOODS, payload: request };
}
// Creates a wood type; same append-to-previous pattern as addBrand.
export function addWood(dataToSubmit, previousWoods) {
const request = axios
.post(`${PRODUCT_SERVER}/wood`, dataToSubmit)
.then(res => {
let woods = [...previousWoods, res.data.wood];
return { success: res.data.success, woods };
});
return { type: ADD_WOOD, payload: request };
}
// Paged/filtered shop query. New results are appended to previousState to
// support the "Load more" UI; `size` is the count of the newly fetched page.
export function getProductsToShop(
skip,
limit,
filters = [],
previousState = []
) {
const data = { limit, skip, filters };
const request = axios.post(`${PRODUCT_SERVER}/shop`, data).then(res => {
let newState = [...previousState, ...res.data.guitars];
return { size: res.data.size, guitars: newState };
});
return { type: GET_PRODUCTS_TO_SHOP, payload: request };
}
// Admin: create a new guitar.
export function addProduct(dataToSubmit) {
const request = axios
.post(`${PRODUCT_SERVER}/guitar`, dataToSubmit)
.then(res => res.data);
return { type: ADD_PRODUCT, payload: request };
}
// Single-guitar detail fetch; the API returns an array, so unwrap element 0.
export function getGuitarById(id) {
const request = axios
.get(`${PRODUCT_SERVER}/guitars_by_id?id=${id}&type=single`)
.then(res => res.data[0]);
return { type: GET_GUITAR_BY_ID, payload: request };
}
// Clears the product-detail slice when leaving the detail page.
export function clearProductDetail() {
return { type: CLEAR_PRODUCT_DETAIL, payload: "" };
}
// Admin: update an existing guitar.
export function updateProduct(dataToSubmit) {
const request = axios
.post(`${PRODUCT_SERVER}/update_product`, dataToSubmit)
.then(res => res.data);
return { type: UPDATE_PRODUCT, payload: request };
}
<file_sep>/client/src/components/userDashboard/index.js
import React from "react";
import UserLayout from "../../hoc/UserLayout";
import Button from "../utils/Button";
import HistoryBlock from "../utils/user/HistoryBlock";
// User dashboard page: shows the account's basic info with an edit link,
// and the purchase history panel when any history exists.
// Expects `user.userData` to be populated (shape: name, lastname, email,
// history) - presumably by the auth flow; confirm against the caller.
const UserDashboard = ({ user }) => {
const { name, lastname, email, history } = user.userData;
return (
<UserLayout>
<>
<div className="user_nfo_panel">
<h1>User Information</h1>
<>
<span>{name}</span>
<span>{lastname}</span>
<span>{email}</span>
</>
<Button
type="default"
linkTo="/user/user_profile"
title="Edit Information"
/>
</div>
{history && (
<div className="user_nfo_panel">
<h1>Purchase History</h1>
<div className="user_product_block_wrapper">
<HistoryBlock history={history} />
</div>
</div>
)}
</>
</UserLayout>
);
};
export default UserDashboard;
<file_sep>/client/src/components/shop/LoadMoreCards.js
import React from "react";
import CardBlockShop from "../utils/card/CardBlockShop";
// Shop result grid plus a "Load more" link. The link is shown only while
// the last fetched page was full (size >= limit), i.e. more results may
// remain; clicking it delegates to the parent's loadMore callback.
const LoadMoreCards = ({ grid, list, limit, size, loadMore }) => {
return (
<>
<CardBlockShop grid={grid} list={list} />
{size >= limit && (
<div className="load_more_container">
<span onClick={loadMore}>Load more</span>
</div>
)}
</>
);
};
export default LoadMoreCards;
<file_sep>/server/server.js
const express = require("express");
const bodyParser = require("body-parser");
const cookieParser = require("cookie-parser");
const formidable = require("express-formidable");
const cloudinary = require("cloudinary");
const async = require("async");
const moment = require("moment");
const app = express();
const mongoose = require("mongoose");
require("dotenv").config();
mongoose.Promise = global.Promise;
// mongoose.connect(process.env.DATABASE);
mongoose.connect(process.env.MONGODB_URI);
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(cookieParser());
//for heroku
app.use(express.static("client/build"));
// Set up cloudinary account
cloudinary.config({
cloud_name: process.env.CLOUD_NAME,
api_key: process.env.CLOUD_API_KEY,
api_secret: process.env.CLOUD_API_SECRET
});
// Models
const { User } = require("./models/user");
const { Brand } = require("./models/brand");
const { Wood } = require("./models/wood");
const { Guitar } = require("./models/guitar");
const { Payment } = require("./models/payment");
const { Site } = require("./models/site");
//Middleware
const { auth } = require("./middleware/auth");
const { admin } = require("./middleware/admin");
mongoose.set("useFindAndModify", false);
//Utils
const { sendEmail } = require("./utils/mail/index");
//===========================================
// Guitar
//===========================================
// get produts on the shop page
// Filtered, paginated product query for the shop page.
// Body: { order, sortBy, limit, skip, filters: { brand, fret, wood, price } }.
app.post("/api/product/shop", (req, res) => {
  let order = req.body.order ? req.body.order : "asc";
  let sortBy = req.body.sortBy ? req.body.sortBy : "_id";
  let limit = req.body.limit ? parseInt(req.body.limit, 10) : 100;
  // Bug fix: `skip` became NaN when the client omitted it, which made
  // the Mongo query fail. Default to the first page instead.
  let skip = parseInt(req.body.skip, 10);
  if (Number.isNaN(skip)) skip = 0;
  // Build the find() arguments from the non-empty filter categories.
  let findArgs = {};
  for (let key in req.body.filters) {
    if (req.body.filters[key].length === 0) continue;
    if (key === "price") {
      // price arrives as a [min, max] pair → translate to a range query
      findArgs[key] = {
        $gte: req.body.filters[key][0],
        $lte: req.body.filters[key][1]
      };
    } else {
      findArgs[key] = req.body.filters[key];
    }
  }
  // Only published products are visible in the shop.
  findArgs["publish"] = true;
  Guitar.find(findArgs)
    .populate("brand")
    .populate("wood")
    .sort([[sortBy, order]])
    .skip(skip)
    .limit(limit)
    .exec((err, guitars) => {
      if (err) return res.status(400).send(err);
      res.status(200).json({ size: guitars.length, guitars, findArgs });
    });
});
// Guitar register
// Create a new guitar document (admin only).
app.post("/api/product/guitar", auth, admin, (req, res) => {
  const newGuitar = new Guitar(req.body);
  newGuitar.save((saveErr, savedDoc) => {
    if (saveErr) {
      return res.json({ success: false, err: saveErr });
    }
    return res.status(200).json({ success: true });
  });
});
// Get guitar by arrival
// sortBy=createdAt&order=desc&limit=4&skip=5
// List guitars, optionally sorted/limited via the query string,
// e.g. ?sortBy=createdAt&order=desc&limit=4.
app.get("/api/product/guitars", (req, res) => {
  const order = req.query.order || "asc";
  const sortBy = req.query.sortBy || "_id";
  const limit = req.query.limit ? parseInt(req.query.limit) : 100;
  Guitar.find()
    .populate("brand")
    .populate("wood")
    .sort([[sortBy, order]])
    .limit(limit)
    .exec((findErr, guitars) => {
      if (findErr) return res.status(400).send(findErr);
      res.status(200).send(guitars);
    });
});
// Get guitar by id
// Fetch guitar(s) by id. With ?type=array the `id` param is a
// comma-separated list and an array of documents is returned.
app.get("/api/product/guitars_by_id", (req, res) => {
  let type = req.query.type;
  let items = req.query.id;
  if (type === "array") {
    let ids = items.split(",");
    items = ids.map(id => mongoose.Types.ObjectId(id));
  }
  Guitar.find({ _id: { $in: items } })
    .populate("brand")
    .populate("wood")
    .exec((err, doc) => {
      // Bug fix: query errors were silently ignored and `undefined`
      // was sent with a 200 status.
      if (err) return res.status(400).send(err);
      res.status(200).send(doc);
    });
});
//update guitar
// Update an existing guitar, matched by its name.
app.post("/api/product/update_product", auth, (req, res) => {
  const query = { name: req.body.name };
  const changes = { $set: req.body };
  Guitar.findOneAndUpdate(query, changes, { new: true }, (updateErr, doc) => {
    if (updateErr) return res.json({ success: false, err: updateErr });
    res.status(200).json({ success: true });
  });
});
//===========================================
// Woods
//===========================================
// Wood register
// Register a new wood type (admin only).
app.post("/api/product/wood", auth, admin, (req, res) => {
  const newWood = new Wood(req.body);
  newWood.save((saveErr, savedWood) => {
    if (saveErr) return res.json({ success: false, err: saveErr });
    res.status(200).json({ success: true, wood: savedWood });
  });
});
// Get wood
// Return every registered wood type.
app.get("/api/product/woods", (req, res) => {
  Wood.find({}, (findErr, woods) => {
    if (findErr) return res.status(400).send(findErr);
    res.status(200).send(woods);
  });
});
//===========================================
// BRAND
//===========================================
// Brand register
// Register a new brand (admin only).
app.post("/api/product/brand", auth, admin, (req, res) => {
  const newBrand = new Brand(req.body);
  newBrand.save((saveErr, savedBrand) => {
    if (saveErr) return res.json({ success: false, err: saveErr });
    res.status(200).json({ success: true, brand: savedBrand });
  });
});
// Get brand
// Return every registered brand.
app.get("/api/product/brands", (req, res) => {
  Brand.find({}, (findErr, brands) => {
    if (findErr) return res.status(400).send(findErr);
    res.status(200).send(brands);
  });
});
//===========================================
// USER
//===========================================
// auth
// Return the authenticated user's profile; the `auth` middleware has
// already resolved the cookie token and populated req.user.
app.get("/api/users/auth", auth, (req, res) => {
  const user = req.user;
  res.status(200).json({
    isAdmin: user.role !== 0,
    isAuth: true,
    email: user.email,
    name: user.name,
    lastname: user.lastname,
    role: user.role,
    cart: user.cart,
    cartTotal: user.cartTotal,
    history: user.history
  });
});
// User Register
// Create a user account and send the welcome e-mail.
app.post("/api/users/register", (req, res) => {
  const newUser = new User(req.body);
  newUser.save((saveErr, savedUser) => {
    if (saveErr) return res.json({ success: false, err: saveErr });
    sendEmail(savedUser.email, savedUser.name, null, "welcome");
    return res.status(200).json({ registerSuccess: true });
  });
});
// User Login
// find the email, compare the password and generate a token
// Log a user in: find the account by e-mail, compare the password and,
// on success, store a fresh auth token in the `w_auth` cookie.
app.post("/api/users/login", (req, res) => {
  User.findOne({ email: req.body.email }, (err, user) => {
    if (!user)
      return res.json({
        loginSuccess: false,
        message: "Auth failed. Email not found"
      });
    user.comparePassword(req.body.password, (err, isMatch) => {
      if (!isMatch)
        return res.json({ loginSuccess: false, message: "Wrong password" });
      user.generateToken((err, user) => {
        // Bug fix: without this `return`, a second response was sent
        // after the 400, triggering "headers already sent".
        if (err) return res.status(400).send(err);
        res
          .cookie("w_auth", user.token)
          .status(200)
          .json({ loginSuccess: true });
      });
    });
  });
});
//User Logout
// Log out by clearing the stored auth token on the user document.
app.get("/api/users/logout", auth, (req, res) => {
  const clearToken = { token: "" };
  User.findOneAndUpdate({ _id: req.user._id }, clearToken, (updateErr, doc) => {
    if (updateErr) return res.json({ logoutSuccess: false, err: updateErr });
    return res.status(200).send({ logoutSuccess: true });
  });
});
//
// upload image
// Upload an image to Cloudinary (admin only). `formidable()` parses the
// multipart body and exposes the temp file at req.files.file.path.
// NOTE(review): the callback is passed *before* the options object —
// this matches the legacy cloudinary v1 upload signature; confirm the
// installed SDK version before reordering.
app.post("/api/users/uploadimage", auth, admin, formidable(), (req, res) => {
  cloudinary.uploader.upload(
    req.files.file.path,
    result => {
      res.status(200).send({ public_id: result.public_id, url: result.url });
    },
    {
      // Timestamp as public id; "auto" lets Cloudinary detect the type.
      public_id: `${Date.now()}`,
      resource_type: "auto"
    }
  );
});
//add cart
// Add a product to the authenticated user's cart. If the product is
// already present its quantity is incremented, otherwise a new cart
// entry is pushed. cartTotal counts total items, not distinct lines.
// (Fixed: removed a leftover `console.log` debug statement.)
app.post("/api/users/addtocart", auth, (req, res) => {
  User.findOne({ _id: req.user._id }, (err, doc) => {
    // Loose == on purpose: cart ids are ObjectIds, the query param is a
    // string.
    let duplicate = false;
    for (let item of doc.cart) {
      if (item.id == req.query.productId) {
        duplicate = true;
        break;
      }
    }
    if (duplicate) {
      // Product already in cart → bump the matched line's quantity.
      User.findOneAndUpdate(
        {
          _id: req.user._id,
          "cart.id": mongoose.Types.ObjectId(req.query.productId)
        },
        { $inc: { "cart.$.quantity": 1, cartTotal: 1 } },
        { new: true },
        (err, file) => {
          if (err) return res.json({ success: false, err });
          res.status(200).send({ cart: file.cart, cartTotal: file.cartTotal });
        }
      );
    } else {
      // First time this product is added → push a fresh cart entry.
      User.findOneAndUpdate(
        { _id: req.user._id },
        {
          $push: {
            cart: {
              id: mongoose.Types.ObjectId(req.query.productId),
              quantity: 1,
              date: Date.now()
            }
          },
          $inc: { cartTotal: 1 }
        },
        { new: true },
        (err, doc) => {
          if (err) return res.json({ success: false, err });
          res.status(200).send({ cart: doc.cart, cartTotal: doc.cartTotal });
        }
      );
    }
  });
});
//Remove item from cart
// Remove one cart line entirely and decrement cartTotal by the line's
// quantity, then return the refreshed cart detail for the client.
app.get("/api/users/removefromcart", auth, (req, res) => {
  User.findOneAndUpdate(
    { _id: req.user._id },
    {
      $pull: { cart: { id: mongoose.Types.ObjectId(req.query._id) } },
      // Unary minus coerces the qty query string to a negative number.
      $inc: { cartTotal: -req.query.qty }
    },
    { new: true },
    (err, doc) => {
      // Bug fix: a failed update previously threw on `doc.cart` below
      // instead of reporting the error to the client.
      if (err || !doc) return res.json({ success: false, err });
      let cart = doc.cart;
      let cartTotal = doc.cartTotal;
      // Re-resolve the remaining cart ids into full product documents.
      let array = cart.map(item => {
        return mongoose.Types.ObjectId(item.id);
      });
      Guitar.find({ _id: { $in: array } })
        .populate("brand")
        .populate("wood")
        .exec((err, cartDetail) => {
          return res.status(200).json({
            cartDetail,
            cart,
            cartTotal
          });
        });
    }
  );
});
//Order success
// Finalize a paid order: append the purchased items to the user's
// history, persist a Payment record, empty the cart, and bump each
// product's `sold` counter.
app.post("/api/users/ordersuccess", auth, (req, res) => {
  //user history: one entry per purchased cart line
  let history = [];
  let transactionData = {};
  req.body.cartDetail.forEach(item =>
    history.push({
      dateOfPurchase: Date.now(),
      name: item.name,
      // NOTE(review): "breand" looks like a typo for "brand", but the
      // key is persisted in user history / Payment documents — confirm
      // no consumer (e.g. e-mail templates) reads "breand" before
      // renaming it.
      breand: item.brand,
      id: item._id,
      price: item.price,
      quantity: item.quantity,
      paymentId: req.body.paymentData.paymentId
    })
  );
  //payment dash: who paid, raw PayPal data, and what was bought
  transactionData.user = {
    id: req.user._id,
    name: req.user.name,
    lastname: req.user.lastname,
    email: req.user.email
  };
  transactionData.data = req.body.paymentData;
  transactionData.product = history;
  //update user info: archive the purchase and clear the cart
  User.findOneAndUpdate(
    { _id: req.user._id },
    { $push: { history: history }, $set: { cart: [], cartTotal: 0 } },
    { new: true },
    (err, user) => {
      if (err) return res.json({ success: false, err });
      const payment = new Payment(transactionData);
      payment.save((err, doc) => {
        if (err) return res.json({ success: false, err });
        let products = [];
        doc.product.forEach(item => {
          products.push({ id: item.id, quantity: item.quantity });
        });
        // Increment each product's sold count, one at a time.
        // NOTE(review): Model.update is deprecated in newer mongoose —
        // consider updateOne when upgrading.
        async.eachSeries(
          products,
          (item, callback) => {
            Guitar.update(
              { _id: item.id },
              { $inc: { sold: item.quantity } },
              { new: false },
              callback
            );
          },
          err => {
            if (err) return res.json({ success: false, err });
            res.status(200).json({
              success: true,
              cart: user.cart,
              cartDetail: []
            });
          }
        );
      });
    }
  );
});
//update user
// Overwrite profile fields on the authenticated user.
app.post("/api/users/update_profile", auth, (req, res) => {
  User.findOneAndUpdate(
    { _id: req.user._id },
    { $set: req.body },
    { new: true },
    (updateErr, updatedUser) => {
      if (updateErr) return res.json({ success: false, err: updateErr });
      res.status(200).json({ success: true });
    }
  );
});
//reset user
// Start a password reset: generate a reset token on the account and
// e-mail the reset link to the user.
app.post("/api/users/reset_user", (req, res) => {
  User.findOne({ email: req.body.email }, (findErr, account) => {
    if (account == null) return res.json({ success: false });
    account.generateResetToken((tokenErr, tokenUser) => {
      if (tokenErr) return res.json({ success: false });
      sendEmail(tokenUser.email, tokenUser.name, null, "reset_password", tokenUser);
      res.json({ success: true });
    });
  });
});
//reset password
// Complete a password reset: the token must match and must not have
// expired (resetTokenExp is compared to the start of today).
app.post("/api/users/reset_password", (req, res) => {
  var today = moment()
    .startOf("day")
    .valueOf();
  User.findOne(
    { resetToken: req.body.resetToken, resetTokenExp: { $gte: today } },
    (err, user) => {
      if (!user)
        return res.json({
          success: false
        });
      // Bug fix: this assignment held a redaction placeholder
      // ("<PASSWORD>"), which is not valid JavaScript. The reset form
      // submits the new password as req.body.password (see
      // ResetPassword.js), so restore that. Also dropped a leftover
      // console.log of the user document (it leaks the password hash).
      user.password = req.body.password;
      user.resetToken = "";
      user.resetTokenExp = "";
      user.save((err, doc) => {
        if (err) return res.status(400).json({ success: false, err });
        return res.status(200).json({
          success: true
        });
      });
    }
  );
});
//===========================================
// Cloudindary
//===========================================
// remove image from cloudinary
// Delete an uploaded image from Cloudinary by its public id (admin only).
app.get("/api/users/removeimage", auth, admin, (req, res) => {
  const publicId = req.query.public_id;
  cloudinary.uploader.destroy(publicId, (error, result) => {
    if (error) return res.json({ success: false });
    res.status(200).send("image removed successfully");
  });
});
//===========================================
// Site
//===========================================
//get site data
// Return the single Site document's info block.
app.get("/api/site/site_data", (req, res) => {
  Site.find({}, (err, site) => {
    // Bug fix: also fail gracefully when no Site document exists yet —
    // `site[0].siteInfo` used to throw on an empty collection.
    if (err || !site.length) return res.json({ success: false });
    res.status(200).send(site[0].siteInfo);
  });
});
// Replace the site-wide info block (admin only).
app.post("/api/site/site_data", auth, admin, (req, res) => {
  const update = { $set: { siteInfo: req.body } };
  Site.findOneAndUpdate({ name: "Site" }, update, { new: true }, (err, doc) => {
    if (err) return res.json({ success: false, err });
    return res.status(200).send({
      success: true,
      siteInfo: doc.siteInfo
    });
  });
});
//===========================================
// Default
//===========================================
// only for production
// In production, serve the built React app for any unmatched route so
// client-side routing survives a hard refresh.
if (process.env.NODE_ENV === "production") {
  const path = require("path");
  app.get("/*", (req, res) => {
    // Bug fix: res.sendfile is the deprecated spelling; use sendFile.
    res.sendFile(path.resolve(__dirname, "../client", "build", "index.html"));
  });
}
const port = process.env.PORT || 3002;
app.listen(port, () => {
  console.log(`Server Running at ${port}`);
});
<file_sep>/client/src/Routes.js
import React from "react";
import { Route, Switch } from "react-router-dom";
import Home from "./components/home";
import ProductPage from "./components/productPage";
import RegisterLogin from "./components/register_login";
import Register from "./components/register_login/Register";
import Shop from "./components/shop";
import UserDashboard from "./components/userDashboard";
import AddProducts from "./components/userDashboard/Admin/AddProducts";
import ManageCategories from "./components/userDashboard/Admin/ManageCategories";
import Auth from "./hoc/Auth";
import Layout from "./hoc/Layout";
import UserCart from "./components/userDashboard/UserCart";
import UpdateProfile from "./components/userDashboard/UpdateProfile";
import ManageSite from "./components/userDashboard/Admin/ManageSite";
import ResetUser from "./components/resetUser";
import PageNotFound from "./components/utils/PageNotFound";
import ResetPassword from "./components/resetUser/ResetPassword";
// Top-level route table, wrapped in the shared Layout (header/footer).
// Every page goes through the Auth HOC; from its call sites the second
// argument appears to control login requirements (false = guest pages
// like login/register, true = authenticated pages) and the third to
// gate admin-only pages — TODO confirm against hoc/Auth.js.
const Routes = () => {
  return (
    <Layout>
      <Switch>
        <Route exact path="/" component={Auth(Home)} />
        <Route exact path="/product_detail/:id" component={Auth(ProductPage)} />
        <Route exact path="/shop" component={Auth(Shop)} />
        <Route
          exact
          path="/register_login"
          component={Auth(RegisterLogin, false)}
        />
        <Route exact path="/register" component={Auth(Register, false)} />
        <Route exact path="/reset_user" component={Auth(ResetUser, false)} />
        <Route
          exact
          path="/reset_password/:token"
          component={Auth(ResetPassword, false)}
        />
        <Route
          exact
          path="/user/dashboard"
          component={Auth(UserDashboard, true)}
        />
        <Route
          exact
          path="/user/user_profile"
          component={Auth(UpdateProfile, true)}
        />
        <Route exact path="/user/cart" component={Auth(UserCart, true)} />
        <Route
          exact
          path="/admin/add_product"
          component={Auth(AddProducts, true, true)}
        />
        <Route
          exact
          path="/admin/modify_product/:id"
          component={Auth(AddProducts, true, true)}
        />
        <Route
          exact
          path="/admin/manage_categories"
          component={Auth(ManageCategories, true, true)}
        />
        <Route
          exact
          path="/admin/site_info"
          component={Auth(ManageSite, true, true)}
        />
        {/* Catch-all: anything unmatched renders the 404 page. */}
        <Route component={Auth(PageNotFound)} />
      </Switch>
    </Layout>
  );
};
export default Routes;
<file_sep>/client/src/actions/site_actions.js
import axios from "axios";
import { GET_SITE_INFO, UPDATE_SITE_INFO } from "./types";
import { SITE_SERVER } from "../components/utils/misc";
// Redux action: fetch the site-wide info block from the server.
export function getSiteInfo() {
  const request = axios
    .get(`${SITE_SERVER}/site_data`)
    .then(response => response.data);
  return { type: GET_SITE_INFO, payload: request };
}

// Redux action: persist an updated site-wide info block.
export function updateSiteInfo(dataToSubmit) {
  const request = axios
    .post(`${SITE_SERVER}/site_data`, dataToSubmit)
    .then(response => response.data);
  return { type: UPDATE_SITE_INFO, payload: request };
}
<file_sep>/client/src/components/utils/card/CardBlockShop.js
import React from "react";
import Card from "../../utils/card/Card";
const CardBlockShop = ({ grid, list = [] }) => {
const renderCards = cards =>
cards && cards.map(card => <Card key={card._id} {...card} grid={grid} />);
return (
<div className="card_block_shop">
{!list.length && <div className="no_result">Sorry no result</div>}
{renderCards(list)}
</div>
);
};
export default CardBlockShop;
<file_sep>/client/src/actions/cloudinary.js
import axios from "axios";
// Upload an image file (multipart form data) through the server.
export const postImage = (formData, config) =>
  axios.post("/api/users/uploadimage", formData, config);

// Delete a previously uploaded image by its Cloudinary public id.
export const removeImage = id =>
  axios.get(`/api/users/removeimage?public_id=${id}`);
<file_sep>/client/src/components/shop/index.js
import React, { Component } from "react";
import PageTop from "../utils/PageTop";
import { connect } from "react-redux";
import CollapsCheckBox from "../utils/CollapseCheckBox";
import { frets, prices } from "../utils/form/FixedCategories";
import LoadMoreCards from "./LoadMoreCards";
import {
getBrands,
getWoods,
getProductsToShop
} from "../../actions/product_actions";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faBars, faTh } from "@fortawesome/free-solid-svg-icons";
import { CircularProgress } from "@material-ui/core";
class Shop extends Component {
state = {
isLoading: true,
grid: false,
limit: 6,
skip: 0,
filters: {
brand: [],
fret: [],
wood: [],
price: []
}
};
componentDidMount() {
this.props.dispatch(getBrands());
this.props.dispatch(getWoods());
this.props
.dispatch(
getProductsToShop(this.state.skip, this.state.limit, this.state.filters)
)
.then(this.setState({ isLoading: false }));
}
handleFilters = (filters, category) => {
const newFilters = { ...this.state.filters };
newFilters[category] = filters;
if (category === "price") {
let priceValue = prices.find(price => price._id === filters).array;
newFilters[category] = priceValue;
}
this.showFilteredResult(newFilters);
this.setState({ filters: newFilters });
};
showFilteredResult = filters => {
this.props
.dispatch(getProductsToShop(0, this.state.limit, filters))
.then(() => this.setState({ skip: 0 }));
};
loadMore = () => {
let skip = this.state.skip + this.state.limit;
this.props
.dispatch(
getProductsToShop(
skip,
this.state.limit,
this.state.filters,
this.props.products.toShop
)
)
.then(() => this.setState({ skip }));
};
handleGrids = () =>
this.setState({ grid: !this.state.grid ? "grid_bars" : "" });
render() {
const products = this.props.products;
return (
<>
<PageTop title="browse products" />
<div className="container">
<div className="shop_wrapper">
<div className="left">
<CollapsCheckBox
open={true}
title="Brands"
list={products.brands}
handleFilters={filters => this.handleFilters(filters, "brand")}
/>
<CollapsCheckBox
open={false}
title="Frets"
list={frets}
handleFilters={filters => this.handleFilters(filters, "fret")}
/>
<CollapsCheckBox
open={true}
title="Woods"
list={products.woods}
handleFilters={filters => this.handleFilters(filters, "wood")}
/>
<CollapsCheckBox
open={false}
title="Prices"
list={prices}
handleFilters={filters => this.handleFilters(filters, "price")}
/>
</div>
{this.state.isLoading ? (
<CircularProgress style={{ color: "#00bcd4" }} thickness={7} />
) : (
<div className="right">
<div className="shop_options">
<div className="shop_grids clear">
<div
className={`grid_btn ${!this.state.grid ? "" : "active"}`}
onClick={this.handleGrids}
>
<FontAwesomeIcon icon={faBars} />
</div>
<div
className={`grid_btn ${this.state.grid ? "" : "active"}`}
onClick={this.handleGrids}
>
<FontAwesomeIcon icon={faTh} />
</div>
</div>
</div>
<LoadMoreCards
grid={this.state.grid}
limit={this.state.limit}
list={products.toShop}
size={products.toShopSize}
loadMore={this.loadMore}
/>
</div>
)}
</div>
</div>
</>
);
}
}
const mapStateToProps = state => {
return { products: state.products };
};
export default connect(mapStateToProps)(Shop);
<file_sep>/client/src/components/utils/Button.js
import React from "react";
import { Link } from "react-router-dom";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faShoppingBag } from "@fortawesome/free-solid-svg-icons";
const Button = ({
type = "default",
linkTo,
addStyles,
title,
altClass,
runAction
}) => {
const buttons = () => {
let template = "";
switch (type) {
case "default":
template = (
<Link
className={!altClass ? "link_default" : altClass}
to={linkTo}
{...addStyles}
>
{title}
</Link>
);
break;
case "bag_link":
template = (
<div className="bag_link" onClick={runAction}>
<FontAwesomeIcon icon={faShoppingBag} />
</div>
);
break;
case "add_to_cart_link":
template = (
<div className="add_to_cart_link" onClick={runAction}>
<FontAwesomeIcon icon={faShoppingBag} />
Add to cart
</div>
);
break;
default:
template = "";
}
return template;
};
return <div className="my_link">{buttons()}</div>;
};
export default Button;
<file_sep>/client/src/components/resetUser/ResetPassword.js
import React, { Component } from "react";
import FormField from "../utils/form/FormField";
import { update, generateData, isFormValid } from "../utils/form/FormActions";
import { Dialog } from "@material-ui/core";
import { connect } from "react-redux";
import { resetPassword } from "../../actions/user_actions";
// Password reset form reached via the e-mailed /reset_password/:token
// link. Collects a new password (+ confirmation), submits it together
// with the token, and redirects to login on success.
class ResetPassword extends Component {
  state = {
    resetToken: "", // taken from the URL on mount
    formError: false, // show "check your data" banner
    formSuccess: false, // show the success dialog
    // FormField descriptors consumed by the shared form helpers.
    formData: {
      password: {
        element: "input",
        value: "",
        config: {
          name: "password",
          type: "password",
          placeholder: "Enter your password"
        },
        validation: {
          required: true
        },
        valid: false,
        touched: false,
        validationMessage: "",
        description: "password must be at least 8 characters"
      },
      passwordConfirm: {
        element: "input",
        value: "",
        config: {
          name: "passwordConfirm",
          type: "password",
          placeholder: "Please Re-type your password"
        },
        validation: {
          required: true,
          // must match the password field above
          confirm: "password"
        },
        valid: false,
        touched: false,
        validationMessage: ""
      }
    }
  };
  componentDidMount() {
    // The reset token arrives as a URL segment.
    const resetToken = this.props.match.params.token;
    this.setState({ resetToken });
  }
  // Run shared field validation on every change; clear the error banner.
  updateForm = element => {
    const newFormData = update(element, this.state.formData, "register");
    this.setState({ formError: false, formData: newFormData });
  };
  submitForm = event => {
    event.preventDefault();
    let dataToSubmit = generateData(this.state.formData, "register");
    let formIsValid = isFormValid(this.state.formData, "register");
    if (!formIsValid) {
      return this.setState({ formError: true });
    }
    this.props
      .dispatch(
        resetPassword({
          ...dataToSubmit,
          resetToken: this.state.resetToken
        })
      )
      .then(res => {
        if (!res.payload.success) {
          // Invalid/expired token: flash the error banner for 2s.
          this.setState({ formError: true }, () =>
            setTimeout(() => this.setState({ formError: false }), 2000)
          );
        } else {
          // Success: show the dialog, then redirect to login after 2s.
          this.setState({ formSuccess: true }, () => {
            setTimeout(() => {
              this.setState({ formSuccess: false });
              this.props.history.push("/register_login");
            }, 2000);
          });
        }
      });
  };
  render() {
    return (
      <div className="container">
        <form onSubmit={event => this.submitForm(event)}>
          <div className="block">
            <FormField
              id={"password"}
              formData={this.state.formData.password}
              change={element => this.updateForm(element)}
            />
          </div>
          <div className="block">
            <FormField
              id={"passwordConfirm"}
              formData={this.state.formData.passwordConfirm}
              change={element => this.updateForm(element)}
            />
          </div>
          {this.state.formError && (
            <div className="error_label">Please check your data</div>
          )}
          <button type="submit">Reset your password</button>
        </form>
        <Dialog open={this.state.formSuccess}>
          <div className="dialog_alert">
            <div>Password reset successfully</div>
            <div>Your will be redirected to login page</div>
          </div>
        </Dialog>
      </div>
    );
  }
}
export default connect()(ResetPassword);
<file_sep>/client/src/components/utils/card/Card.js
import React, { Component } from "react";
import Button from "../Button";
import { connect } from "react-redux";
import { addToCart } from "../../../actions/user_actions";
import { withRouter } from "react-router-dom";
// Single product card used by the home rows and the shop grid.
// The spread product document supplies name/price/brand/images/_id;
// `grid` switches the card into its grid-layout styling.
class Card extends Component {
  state = {};
  // First product image, or a placeholder when none were uploaded.
  renderCardImage = images => {
    return images.length ? images[0].url : "/images/image_not_available.png";
  };
  render() {
    const props = this.props;
    return (
      <div className={`card_item_wrapper ${props.grid}`}>
        <div
          className="image"
          style={{ background: `url(${this.renderCardImage(props.images)})` }}
        ></div>
        <div className="action_container">
          <div className="tags">
            <div className="brand">{props.brand.name}</div>
            <div className="name">{props.name}</div>
            <div className="price">${props.price}</div>
          </div>
          {/* description only fits the wider grid layout */}
          {props.grid && <div className="description">{props.description}</div>}
          <div className="actions">
            <div className="button_wrapp">
              <Button
                linkTo={`/product_detail/${props._id}`}
                addStyles={{ margin: "10px 0 0 0" }}
                title="View Product"
                altClass="card_link"
              />
            </div>
            <div className="button_wrapp">
              {/* guests are bounced to login instead of adding to cart */}
              <Button
                type="bag_link"
                runAction={() => {
                  props.user.userData.isAuth
                    ? this.props.dispatch(addToCart(props._id))
                    : this.props.history.push("/register_login");
                }}
              />
            </div>
          </div>
        </div>
      </div>
    );
  }
}
const mapStateToProps = state => {
  return { user: state.user };
};
export default connect(mapStateToProps)(withRouter(Card));
<file_sep>/client/src/components/productPage/index.js
import { CircularProgress } from "@material-ui/core";
import React, { Component } from "react";
import { connect } from "react-redux";
import { addToCart } from "../../actions/user_actions";
import {
clearProductDetail,
getGuitarById
} from "../../actions/product_actions";
import PageTop from "../utils/PageTop";
import ProductImg from "./ProductImg";
import ProductInfo from "./ProductInfo";
class ProductPage extends Component {
state = {
isLoading: true
};
componentDidMount() {
const id = this.props.match.params.id;
this.props.dispatch(getGuitarById(id)).then(res => {
if (!this.props.guitar) this.props.history.push("/shop");
});
this.setState({ isLoading: false });
}
componentWillUnmount() {
this.props.dispatch(clearProductDetail());
}
addToCart = id => {
this.props.dispatch(addToCart(id));
};
render() {
return (
<>
<PageTop title="product detail" />
<div className="container">
{this.props.isLoading && (
<CircularProgress style={{ color: "#00bcd4" }} thickness={7} />
)}
{!this.props.isLoading && this.props.guitar && (
<div className="product_detail_wrapper">
<div className="left">
<div style={{ width: "500px" }}>
<ProductImg detail={this.props.guitar} />
</div>
</div>
<div className="right">
<ProductInfo
detail={this.props.guitar}
user={this.props.user}
addToCart={this.addToCart}
/>
</div>
</div>
)}
</div>
</>
);
}
}
const mapStateToProps = state => {
return { guitar: state.products.guitar, user: state.user.userData };
};
export default connect(mapStateToProps)(ProductPage);
<file_sep>/client/src/components/home/index.js
import React, { Component } from "react";
import HomePromotion from "./HomePromotion";
import HomeSlider from "./HomeSlider";
import { connect } from "react-redux";
import {
getProductByArrival,
getProductBySell
} from "../../actions/product_actions";
import CardBlock from "../utils/card/CardBlock";
class Home extends Component {
componentDidMount() {
this.props.dispatch(getProductByArrival());
this.props.dispatch(getProductBySell());
}
render() {
return (
<div>
<HomeSlider />
<CardBlock list={this.props.products.bySell} title="best selling" />
<HomePromotion />
<CardBlock list={this.props.products.byArrival} title="new arrival" />
</div>
);
}
}
const mapStateToProps = state => {
return { products: state.products };
};
export default connect(mapStateToProps)(Home);
<file_sep>/client/src/components/utils/card/CardBlock.js
import React from "react";
import Card from "./Card";
const CardBlock = ({ title, list }) => {
const renderCards = list =>
list && list.map((card, i) => <Card key={i} {...card} />);
return (
<div className="card_block">
<div className="container">
{title && <div className="title">{title}</div>}
<div style={{ display: "flex", flexWrap: "wrap" }}>
{renderCards(list)}
</div>
</div>
</div>
);
};
export default CardBlock;
<file_sep>/client/src/components/productPage/ProductInfo.js
import React from "react";
import Button from "../utils/Button";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faTruck, faCheck, faTimes } from "@fortawesome/free-solid-svg-icons";
import { withRouter } from "react-router-dom";
const ProductInfo = props => {
const { detail } = props;
const showProdTage = detail => (
<div className="product_tags">
{detail.shipping && (
<div className="tag">
<FontAwesomeIcon icon={faTruck} />
<div className="tag_text">
<div>Free shipping</div>
<div>and return</div>
</div>
</div>
)}
{detail.available && (
<div className="tag">
<FontAwesomeIcon icon={faCheck} />
<div className="tag_text">
<div>Available</div>
<div>in store</div>
</div>
</div>
)}
{!detail.available && (
<div className="tag">
<FontAwesomeIcon icon={faTimes} />
<div className="tag_text">
<div>unavailable</div>
<div>Preorder only</div>
</div>
</div>
)}
</div>
);
const addCart = () => {
if (props.user.isAuth) return props.addToCart(props.match.params.id);
return props.history.push("/register_login");
};
const showProdActions = detail => (
<div className="product_actions">
<div className="price">$ {detail.price}</div>
<div className="cart">
<Button type="add_to_cart_link" runAction={addCart} />
{/* ******************* DELETE *************************/}
{/* <Button
linkTo={`/admin/modify_product/${props.match.params.id}`}
title="modify info"
/> */}
</div>
</div>
);
const showProdSpecifications = detail => (
<div className="product_specifications">
<h2>specifications</h2>
<>
<div className="item">
<strong>Frets:</strong>
{detail.frets}
</div>
<div className="item">
<strong>Wood:</strong>
{detail.wood.name}
</div>
</>
</div>
);
return (
<>
<h1>
{detail.brand.name} {detail.name}
</h1>
<p>{detail.description}</p>
{showProdTage(detail)}
{showProdActions(detail)}
{showProdSpecifications(detail)}
</>
);
};
export default withRouter(ProductInfo);
<file_sep>/client/src/components/userDashboard/UserCart.js
import { faFrown, faSmile } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import React, { Component } from "react";
import { connect } from "react-redux";
import { CircularProgress } from "@material-ui/core";
import {
getCartItem,
removeCartItem,
orderSuccess
} from "../../actions/user_actions";
import UserLayout from "../../hoc/UserLayout";
import Paypal from "../utils/Paypal";
import ProductBlock from "../utils/user/ProductBlock";
//<KEY>
// User cart page: resolves the cart's product ids into full documents,
// shows the running total, and hands checkout off to PayPal.
class UserCart extends Component {
  state = {
    loading: true, // spinner until cart detail is resolved
    showSuccess: false, // "order placed" message after checkout
    total: 0, // cart total in whole dollars
    showTotal: false // hide total/PayPal once the cart is empty
  };
  componentDidMount() {
    let cartItem = [];
    let user = this.props.user.userData;
    // Empty cart → nothing to fetch, just stop the spinner.
    if (!user.cart || !user.cart.length)
      return this.setState({ loading: false });
    user.cart.forEach(item => cartItem.push(item.id));
    this.props.dispatch(getCartItem(cartItem, user.cart)).then(() => {
      this.calculateTotal();
      this.setState({ loading: false });
    });
  }
  // Sum price * quantity over the resolved cart detail.
  calculateTotal = () => {
    let total = 0;
    let cart = this.props.user.cartDetail;
    cart.forEach(item => (total += parseInt(item.price, 10) * item.quantity));
    this.setState({ total, showTotal: true });
  };
  showNoItemMessage = () => (
    <div className="cart_no_items">
      <FontAwesomeIcon icon={faFrown} />
      <div style={{ textTransform: "uppercase" }}>You have no items</div>
    </div>
  );
  showSuccessMessage = () => (
    <div className="cart_success">
      <FontAwesomeIcon icon={faSmile} />
      <div style={{ textTransform: "uppercase" }}>
        Your order has been placed successfully
      </div>
    </div>
  );
  // Remove a cart line, then either hide the total (cart empty) or
  // recompute it from the refreshed detail.
  removeFromCart = (id, quantity) => {
    this.props.dispatch(removeCartItem(id, quantity)).then(() => {
      const { cartTotal } = this.props.user.userData;
      if (!cartTotal) return this.setState({ showTotal: false });
      this.calculateTotal();
    });
  };
  // PayPal failure/cancel hooks — intentionally no-ops for now.
  transactionError = () => {};
  transactionCancelled = () => {};
  // PayPal success: persist the order server-side, then flip the UI to
  // the success message.
  transactionSuccess = data => {
    this.props
      .dispatch(
        orderSuccess({
          cartDetail: this.props.user.cartDetail,
          paymentData: data
        })
      )
      .then(() => {
        if (this.props.user.success) {
          this.setState({ showTotal: false, showSuccess: true });
        }
      });
  };
  render() {
    const { showTotal, total, showSuccess, loading } = this.state;
    return (
      <UserLayout>
        <h1>My Cart</h1>
        {loading ? (
          <CircularProgress style={{ color: "#00bcd4" }} thickness={7} />
        ) : (
          <div className="user_cart">
            <ProductBlock
              products={this.props.user.cartDetail}
              type="cart"
              removeItem={(id, quantity) => this.removeFromCart(id, quantity)}
            />
            {showTotal && (
              <div className="user_cart_sum">Total amount: $ {total}</div>
            )}
            {!showTotal && !showSuccess && this.showNoItemMessage()}
            {!showTotal && showSuccess && this.showSuccessMessage()}
            {showTotal && (
              <div className="paypal_button_container">
                <Paypal
                  toPay={total}
                  transactionError={data => this.transactionError(data)}
                  transactionCancelled={data => this.transactionCancelled(data)}
                  onSuccess={data => this.transactionSuccess(data)}
                />
              </div>
            )}
          </div>
        )}
      </UserLayout>
    );
  }
}
const mapStateToProps = state => {
  return { user: state.user };
};
export default connect(mapStateToProps)(UserCart);
<file_sep>/client/src/components/utils/ImageLightBox.js
import React, { Component } from "react";
import Lightbox from "react-image-lightbox";
class ImageLightBox extends Component {
state = {
isOpen: true,
photoIndex: this.props.pos,
images: [...this.props.images]
};
// static getDerivedStateFromProps(props, state) {
// if (!props.images) return false;
// const images = [];
// props.images.forEach(element => images.push({ src: `${element}` }));
// return (state = { images });
// }
closeLightBox = () => {
this.props.onclose();
};
goToPrevious = () => {
this.setState({
photoIndex: this.state.currentImage - 1
});
};
goToNext = () => {
this.setState({
photoIndex: this.state.currentImage + 1
});
};
render() {
// console.log(this.sta;
const { photoIndex, images } = this.state;
return (
<Lightbox
mainSrc={images[photoIndex]}
nextSrc={images[(photoIndex + 1) % images.length]}
prevSrc={images[(photoIndex + images.length - 1) % images.length]}
onCloseRequest={this.closeLightBox}
onMovePrevRequest={this.goToPrevious}
onMoveNextRequest={this.goToNext}
/>
);
}
}
export default ImageLightBox;
<file_sep>/client/src/components/utils/CollapseCheckBox.js
import React, { Component } from "react";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faAngleUp, faAngleDown } from "@fortawesome/free-solid-svg-icons";
import {
List,
ListItem,
ListItemSecondaryAction,
ListItemText,
Checkbox,
Collapse,
Radio,
RadioGroup,
FormControlLabel
} from "@material-ui/core";
// Collapsible filter section for the shop sidebar. Renders a checkbox
// list for ordinary categories and a radio group for the special
// "Prices" category; reports selections upward via props.handleFilters.
class CollapseCheckBox extends Component {
  state = {
    open: false, // section expanded?
    checked: [], // selected ids (checkbox mode)
    value: "0" // selected price radio id (radio mode)
  };
  componentDidMount() {
    // Initial expanded/collapsed state comes from the parent.
    this.setState({ open: this.props.open });
  }
  handleClick = () => {
    this.setState({ open: !this.state.open });
  };
  // Chevron matching the current expanded state.
  handleAngle = () => {
    let iconAngle = this.state.open ? faAngleUp : faAngleDown;
    return <FontAwesomeIcon icon={iconAngle} className="icon" />;
  };
  // Toggle one checkbox id in/out of `checked`, then notify the parent.
  handleToggle = id => {
    const { checked } = this.state;
    const currentIndex = checked.indexOf(id);
    const newChecked = [...checked];
    if (currentIndex === -1) {
      newChecked.push(id);
    } else {
      newChecked.splice(currentIndex, 1);
    }
    this.setState({ checked: newChecked }, () => {
      this.props.handleFilters(newChecked);
    });
  };
  // Radio selection (single value), then notify the parent.
  handleOnChange = e => {
    const { value } = e.target;
    this.setState(
      {
        value
      },
      () => this.props.handleFilters(value)
    );
  };
  // switch case between title of prices and the rest
  renderList = () => {
    if (!this.props.list) return null;
    if (this.props.title === "Prices") {
      return this.props.list.map(item => (
        <FormControlLabel
          key={item._id}
          value={item._id}
          control={<Radio />}
          label={item.name}
        />
      ));
    }
    return this.props.list.map(brand => (
      <ListItem key={brand._id} style={{ padding: "10px 0" }}>
        <ListItemText primary={brand.name} />
        <ListItemSecondaryAction>
          <Checkbox
            color="primary"
            onChange={e => this.handleToggle(brand._id)}
          />
        </ListItemSecondaryAction>
      </ListItem>
    ));
  };
  render() {
    return (
      <div className="collapse_items_wrapper">
        <List style={{ borderBottom: "1px solid #dbdbdb" }}>
          <ListItem
            onClick={this.handleClick}
            style={{ padding: "10px 23px 10px 0" }}
          >
            <ListItemText
              className="collapse_title"
              primary={this.props.title}
            />
            {this.handleAngle()}
          </ListItem>
          {this.props.title !== "Prices" && (
            <Collapse in={this.state.open} timeout="auto" unmountOnExit>
              <List component="div" disablePadding>
                {this.renderList()}
              </List>
            </Collapse>
          )}
          {this.props.title === "Prices" && (
            <RadioGroup
              aria-label="prices"
              name="prices"
              value={this.state.value}
              onChange={e => this.handleOnChange(e)}
            >
              {this.renderList()}
            </RadioGroup>
          )}
        </List>
      </div>
    );
  }
}
export default CollapseCheckBox;
| 6f7fecfe131ed2932ca8f743fabeb0b02007d014 | [
"JavaScript"
] | 19 | JavaScript | xwang985/wave_guitar_shop | 48a85364560ae6fe873bb15dd627d21976748b3a | 5aa73e092be9bc7066576017875f03293149b118 |
refs/heads/master | <repo_name>IgoSaldanha/ProjectGameJam<file_sep>/Assets/MineGames/FoodMan/Scripts/PlayerController.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class PlayerController : MonoBehaviour {
public float speed;//the speed pacman can travel
public int score = 0;//the score
public int livesLeft = 2;//how many extras lives pacman has left
public Text scoreText;//the Text UI Component that shows the score
public Image life1;
public Image life2;
private Vector2 direction;//the direction pacman is going
private bool alive = true;
Rigidbody2D rb2d;
Animator animator;
// Use this for initialization
void Start () {
rb2d = GetComponent<Rigidbody2D>();
animator = GetComponent<Animator>();
}
// Update is called once per frame
void FixedUpdate () {
if (alive)
{
animator.SetFloat("currentSpeed", rb2d.velocity.magnitude);
if (Input.GetAxis("Horizontal") < 0)
{
direction = Vector2.left;
}
if (Input.GetAxis("Horizontal") > 0)
{
direction = Vector2.right;
}
if (Input.GetAxis("Vertical") < 0)
{
direction = Vector2.down;
}
if (Input.GetAxis("Vertical") > 0)
{
direction = Vector2.up;
}
rb2d.velocity = direction * speed;
transform.up = direction;
if (rb2d.velocity.x == 0)
{
transform.position = new Vector2(Mathf.Round(transform.position.x), transform.position.y);
}
if (rb2d.velocity.y == 0)
{
transform.position = new Vector2(transform.position.x, Mathf.Round(transform.position.y));
}
}
}
public void addPoints(int pointsToAdd)
{
score += pointsToAdd;
scoreText.text = ""+score;
}
public void setAlive(bool isAlive)
{
alive = isAlive;
animator.SetBool("alive", alive);
rb2d.velocity = Vector2.zero;
}
public void setLivesLeft(int lives)
{
livesLeft = lives;
life1.enabled = livesLeft >= 1;
life2.enabled = livesLeft >= 2;
}
}
<file_sep>/Assets/MineGames/FoodMan/Scripts/GameManager.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Central game-flow state machine for the Pac-Man minigame (singleton).
// Drives win/lose detection, death/respawn timing, ghost vulnerability
// windows, and the end-of-game screens.
public class GameManager : MonoBehaviour {
public enum GameState
{
PLAY, PACMAN_DYING, PACMAN_DEAD, GAME_OVER, GAME_WON
};
public GameState gameState = GameState.PLAY;
[Range(1,10)]
public float ghostVulnerableDuration = 7.0f;//how long the ghosts should be vulnerable for
[Range(1,5)]
public float ghostVulnerableEndWarningDuration = 2.0f;// blink warning window before vulnerability ends
public Image gameWonScreen;
public Image gameOverScreen;
public GameObject pacman;
public AnimationClip pacmanDeathAnimation;// its length times the dying phase
public List<GameObject> ghosts;
public List<GameObject> pills;// game is won once all are collected
public AudioSource pacmanKilledSound;
public AudioSource gameWonSound;
public AudioSource gameOverSound;
private static GameManager instance;// singleton handle for the static API
private float respawnTime;// Time.time at which the current phase ends
private float invulnerableTime = 0;//when the ghosts will become invulnerable again
// Use this for initialization: enforce the singleton and hide end screens.
void Start () {
if (instance == null)
{
instance = this;
}
else
{
Destroy(gameObject);
}
gameOverScreen.enabled = false;
gameWonScreen.enabled = false;
}
// Per-frame state machine step plus vulnerability-timer bookkeeping.
void Update () {
switch (gameState)
{
case GameState.PLAY:
// Win when no active pill remains.
bool foundPill = false;
foreach (GameObject pill in pills)
{
if (pill.activeSelf)
{
foundPill = true;
break;
}
}
if (!foundPill)
{
gameState = GameState.GAME_WON;
}
break;
case GameState.PACMAN_DYING:
// Wait for the death animation to finish, then hide pacman briefly.
if (Time.time > respawnTime)
{
gameState = GameState.PACMAN_DEAD;
respawnTime = Time.time + 1;
pacman.SetActive(false);
}
break;
case GameState.PACMAN_DEAD:
// After the pause: spend a life and either respawn or end the game.
if (Time.time > respawnTime)
{
gameState = GameState.PLAY;
pacman.SetActive(true);
PlayerController playerController = pacman.GetComponent<PlayerController>();
playerController.setLivesLeft(playerController.livesLeft - 1);
if (playerController.livesLeft >= 0)
{
playerController.setAlive(true);
}
else
{
gameState = GameState.GAME_OVER;
}
pacman.transform.position = Vector2.zero;
foreach (GameObject ghost in ghosts)
{
ghost.GetComponent<GhostController>().reset();
}
}
break;
case GameState.GAME_OVER:
// Show the lose screen; any key restarts a fresh round.
gameOverScreen.enabled = true;
gameWonScreen.enabled = false;
if (!gameOverSound.isPlaying)
{
gameOverSound.Play();
}
if (Input.anyKeyDown)
{
resetGame();
gameState = GameState.PLAY;
gameOverScreen.enabled = false;
gameWonScreen.enabled = false;
}
break;
case GameState.GAME_WON:
// Show the win screen and reward the outer game's health bar.
gameOverScreen.enabled = false;
gameWonScreen.enabled = true;
BarraVida.vidaAtual += 80;
if (!gameWonSound.isPlaying)
{
gameWonSound.Play();
}
if (Input.anyKeyDown)
{
resetGame();
gameState = GameState.PLAY;
gameOverScreen.enabled = false;
gameWonScreen.enabled = false;
}
break;
}
//Ghost Vulnerability: expire the window, blinking near the end.
if (invulnerableTime > 0)
{
if (Time.time > invulnerableTime)
{
invulnerableTime = 0;
foreach (GameObject ghost in ghosts)
{
ghost.GetComponent<GhostController>().setVulnerable(false);
}
}
else if (Time.time > invulnerableTime - ghostVulnerableEndWarningDuration
&& (Time.time *10)%2 < 0.1f)
{
foreach (GameObject ghost in ghosts)
{
ghost.GetComponent<GhostController>().blink();
}
}
}
if (Input.GetKeyDown(KeyCode.Escape))
{
Application.Quit();
}
}
// Called by ghosts on contact: start the death sequence and freeze ghosts.
public static void pacmanKilled()
{
instance.pacman.GetComponent<PlayerController>().setAlive(false);
instance.gameState = GameState.PACMAN_DYING;
instance.respawnTime = Time.time + instance.pacmanDeathAnimation.length;
instance.pacmanKilledSound.Play();
foreach (GameObject ghost in instance.ghosts)
{
ghost.GetComponent<GhostController>().freeze(true);
}
}
// Restore a fresh round: full lives, all pills active, everyone reset.
public void resetGame()
{
pacman.transform.position = Vector2.zero;
PlayerController playerController = pacman.GetComponent<PlayerController>();
playerController.setLivesLeft(2);
playerController.setAlive(true);
foreach (GameObject ghost in ghosts)
{
ghost.GetComponent<GhostController>().reset();
}
foreach (GameObject pill in pills)
{
pill.SetActive(true);
}
}
// Called when a SuperPill is eaten: open the vulnerability window.
public static void makeGhostsVulnerable()
{
instance.invulnerableTime = Time.time + instance.ghostVulnerableDuration;
foreach (GameObject ghost in instance.ghosts)
{
ghost.GetComponent<GhostController>().setVulnerable(true);
}
}
}
<file_sep>/Assets/Scripts/CutSceneAtivador.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Playables;
using UnityEngine.Events;
// Plays a Timeline cut-scene when the player stands inside this trigger and
// presses E; also grants +30 health, hides the target object and the prompt.
public class CutSceneAtivador : MonoBehaviour
{
    public PlayableDirector cutScene;   // timeline to play on activation
    public bool playerOnTrigger;        // kept for Inspector compatibility (unused)
    private bool playerIsTrigger;       // true while the player is inside the trigger
    public GameObject Destruir;         // object hidden once the scene fires
    public GameObject canvas;           // "press E" prompt shown while in range

    void Update()
    {
        // FIX: the original used Input.GetKey, which fires every frame while E
        // is held, replaying the cut-scene and adding +30 health per frame.
        // GetKeyDown makes the interaction a single one-shot event.
        if (playerIsTrigger && Input.GetKeyDown(KeyCode.E))
        {
            StartCutScene();
            BarraVida.vidaAtual += 30f;
            Destruir.SetActive(false);
            if (canvas != null)
            {
                canvas.SetActive(false);
            }
            playerIsTrigger = false; // consume so it cannot re-fire until re-entry
        }
    }

    // Kick off the configured timeline.
    void StartCutScene()
    {
        cutScene.Play();
    }

    private void OnTriggerEnter(Collider other)
    {
        if (other.CompareTag("Player"))
        {
            playerIsTrigger = true;
            if (canvas != null)
            {
                canvas.SetActive(true);
            }
        }
    }

    private void OnTriggerExit(Collider other)
    {
        if (other.CompareTag("Player"))
        {
            if (canvas != null)
            {
                canvas.SetActive(false);
            }
            playerIsTrigger = false;
        }
    }
}
<file_sep>/Assets/Scripts/easterEgg.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Hidden pickup: as soon as the player walks into the trigger it grants
// +30 health and hides the pickup object — no key press required.
public class easterEgg : MonoBehaviour
{
    public bool playerOnTrigger;     // kept for Inspector compatibility (unused)
    private bool playerIsTrigger;    // true while the player is inside the trigger
    public GameObject objeto;        // pickup visual, hidden when collected

    void Update()
    {
        // FIX: the original added +30 health on EVERY frame while the player
        // remained inside the trigger. Collect exactly once, then disarm.
        if (playerIsTrigger)
        {
            BarraVida.vidaAtual += 30f;
            objeto.SetActive(false);
            playerIsTrigger = false;
        }
    }

    private void OnTriggerEnter(Collider other)
    {
        if (other.CompareTag("Player"))
        {
            playerIsTrigger = true;
        }
    }

    private void OnTriggerExit(Collider other)
    {
        if (other.CompareTag("Player"))
        {
            playerIsTrigger = false;
        }
    }
}
<file_sep>/Assets/Scripts/BarraVida.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Health bar HUD. Health drains in real time; other scripts top it up by
// writing to the static vidaAtual. Reaching zero loads the "Perdeu" scene.
public class BarraVida : MonoBehaviour
{
    public Image barraVidaUI;          // bar fill whose width tracks health
    private float vidaMaximo = 60;     // capacity, also the starting value
    static public float vidaAtual;     // current health, shared globally

    void Start()
    {
        vidaAtual = vidaMaximo;
    }

    void Update()
    {
        // Clamp overshoot from pickups, resize the fill (120 px == full),
        // drain one unit per second, then end the game when empty.
        if (vidaAtual >= vidaMaximo)
        {
            vidaAtual = vidaMaximo;
        }
        float largura = vidaAtual / vidaMaximo * 120;
        barraVidaUI.rectTransform.sizeDelta = new Vector2(largura, 12);
        vidaAtual -= Time.deltaTime;
        if (vidaAtual < 0)
        {
            UnityEngine.SceneManagement.SceneManager.LoadScene("Perdeu");
        }
    }
}
<file_sep>/Assets/Scripts/Camera3Pessoa.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Experimental.GlobalIllumination;
// Third-person follow camera. Glides toward one of several preset anchor
// positions, pulling in closer when geometry would block the view of the
// player's head. V cycles through the anchors.
public class Camera3Pessoa : MonoBehaviour
{
public GameObject cabeca;// player's head: the look-at / occlusion target
public GameObject[] posicoes;// candidate camera anchor positions
public int indise = 0;// index of the active anchor in posicoes
public float velocidadeMovimenoto = 2;// lerp speed toward the anchor
private RaycastHit hit;// closest obstruction between head and anchor
void FixedUpdate()
{
transform.LookAt(cabeca.transform);
// Check whether anything blocks the line from head to the anchor.
if (!Physics.Linecast(cabeca.transform.position, posicoes[indise].transform.position))
{
// Clear line of sight: ease toward the preferred anchor.
transform.position = Vector3.Lerp(transform.position, posicoes[indise].transform.position, velocidadeMovimenoto * Time.deltaTime);
Debug.DrawLine(cabeca.transform.position, posicoes[indise].transform.position);
}
else if (Physics.Linecast(cabeca.transform.position, posicoes[indise].transform.position, out hit)){
// Blocked: pull the camera in to the obstruction point (twice as fast
// so the view clears quickly).
transform.position = Vector3.Lerp(transform.position, hit.point, (velocidadeMovimenoto * 2) * Time.deltaTime);
Debug.DrawLine(cabeca.transform.position, hit.point);
}
}
void Update()
{
if (ConfigPause.pause) return ;
// V advances to the next anchor, wrapping back to the first.
if (Input.GetKeyDown("v") && indise < (posicoes.Length - 1)) {
indise++;
}
else if (Input.GetKeyDown("v") && indise >= (posicoes.Length - 1))
{
indise = 0;
}
}
}
<file_sep>/Assets/Scripts/BarraDia.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Day-timer HUD bar: counts down in real time. Surviving until it empties
// wins the day and loads the "Ganhou" scene.
public class BarraDia : MonoBehaviour
{
    public Image barraDiaUI;           // bar fill whose width tracks time left
    private float HoraMaximo = 210;    // day length in seconds
    static public float HoraAtual;     // seconds remaining, shared globally

    void Start()
    {
        HoraAtual = HoraMaximo;
    }

    void Update()
    {
        // Clamp, resize the fill (120 px == a full day), tick the clock
        // down, and finish the day when it reaches zero.
        if (HoraAtual >= HoraMaximo)
        {
            HoraAtual = HoraMaximo;
        }
        float largura = HoraAtual / HoraMaximo * 120;
        barraDiaUI.rectTransform.sizeDelta = new Vector2(largura, 12);
        HoraAtual -= Time.deltaTime;
        if (HoraAtual < 0)
        {
            UnityEngine.SceneManagement.SceneManager.LoadScene("Ganhou");
        }
    }
}
<file_sep>/README.md
#  Quarantine Day 
## Download do Jogo ( Windows )
* [Quarantine Day](https://github.com/IgoSaldanha/ProjectGameJam/raw/master/Bulids/QuarantineDayBeta1.0.rar)
## Softwares Usados
* [Unity](https://unity.com/pt) - Game Engine
* [Visual Studio](https://visualstudio.microsoft.com/pt-br/) - IDE
* [Fuse](https://store.steampowered.com/app/257400/Fuse/?l=portuguese) - Plataforma de Criação do Personagem
* [Mixamo](https://www.mixamo.com/#/) - Plataforma de Animação do Personagem
* [SketchUp](https://www.sketchup.com/pt-BR) - Plataforma de Projetos 3D
## Referências
* [Efeitos Sonoros 1](https://sonniss.com/gameaudiogdc2016/)
* [Efeitos Sonoros 2](https://freesound.org/browse/)
* [Sprites](https://br.freepik.com)
## Criadores
* *Líder* - **<NAME>**
* *Design* - **<NAME>**
* *Programador* - **<NAME>**
* *Sonoplasta* - **<NAME>**
## Iniciativa
* Game Jam proposta pelo professor <NAME>.
<file_sep>/Assets/Scripts/TriggersParaVida.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Interactable pickup: shows a prompt while the player is inside the trigger;
// pressing E consumes the object and restores 20 health.
public class TriggersParaVida : MonoBehaviour
{
    public bool playerOnTrigger;     // kept for Inspector compatibility (unused)
    private bool playerIsTrigger;    // true while the player is inside the trigger
    public GameObject objeto;        // object hidden when collected
    public GameObject canvas;        // "press E" prompt shown while in range

    void Update()
    {
        // FIX: Input.GetKey fired on every frame E was held, awarding +20
        // health repeatedly for a single pickup. GetKeyDown fires once, and
        // the trigger flag is consumed so holding E cannot re-trigger.
        if (playerIsTrigger && Input.GetKeyDown(KeyCode.E))
        {
            objeto.SetActive(false);
            BarraVida.vidaAtual += 20f;
            if (canvas != null)
            {
                canvas.SetActive(false);
            }
            playerIsTrigger = false;
        }
    }

    private void OnTriggerEnter(Collider other)
    {
        if (other.CompareTag("Player"))
        {
            playerIsTrigger = true;
            if (canvas != null)
            {
                canvas.SetActive(true);
            }
        }
    }

    private void OnTriggerExit(Collider other)
    {
        if (other.CompareTag("Player"))
        {
            if (canvas != null)
            {
                canvas.SetActive(false);
            }
            playerIsTrigger = false;
        }
    }
}
<file_sep>/Assets/MineGames/FoodMan/Scripts/TeleportPadChecker.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// 2D teleport pad: any collider that enters this trigger is instantly
// relocated to sendToPos (e.g. the tunnel exits in the Pac-Man maze).
public class TeleportPadChecker : MonoBehaviour {

    public Vector2 sendToPos = Vector2.zero;   // world-space destination

    void OnTriggerEnter2D(Collider2D coll)
    {
        // Move the entering object straight to the paired pad's position.
        coll.transform.position = sendToPos;
    }
}
<file_sep>/Assets/Scripts/TrggersParaMineGames.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Trigger zone that swaps between the outer world and an embedded minigame:
// pressing E while inside toggles the minigame instance on and hides the
// outer-world objects (player, house, camera, HUD) — and vice versa.
public class TrggersParaMineGames : MonoBehaviour
{
// Presence-in-collider state
public bool playerOnTrigger;// kept for Inspector compatibility (unused)
private bool playerIsTrigger;// true while the player is inside the trigger
// Canvas showing the name of the action available (the "press E" prompt)
public GameObject canvas;
// Outer-world objects toggled off while the minigame runs
public GameObject CanvasDeBarras;
public GameObject player;
public GameObject casa;
public GameObject concertosCasa;
public GameObject MainCamera;
// Completion flags (GameFinish is written by minigame code elsewhere)
static public bool GameFinish;
private bool receptor;
// Minigame prefab; replaced at Start by a private runtime instance
public GameObject MineGame;
private bool modificador;// true while the minigame instance is active
void Start()
{
GameFinish = false;
// Instantiate a private copy of the minigame and keep it disabled.
MineGame = Instantiate(MineGame, MineGame.transform.position, MineGame.transform.rotation) as GameObject;
Jogando(false);
}
/*
private void FixedUpdate()
{
// Verificar se o minegame foi concluido
if (GameFinish == true) {
//modificador = false;
receptor = false;
Jogando(receptor);
CanvasDeBarras.SetActive(!modificador);
player.SetActive(!modificador);
casa.SetActive(!modificador);
concertosCasa.SetActive(!modificador);
MainCamera.SetActive(!modificador);
}
}
*/
// Enable/disable the instantiated minigame on screen.
void Jogando(bool statusJogo)
{
modificador = statusJogo;
MineGame.SetActive(modificador);
}
void Update()
{
// While the player is in range, E toggles between world and minigame.
if (playerIsTrigger)
{
if (Input.GetKey(KeyCode.E))
{
Jogando(!modificador);
// Outer-world objects are shown exactly when the minigame is hidden.
CanvasDeBarras.SetActive(!modificador);
player.SetActive(!modificador);
casa.SetActive(!modificador);
concertosCasa.SetActive(!modificador);
MainCamera.SetActive(!modificador);
if (canvas != null)
{
canvas.SetActive(false);
}
}
}
}
// Player entered the trigger: remember it and show the prompt.
private void OnTriggerEnter(Collider other)
{
if (other.CompareTag("Player"))
{
playerIsTrigger = true;
if (canvas != null)
{
canvas.SetActive(true);
}
}
}
// Player left the trigger: hide the prompt and clear the flag.
private void OnTriggerExit(Collider other)
{
if (other.CompareTag("Player"))
{
if (canvas != null)
{
canvas.SetActive(false);
}
playerIsTrigger = false;
}
}
}
<file_sep>/Assets/MineGames/FoodMan/Scripts/Utility.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Helpers for the four cardinal Vector2 directions, stored clockwise:
// up, right, down, left.
public static class Utility
{
    public static Vector2[] directions = new Vector2[]{
        Vector2.up,
        Vector2.right,
        Vector2.down,
        Vector2.left
    };

    // Index of v inside directions (uses Unity's Vector2 == comparison),
    // or -1 when v is not one of the four cardinals.
    private static int indexOf(Vector2 v)
    {
        for (int i = 0; i < directions.Length; i++)
        {
            if (directions[i] == v)
            {
                return i;
            }
        }
        return -1;
    }

    // The direction 90 degrees clockwise from v.
    // Throws KeyNotFoundException when v is not a cardinal direction.
    public static Vector3 PerpendicularRight(Vector2 v)
    {
        int i = indexOf(v);
        if (i < 0)
        {
            throw new KeyNotFoundException("Vector " + v + " is not a horizontal or vertical vector.");
        }
        return directions[(i + 1) % directions.Length];
    }

    // The direction 90 degrees counter-clockwise from v.
    // Throws KeyNotFoundException when v is not a cardinal direction.
    public static Vector3 PerpendicularLeft(Vector2 v)
    {
        int i = indexOf(v);
        if (i < 0)
        {
            throw new KeyNotFoundException("Vector " + v + " is not a horizontal or vertical vector.");
        }
        return directions[(i + directions.Length - 1) % directions.Length];
    }
}
<file_sep>/Assets/Scripts/MenuInicial.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Button handlers for the title-screen UI (wired to Unity UI OnClick events).
public class MenuInicial : MonoBehaviour
{
    // NOTE: the original empty Start()/Update() stubs were removed — Unity
    // dispatches these via reflection (Update every frame), so empty ones
    // are pure overhead.

    // Load the gameplay scene.
    public void Iniciar() {
        UnityEngine.SceneManagement.SceneManager.LoadScene("SampleScene");
    }

    // Show the about/credits screen.
    public void TelaSobre()
    {
        UnityEngine.SceneManagement.SceneManager.LoadScene("Sobre");
    }

    // Quit the application (no effect inside the editor).
    public void SairJogo() {
        Application.Quit();
    }

    // Return to the main menu scene.
    public void VoltarMenu()
    {
        UnityEngine.SceneManagement.SceneManager.LoadScene("MenuInicial");
    }
}
<file_sep>/Assets/MineGames/FoodMan/Scripts/SuperPill.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Power pellet: a Collectable that additionally makes every ghost
// vulnerable for a while when picked up.
public class SuperPill : Collectable {
// Called by Collectable's trigger handler when the player touches the pill.
protected override void collected(Collider2D coll)
{
GameManager.makeGhostsVulnerable();
base.collected(coll);// award points, play the sound, deactivate the pill
}
}
<file_sep>/Assets/MineGames/FoodMan/Scripts/Collectable.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Base class for anything the player can pick up (pills, power pellets...).
// On player contact it awards points, plays a sound and deactivates itself.
public class Collectable : MonoBehaviour {

    public int points = 100;//how many points to give the player upon collection
    public AudioClip collectSound;   // one-shot played at the pickup's position

    // NOTE: the original empty Start()/Update() stubs were removed — Unity
    // calls them via reflection every frame, so empty ones waste CPU.

    void OnTriggerEnter2D(Collider2D coll)
    {
        if (coll.gameObject.tag == "Player")
        {
            collected(coll);
        }
    }

    // Overridable collection hook (see SuperPill). Credits the player's
    // score, plays the pickup sound, then hides this object.
    protected virtual void collected(Collider2D coll)
    {
        coll.gameObject.GetComponent<PlayerController>().addPoints(points);
        AudioSource.PlayClipAtPoint(collectSound, transform.position);
        gameObject.SetActive(false);
    }
}
<file_sep>/Assets/Scripts/PersonagenWalk.cs
using JetBrains.Annotations;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using UnityEngine;
using UnityEngine.Playables;
using UnityEngine.Video;
// Player character controller for the outer game: feeds keyboard input into
// animator parameters (walking plus exercise animations: sit-ups, push-ups,
// dancing) and rotates the character with the horizontal axis.
public class PersonagenWalk : MonoBehaviour
{
public AudioSource somPassos;// footstep sound, triggered by animation events
public float rotacionar = 200;// turn speed in degrees per second
private Animator _animator;
private float _andar = 0;// walk/run speed fed to the "Andar" parameter
private int _danca = 0;// 1 while the dance key (O) is held
private int _flexao = 0;// 1 while the push-up key (I) is held
private int _sentar = 0;// sit state (no key currently mapped)
private int _abdominal = 0;// 1 while the sit-up key (U) is held
public GameObject ControladorDeVida;
public float TimeInicial = 0;
public float vida = 0;
void Start()
{
_animator = GetComponent<Animator>();
}
void Update()
{
if (ConfigPause.pause) return;
_andar = Input.GetAxis("Vertical");
if (Input.GetKeyDown(KeyCode.U)) // start sit-ups (abdominal)
{
_abdominal += 1;
}
if (Input.GetKeyUp(KeyCode.U)) // stop sit-ups
{
vida = 0;
_abdominal += -1;
}
if (Input.GetKeyDown(KeyCode.I)) // start push-ups
{
_flexao += 1;
}
if (Input.GetKeyUp(KeyCode.I)) // stop push-ups
{
_flexao += -1;
}
if (Input.GetKeyDown(KeyCode.O)) // start dancing
{
_danca += 1;
}
if (Input.GetKeyUp(KeyCode.O)) // stop dancing
{
_danca += -1;
}
// Left Shift = sprint: boost the walk parameter while held.
if (Input.GetKey(KeyCode.LeftShift))
{
_andar += 1;
}
// NOTE(review): on the release frame this forces _andar to 1 instead of
// the raw axis value — looks unintentional; confirm before changing.
if (Input.GetKeyUp(KeyCode.LeftShift))
{
_andar = 1;
}
// Push the collected input into the animator parameters.
_animator.SetInteger("Abdominal", _abdominal);
_animator.SetInteger("Sentar", _sentar);
_animator.SetInteger("Flexao", _flexao);
_animator.SetInteger("Danca", _danca);
_animator.SetFloat("Andar", _andar);
// Rotate with the horizontal axis while no exercise animation is active.
if (_danca == 0 || _flexao == 0 || _sentar == 0 || _abdominal == 0)
{
this.transform.Rotate(0, (Input.GetAxis("Horizontal") * rotacionar) * Time.deltaTime, 0);
}
}
// Animation-event hook: play one footstep sound.
public void SondPassos()
{
somPassos.Play();
}
}<file_sep>/Assets/MineGames/FoodMan/Scripts/GhostController.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Ghost AI: wanders the maze turning at walls and (randomly) at openings,
// kills the player on contact — unless currently vulnerable, in which case
// it is eaten and returns home invisibly before respawning.
public class GhostController : MonoBehaviour {
public float speed = 1.0f;//the speed this ghost can travel
public Vector2 direction = Vector2.up;//the direction this ghost is going
public Color vulnerableColor = Color.blue;// tint used while edible
public int points = 400;//how many points you get for eating this ghost
private float changeDirectionTime;//the soonest that he can change direction
private Vector2 originalPosition;// spawn point, used by reset() and when eaten
private Color originalColor;
private bool frozen = false;// true while the game pauses ghost movement
private bool vulnerable = false;// true while a SuperPill effect is active
private bool eaten = false;// true while returning home after being eaten
private Rigidbody2D rb2d;
private CircleCollider2D cc2d;
private SpriteRenderer sr;
private AudioSource ghostEatenSound;
// Use this for initialization: cache components and remember spawn state.
void Start () {
rb2d = GetComponent<Rigidbody2D>();
cc2d = GetComponent<CircleCollider2D>();
sr = GetComponent<SpriteRenderer>();
ghostEatenSound = GetComponent<AudioSource>();
originalPosition = transform.position;
originalColor = sr.color;
}
// Per-frame steering: pick a direction, point the eyes, apply velocity.
void Update () {
if (!frozen)
{
if (!eaten)
{
//Wall Bump Detection: turn when the path ahead is blocked.
if (!openDirection(direction))
{
if (canChangeDirection())
{
changeDirection();
}
else if (rb2d.velocity.magnitude < speed)
{
changeDirectionAtRandom();
}
}
//Come Across an Intersection: maybe turn (rate-limited by the timer).
else if (canChangeDirection() && Time.time > changeDirectionTime)
{
changeDirectionAtRandom();
}
//Stuck on a non-wall
else if (rb2d.velocity.magnitude < speed)
{
changeDirectionAtRandom();
}
}
else
{
//Check to see if it's arrived back at the spawn point.
if (Vector2.Distance(originalPosition, transform.position) < 0.1f)
{
transform.position = originalPosition;
setEaten(false);
}
}
//Rotate Eyes: child sprites face the travel direction.
foreach (Transform t in GetComponentsInChildren<Transform>())
{
if (t != transform)
{
t.up = direction;
}
}
//Move, snapping the idle axis to whole units to stay lane-aligned.
rb2d.velocity = direction * speed;
if (rb2d.velocity.x == 0)
{
transform.position = new Vector2(Mathf.Round(transform.position.x), transform.position.y);
}
if (rb2d.velocity.y == 0)
{
transform.position = new Vector2(transform.position.x, Mathf.Round(transform.position.y));
}
}
}
// True when a short collider cast in `direction` hits no "Level" geometry.
private bool openDirection(Vector2 direction)
{
RaycastHit2D[] rch2ds = new RaycastHit2D[10];
cc2d.Cast(direction, rch2ds, 1f, true);
foreach (RaycastHit2D rch2d in rch2ds)
{
if (rch2d && rch2d.collider.gameObject.tag == "Level")
{
return false;
}
}
return true;
}
// True when at least one perpendicular direction is open (an intersection).
private bool canChangeDirection()
{
Vector2 perpRight = Utility.PerpendicularRight(direction);
bool openRight = openDirection(perpRight);
Vector2 perpLeft = Utility.PerpendicularLeft(direction);
bool openLeft = openDirection(perpLeft);
return openRight || openLeft;
}
// 50% chance to turn now; always re-arms the one-second turn cooldown.
private void changeDirectionAtRandom()
{
changeDirectionTime = Time.time + 1;
if (Random.Range(0, 2) == 0)
{
changeDirection();
}
}
// Turn into an open perpendicular corridor (random pick when both are
// open); reverse when neither side is open.
private void changeDirection()
{
changeDirectionTime = Time.time + 1;
Vector2 perpRight = Utility.PerpendicularRight(direction);
bool openRight = openDirection(perpRight);
Vector2 perpLeft = Utility.PerpendicularLeft(direction);
bool openLeft = openDirection(perpLeft);
if (openRight || openLeft)
{
int choice = Random.Range(0, 2);
if (!openLeft || (choice == 0 && openRight))
{
direction = perpRight;
}
else
{
direction = perpLeft;
}
}
else
{
direction = -direction;
}
}
// Player contact: eaten for points while vulnerable, lethal otherwise.
void OnCollisionEnter2D(Collision2D coll)
{
if (coll.gameObject.tag == "Player")
{
if (vulnerable)
{
coll.gameObject.GetComponent<PlayerController>().addPoints(points);
setEaten(true);
}
else {
GameManager.pacmanKilled();
}
}
}
// Return to the spawn point and resume movement (new round / respawn).
public void reset()
{
transform.position = originalPosition;
freeze(false);
}
// Pause or resume this ghost; always stops current motion.
public void freeze(bool freeze)
{
frozen = freeze;
rb2d.velocity = Vector2.zero;
}
// Toggle the SuperPill vulnerability state and its tint.
public void setVulnerable(bool isVulnerable)
{
vulnerable = isVulnerable;
if (vulnerable)
{
sr.color = vulnerableColor;
}
else
{
sr.color = originalColor;
}
}
// Enter/leave the "eaten" state: invisible and intangible while heading
// straight back toward the spawn point.
public void setEaten(bool isEaten)
{
eaten = isEaten;
if (eaten)
{
sr.color = new Color(0, 0, 0, 0);
cc2d.enabled = false;
direction = originalPosition - (Vector2)transform.position;
ghostEatenSound.Play();
}
else
{
sr.color = originalColor;
cc2d.enabled = true;
direction = Vector2.up;
}
}
// Alternate between normal and vulnerable tint (end-of-effect warning).
public void blink()
{
if (sr.color == originalColor)
{
sr.color = vulnerableColor;
}
else if (sr.color == vulnerableColor)
{
sr.color = originalColor;
}
}
}
| 820ee0849e101845ed8d7daabc403154a9fad761 | [
"Markdown",
"C#"
] | 17 | C# | IgoSaldanha/ProjectGameJam | 9b65499b30ac8d5a9f03c92736b4c17e7a1a8d56 | f965ac5790e1a95034a3a946595555e6c7faee18 |
refs/heads/master | <repo_name>Akapoor99/awesomeTravels<file_sep>/src/FlightBooking.php
<?php
require 'Flight.php';
class FlightBooking
{
private $flight;
private $flightTicketID;
//settype($flightTicketID, "string");
private $price;
//settype($price, "float");
private $porterService;
//settype($porterService, "boolean");
function __construct($_flight, $_ID, $_price, $_porter){
$this->flight = $_flight;
$this->flight->changeSeatNumbers(-1);
$this->flightTicketID = $_ID;
$this->price = $_price;
$this->porterService = $_porter;
}
function getTicketID(){
return $this->flightTicketID;
}
function getPrice(){
return $this->price;
}
function hasPorter(){
return $this->porterService;
}
function getFlightInfo(){
return $this->flight->getAllInfo();
}
}
?>
<file_sep>/HotelRegistry.php
<?php
require 'Hotel.php';
/**
 * Singleton registry of every Hotel known to the system.
 */
class HotelRegistry
{
    private $hotelList;            // array of Hotel objects
    private static $instance;      // FIX: must be static — the original
                                   // declared a non-static $instance, which
                                   // breaks self::$instance access.

    private function __construct(){
        $this->hotelList = array();
    }

    /** @return HotelRegistry the shared instance (created lazily). */
    public static function getInstance(){
        if(!isset(self::$instance)){
            self::$instance = new HotelRegistry();
        }
        return self::$instance;
    }

    /** Register a hotel with the system. */
    function addHotel($newHotel){
        $this->hotelList[] = $newHotel;
    }

    /**
     * All registered hotels whose location equals $location.
     * FIX: the original iterated over "$$hotelList" — a variable-variable on
     * an undefined local — instead of $this->hotelList, so it always failed.
     * @return array matching Hotel objects (possibly empty)
     */
    function searchHotels($location){
        $searchResults = array();
        foreach ($this->hotelList as $value) {
            if($value->getLocation()==$location){
                $searchResults[] = $value;
            }
        }
        return $searchResults;
    }
}
?>
<file_sep>/src/Booking.php
<?php
require 'FlightBooking.php';
require 'HotelBooking.php';
require 'TaxiService.php';
/**
 * Aggregates one customer's reservations: flights, hotel stays, taxi hires.
 */
class Booking
{
    private $flightBookings;   // array of FlightBooking
    private $hotelBookings;    // array of HotelBooking
    private $taxiHires;        // array of TaxiService

    function __construct(){
        // FIX: the original called settype() on undefined LOCAL variables,
        // leaving the object's properties null. Initialise the properties.
        $this->flightBookings = array();
        $this->hotelBookings = array();
        $this->taxiHires = array();
    }

    function addFlightBooking($newFlightBooking){
        $this->flightBookings[] = $newFlightBooking;
    }

    /** @return array [ticketID, price, hasPorter, flightInfo] */
    function getFlightBookingInfo($index){
        $b = $this->flightBookings[$index];
        return array($b->getTicketID(), $b->getPrice(), $b->hasPorter(), $b->getFlightInfo());
    }

    function addHotelBooking($newHotelBooking){
        $this->hotelBookings[] = $newHotelBooking;
    }

    /** @return array [rooms, numberOfPeople, duration, price, hotelInfo] */
    function getHotelBookingInfo($index){
        $b = $this->hotelBookings[$index];
        return array($b->getRooms(), $b->getNumber(), $b->getDuration(), $b->getPrice(), $b->getHotelInfo());
    }

    function addTaxiService($newTaxiService){
        $this->taxiHires[] = $newTaxiService;
    }

    /** @return mixed the taxi hire's detail array */
    function getTaxiServiceInfo($index){
        return $this->taxiHires[$index]->getTaxiInfo();
    }
}
?>
<file_sep>/README.md
# AwesomeTravels
This is an online travel booking system with a few unique features, such as P2P and LM.
<file_sep>/HotelBooking.php
<?php
require 'Hotel.php';
/**
 * One confirmed hotel reservation: a hotel, the rooms taken, the party
 * size, the stay's start date / duration, and the price paid.
 */
class HotelBooking
{
private $hotel;
private $hotelRooms;
// room identifiers reserved for this stay (array)
private $numberOfPeople;
// party size (int)
private $startDate;
private $duration;
// length of stay in nights (int)
private $price;
// total price paid (float)
/**
 * @param Hotel  $_hotel    hotel being booked
 * @param array  $_rooms    rooms reserved
 * @param int    $_number   number of guests
 * @param string $_start    check-in date (any DateTime-parsable string)
 * @param int    $_duration stay length
 * @param float  $_price    total price
 */
function __construct($_hotel, $_rooms, $_number, $_start, $_duration, $_price){
$this->hotel = $_hotel;
$this->hotelRooms = $_rooms;
$this->numberOfPeople = $_number;
$this->duration = $_duration;
$this->startDate = new DateTime($_start);
$this->price = $_price;
}
/** @return array the underlying hotel's full detail array */
function getHotelInfo(){
return $this->hotel->getAllInfo();
}
/** @return DateTime check-in date */
function getStartDate(){
return $this->startDate;
}
/** @return array the reserved rooms */
function getRooms(){
return $this->hotelRooms;
}
/** @return int number of guests */
function getNumber(){
return $this->numberOfPeople;
}
/** @return int stay length */
function getDuration(){
return $this->duration;
}
/** @return float total price paid */
function getPrice(){
return $this->price;
}
}
?>
<file_sep>/Flight.php
<?php
/**
 * A scheduled flight: identity, capacity, schedule, route and price.
 * Seat inventory is mutated via changeSeatNumbers() when bookings are made.
 */
class Flight
{
private $flightNumber;
// flight identifier, e.g. "AT100" (string)
private $availabaleSeats;
private $flightDate;
// seats still open for sale (int)
private $departureLocation;
// origin city (string)
private $departureTime;
// departure timestamp (DateTime)
private $arrivalLocation;
// destination city (string)
private $arrivalTime;
// arrival timestamp (DateTime)
private $airline;
// operating airline name (string)
private $airportD;
private $airportA;
private $price;
function __construct($number, $seats, $fdate, $dlocation, $dtime, $alocation, $atime, $airl, $airpD, $airpA, $price){
$this->flightNumber = $number;
$this->availabaleSeats = $seats;
$this->flightDate = new DateTime($fdate);
$this->departureLocation = $dlocation;
$this->departureTime = new DateTime($dtime);
$this->arrivalLocation = $alocation;
$this->arrivalTime = new DateTime($atime);
$this->airline = $airl;
$this->airportD = $airpD;
$this->airportA = $airpA;
$this->price = $price;
}
/**
 * Full details as a positional array. Note: departure/arrival times are
 * formatted strings, while flightDate is returned as a raw DateTime.
 */
function getAllInfo(){
$allInfo = array($this->flightNumber, $this->availabaleSeats,$this->flightDate, $this->departureLocation, $this->departureTime->format('Y-m-d H:i:s'), $this->arrivalLocation, $this->arrivalTime->format('Y-m-d H:i:s'), $this->airline, $this->airportD, $this->airportA, $this->price);
return $allInfo;
}
/** Adjust seat inventory; pass a negative $change to reserve seats. */
function changeSeatNumbers($change){
$this->availabaleSeats = $this->availabaleSeats + $change;
}
function getAvailableSeats(){
return $this->availabaleSeats;
}
function getFlightNumber(){
return $this->flightNumber;
}
/** @return DateTime the flight date */
function getDate(){
return $this->flightDate;
}
function getDepartureLocation(){
return $this->departureLocation;
}
/** @return DateTime */
function getDepartureTime(){
return $this->departureTime;
}
function getArrivalLocation(){
return $this->arrivalLocation;
}
/** @return DateTime */
function getArrivalTime(){
return $this->arrivalTime;
}
function getAirline(){
return $this->airline;
}
function getAirportDeparture(){
return $this->airportD;
}
function getAirportArrival(){
return $this->airportA;
}
function getPrice(){
return $this->price;
}
}
?>
<file_sep>/FlightRegistry.php
<?php
require 'Flight.php';
/**
 * Singleton registry of scheduled flights, keyed by flight number.
 */
class FlightRegistry
{
    private $flightList;           // map: flight number => Flight
    private static $instance;      // FIX: must be static — the original
                                   // declared a non-static $instance, which
                                   // breaks self::$instance access.

    private function __construct(){
        $this->flightList = array();
    }

    /** @return FlightRegistry the shared instance (created lazily). */
    public static function getInstance(){
        if(!isset(self::$instance)){
            self::$instance = new FlightRegistry();
        }
        return self::$instance;
    }

    /** Register (or replace) a flight under its flight number. */
    function addFlight($newFlight){
        $this->flightList[$newFlight->getFlightNumber()] = $newFlight;
    }

    /**
     * Flights on $_date going from $dloc to $aloc.
     * FIX: the original iterated over an undefined local $flightList instead
     * of $this->flightList, so the search always failed.
     * @return array matching Flight objects (possibly empty)
     */
    function searchFlights($_date, $dloc, $aloc){
        $searchResults = array();
        foreach ($this->flightList as $value) {
            if($value->getDate()==$_date && $value->getDepartureLocation()==$dloc && $value->getArrivalLocation()==$aloc){
                $searchResults[] = $value;
            }
        }
        return $searchResults;
    }
}
?>
<file_sep>/Customer.php
<?php
require("Booking.php");
/**
 * A registered customer: a generated unique id plus their Booking objects.
 */
class Customer
{
    protected $customerID;   // unique id string from uniqid()
    protected $bookings;     // array of Booking

    function __construct(){
        $this->customerID = uniqid("", true);
        // FIX: initialise the bookings list (the original never did).
        $this->bookings = array();
    }

    /**
     * FIX: the original returned the undefined local $customerID instead of
     * $this->customerID — likewise in the two methods below, which read and
     * appended to an undefined local $bookings.
     * @return string the customer's unique id
     */
    function getCustomerID(){
        return $this->customerID;
    }

    /** @return mixed the booking stored at $index */
    function getBooking($index){
        return $this->bookings[$index];
    }

    /** Append a booking to this customer's history. */
    function addBooking($newBooking){
        $this->bookings[] = $newBooking;
    }
}
?>
<file_sep>/src/ContactInfo.php
<?php
/**
 * Value object holding one person's contact details.
 * Note: the constructor does not set the address; use setAddress().
 */
class ContactInfo
{
    private $firstName;
    private $lastName;
    private $email;
    private $phoneNumber;
    private $address;   // array; null until setAddress() is called

    /**
     * @param string $f first name
     * @param string $l last name
     * @param string $e email address
     * @param string $p phone number
     */
    function __construct($f, $l, $e, $p){
        $this->firstName = $f;
        $this->lastName = $l;
        $this->email = $e;
        $this->phoneNumber = $p;
    }

    function getFirstName(){
        return $this->firstName;
    }

    function getLastName(){
        return $this->lastName;
    }

    function getEmail(){
        return $this->email;
    }

    function setEmail($newEmail){
        $this->email = $newEmail;
    }

    function getPhoneNumber(){
        return $this->phoneNumber;
    }

    function setPhoneNumber($newNumber){
        $this->phoneNumber = $newNumber;
    }

    function getAddress(){
        return $this->address;
    }

    function setAddress($newAddress){
        $this->address = $newAddress;
    }

    /**
     * FIX: the original read undefined locals ($firstName, ...) instead of
     * the object's properties, so every element of the result was null.
     * @return array [firstName, lastName, email, phoneNumber, address]
     */
    function getContactInfo(){
        $allContactInfo = array($this->firstName, $this->lastName, $this->email, $this->phoneNumber, $this->address);
        return $allContactInfo;
    }
}
?>
<file_sep>/GroupTravelRegistry.php
<?php
require 'Member.php';
/**
 * Singleton registry that buckets group-travel participants by age bracket
 * (<21, 21-25, 26-30).
 */
class GroupTravelRegistry
{
    /** @var GroupTravelRegistry|null singleton instance.
     * BUG FIX: must be static — it is accessed via self::$instance. */
    private static $instance;
    /** @var array participants younger than 21 */
    private $participantListUnder21;
    /** @var array participants aged 21 to 25 */
    private $participantList21to25;
    /** @var array participants aged 26 to 30 */
    private $participantList26to30;

    function __construct(){
        $this->participantListUnder21 = array();
        $this->participantList21to25 = array();
        $this->participantList26to30 = array();
    }

    public static function getInstance(){
        if(!isset(self::$instance)){
            self::$instance = new GroupTravelRegistry();
        }
        return self::$instance;
    }

    /** Files the participant into the bracket matching its getAge(). */
    function addParticipant($newParticipant){
        // BUG FIX: appended to undefined locals instead of the object's lists,
        // so every participant was silently discarded.
        if($newParticipant->getAge() < 21){
            $this->participantListUnder21[] = $newParticipant;
        }
        else if($newParticipant->getAge() >= 21 && $newParticipant->getAge() <= 25){
            $this->participantList21to25[] = $newParticipant;
        }
        else{
            $this->participantList26to30[] = $newParticipant;
        }
    }

    /**
     * Returns every participant in $member's own age bracket, excluding
     * $member itself (matched by username).
     */
    function filterByAge($member){
        // BUG FIX: each branch looped over an undefined local list; select
        // the bracket from $this-> and share one filtering loop.
        if($member->getAge() < 21){
            $bracket = $this->participantListUnder21;
        }
        else if($member->getAge() >= 21 && $member->getAge() <= 25){
            $bracket = $this->participantList21to25;
        }
        else{
            $bracket = $this->participantList26to30;
        }
        $filterResults = array();
        foreach ($bracket as $value) {
            if($value->getUsername() != $member->getUsername()){
                $filterResults[] = $value;
            }
        }
        return $filterResults;
    }

    // TODO(review): empty in the original; kept for interface compatibility.
    function filterByLocation($location){
    }
}
?>
<file_sep>/MemberRegistry.php
<?php
require 'Member.php';
/**
 * Singleton registry of Member accounts, keyed by the sha256 hash of the
 * username, with a simple credential-check login helper.
 */
class MemberRegistry
{
    /** @var MemberRegistry|null singleton instance */
    private static $instance;
    /** @var array Member objects keyed by sha256(username) */
    private $membersList;
    // NOTE(review): declared but never used in this class.
    private $blacklist;

    private function __construct(){
        $this->membersList = array();
    }

    public static function getInstance(){
        if (!isset(self::$instance)) {
            self::$instance = new MemberRegistry();
        }
        return self::$instance;
    }

    function addMember($newMember){
        $this->membersList[hash("sha256", $newMember->getUsername())] = $newMember;
    }

    /**
     * Returns the Member registered under $targetName, or null when unknown.
     */
    function searchMembers($targetName){
        $key = hash("sha256", $targetName);
        // BUG FIX: indexing a missing key raised a notice; make the
        // "not found" result an explicit null.
        return isset($this->membersList[$key]) ? $this->membersList[$key] : null;
    }

    /** @return bool true when the username exists and the password matches */
    function loginMembers($username, $password){
        $member = self::searchMembers($username);
        // BUG FIX: guard against unknown usernames — calling checkUserName()
        // on null was a fatal error.
        if($member !== null && $member->checkUserName($username) && $member->checkPassword($password)){
            return true;
        }
        else{
            return false;
        }
    }
}
?>
<file_sep>/createInstances.php
<?php
// Bootstrap script: seeds the member, flight and hotel registries with sample
// data and serialises each registry to disk so other pages can restore them.
//
// BUG FIX: the original used plain `require` and listed HotelRegistry.php and
// Member.php twice — requiring a class file twice is a fatal redeclaration
// error. Switched to require_once and added the missing Hotel.php (the script
// instantiates Hotel directly below).
require_once 'Booking.php';
require_once 'ContactInfo.php';
require_once 'Customer.php';
require_once 'Flight.php';
require_once 'FlightBooking.php';
require_once 'FlightRegistry.php';
require_once 'Hotel.php';
require_once 'HotelRegistry.php';
require_once 'HotelBooking.php';
require_once 'Member.php';
require_once 'PaymentInfo.php';
require_once 'TaxiService.php';

// One sample member, stored under its hashed username.
$member1 = new Member('User1', '1234', 20, 'Tasdiq', 'Dewan', '<EMAIL>', '07466448234');
$memberRegistry = MemberRegistry::getInstance();
$memberRegistry->addMember($member1);
$sMR = serialize($memberRegistry);
file_put_contents('MemberRegistryStore', $sMR);

// Sample flights (number, seats, date, from, dep time, to, arr time, airline,
// dep airport, arr airport, price).
$flight1 = new Flight('2814', 150, '25 April 2018', 'London', '15:30','Paris', '16:30', 'BA', 'LHR', 'LBG', 100.00);
$flight2 = new Flight('2452', 100, '25 April 2018', 'London', '07:45','Paris', '09:00', 'BA', 'LHR', 'LBG', 85.00);
$flight3 = new Flight('3521', 140, '25 April 2018', 'London', '12:00','Paris', '13:00', 'AF', 'LGW', 'LBG', 100.00);
$flight4 = new Flight('1895', 150, '24 May 2018', 'London', '15:30','Paris', '16:30', 'BA', 'LHR', 'LBG', 100.00);
$flightRegistry = FlightRegistry::getInstance();
$flightRegistry->addFlight($flight1);
$flightRegistry->addFlight($flight2);
$flightRegistry->addFlight($flight3);
$flightRegistry->addFlight($flight4);
$sFR = serialize($flightRegistry);
file_put_contents('FlightRegistryStore', $sFR);

// Sample hotels (name, location, available rooms).
$hotel1 = new Hotel('Hotel French-name', 'Paris', 52);
$hotel2 = new Hotel('Some British Place', 'Hull', 27);
$hotel3 = new Hotel('More French Names', 'Paris', 30);
$hotel4 = new Hotel('One More Hotel', 'Paris', 100);
$hotelRegistry = HotelRegistry::getInstance();
$hotelRegistry->addHotel($hotel1);
$hotelRegistry->addHotel($hotel2);
$hotelRegistry->addHotel($hotel3);
$hotelRegistry->addHotel($hotel4);
$sHR = serialize($hotelRegistry);
file_put_contents('HotelRegistryStore', $sHR);
?>
<file_sep>/Hotel.php
<?php
/**
 * Read-only value object describing a hotel: its name, where it is, and how
 * many rooms are currently available.
 */
class Hotel
{
    private $hotelName;
    private $location;
    private $availableRooms;

    /**
     * @param string $name  display name of the hotel
     * @param string $loc   city / location
     * @param mixed  $rooms available room count
     */
    function __construct($name, $loc, $rooms){
        $this->hotelName = $name;
        $this->location = $loc;
        $this->availableRooms = $rooms;
    }

    function getHotelName(){
        return $this->hotelName;
    }

    function getLocation(){
        return $this->location;
    }

    function getAvailableRooms(){
        return $this->availableRooms;
    }

    /** @return array positional summary: [name, location, availableRooms] */
    function getAllInfo(){
        return array(
            $this->hotelName,
            $this->location,
            $this->availableRooms,
        );
    }
}
?>
<file_sep>/TaxiService.php
<?php
/**
 * Value object for a booked taxi ride: pickup point, destination, passenger
 * count and quoted price.
 */
class TaxiService
{
    private $pickupLocation;
    private $destination;
    private $numberOfPassengers;
    private $price;

    /**
     * @param string $pickup pickup location
     * @param string $dest   destination
     * @param int    $number number of passengers
     * @param float  $_price quoted price for the ride
     */
    function __construct($pickup, $dest, $number, $_price){
        $this->pickupLocation = $pickup;
        $this->destination = $dest;
        $this->numberOfPassengers = $number;
        $this->price = $_price;
    }

    /** @return array positional summary: [pickup, destination, passengers, price] */
    function getTaxiInfo(){
        return array(
            $this->pickupLocation,
            $this->destination,
            $this->numberOfPassengers,
            $this->price,
        );
    }
}
?>
<file_sep>/src/PaymentInfo.php
<?php
/**
 * Card payment details for a member: card number, expiry month/year and the
 * cardholder name.
 */
class PaymentInfo
{
    private $cardNumber;
    private $expirationMonth;
    private $expirationYear;
    private $name;

    function __construct($cardNum, $expMonth, $expYear, $name){
        $this->cardNumber = $cardNum;
        $this->expirationMonth = $expMonth;
        $this->expirationYear = $expYear;
        $this->name = $name;
    }

    /** Replaces every stored field at once. */
    function setAllPaymentInfo($cn, $em, $ey, $n){
        $this->cardNumber = $cn;
        $this->expirationMonth = $em;
        $this->expirationYear = $ey;
        $this->name = $n;
    }

    /**
     * Returns all fields as a positional array:
     * [cardNumber, expirationMonth, expirationYear, name].
     */
    function getPaymentInfo(){
        // BUG FIX: collected four undefined locals (all null); read the
        // object's properties instead.
        return array($this->cardNumber, $this->expirationMonth, $this->expirationYear, $this->name);
    }
}
?>
<file_sep>/src/Member.php
<?php
require 'Customer.php';
require 'PaymentInfo.php';
require 'ContactInfo.php';
/**
 * Registered site member: a Customer with credentials, age, contact info and
 * (lazily created) payment info.
 */
class Member extends Customer
{
    private $userName;
    private $password;
    /** @var PaymentInfo|null created on first setPaymentInfo() call */
    private $paymentInfo;
    /** @var ContactInfo */
    private $contactInfo;
    private $age;

    function __construct($u, $p, $a, $fn, $ln, $e, $pn){
        parent::__construct();
        $this->userName = $u;
        $this->password = $p;
        $this->age = $a;
        // BUG FIX: was assigned to a local, leaving $this->contactInfo null
        // and every change*/getAllContactInfo call a fatal error.
        $this->contactInfo = new ContactInfo($fn, $ln, $e, $pn);
    }

    /** Creates the payment info on first use, updates it afterwards. */
    function setPaymentInfo($cn, $em, $ey, $n){
        // BUG FIX: operated on an undefined local instead of the property,
        // so payment info was never actually stored.
        if($this->paymentInfo instanceof PaymentInfo){
            $this->paymentInfo->setAllPaymentInfo($cn, $em, $ey, $n);
        }
        else{
            $this->paymentInfo = new PaymentInfo($cn, $em, $ey, $n);
        }
    }

    function getUsername(){
        return $this->userName;
    }

    function changePassword($newPassword){
        // BUG FIX: the original right-hand side was corrupted ("$<PASSWORD>"),
        // which is a parse error; store the supplied new password.
        $this->password = $newPassword;
    }

    function changeEmail($newEmail){
        // BUG FIX (here and below): delegated to an undefined local instead
        // of $this->contactInfo.
        $this->contactInfo->setEmail($newEmail);
    }

    function changePhoneNumber($newNumber){
        $this->contactInfo->setPhoneNumber($newNumber);
    }

    function changeAddress($newAddress){
        $this->contactInfo->setAddress($newAddress);
    }

    function getAllContactInfo(){
        return $this->contactInfo->getContactInfo();
    }

    function getAllPaymentInfo(){
        return $this->paymentInfo->getPaymentInfo();
    }

    function getAge(){
        return $this->age;
    }

    /** @return bool whether the given username matches (loose comparison kept). */
    function checkUserName($inputUserName){
        return $this->userName == $inputUserName;
    }

    /** @return bool whether the given password matches (loose comparison kept). */
    function checkPassword($inputPassword){
        return $this->password == $inputPassword;
    }
}
?>
| 447cff6372dbc06de66693d3cadcf92003497959 | [
"Markdown",
"PHP"
] | 16 | PHP | Akapoor99/awesomeTravels | b3eda4a781326ed95f8eb8c1aedb69eccfc2bd7c | 2f55899405f630fcae3822408e239c34ae4e6d3b |
refs/heads/master | <repo_name>sg-object/sg-dynamic-ehcache<file_sep>/src/main/resources/messages/error.properties
0001=Not Found Cache!!!!
0002=Not Found Cache Data!!!!
0003=This Cache Name is Using. Please Input Another Cache Name!!!!
9999=Internal Server Error!!!!<file_sep>/src/main/resources/application.properties
ehcache.cacheNames.cacheName=cacheNames
ehcache.csv.path=/csv/
ehcache.diskStore.suffix=.data, .index<file_sep>/src/main/java/com/sg/dynamic/ehcache/ServletInitializer.java
package com.sg.dynamic.ehcache;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
/**
 * Enables deploying this Spring Boot application as a WAR inside an external
 * servlet container by hooking the application's primary source into the
 * container bootstrap.
 */
public class ServletInitializer extends SpringBootServletInitializer {

	/**
	 * Points the servlet-container bootstrap at the main application class so
	 * the container builds the same context as a standalone run would.
	 */
	@Override
	protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
		return application.sources(SgDynamicEhcacheApplication.class);
	}

}
<file_sep>/README.md
# sg-dynamic-ehcache
## 주요 기능
* Rest API를 사용한 Ehcache 조작
* CSV File 기반 cache 초기화
## Version
* Spring Boot : 2.2.6.RELEASE
* Ehcache : 2.10.6
* Swagger : 2.9.2
## 사전 설정
* ehcache.xml 수정
Path : /sg-dynamic-ehcache/src/main/resources/config/ehcache.xml
수정 : cache data가 물리적 파일로 저장되는 diskStore Path 수정
* application.properties 수정
Path : /sg-dynamic-ehcache/src/main/resources/application.properties
수정 : CSV File이 임시 저장 되는 ehcache.csv.path 수정
## 테스트
Application 구동 후 Swagger URL 접속
URL : http://localhost:8080/swagger-ui.html
## ScreenShot



<file_sep>/src/main/java/com/sg/dynamic/ehcache/builder/CacheBuilder.java
package com.sg.dynamic.ehcache.builder;
import java.io.File;
import java.util.Optional;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.ehcache.EhCacheCacheManager;
import org.springframework.stereotype.Component;
import com.sg.dynamic.ehcache.common.exception.NotFoundCacheException;
import com.sg.dynamic.ehcache.common.exception.UsedCacheNameException;
import net.sf.ehcache.Cache;
import net.sf.ehcache.DiskStorePathManager;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.Element;
import net.sf.ehcache.config.CacheConfiguration;
/**
 * Creates, removes and looks up Ehcache caches at runtime.
 *
 * A dedicated bookkeeping cache (name from {@code ehcache.cacheNames.cacheName})
 * records the names of all dynamically created caches so they can be
 * re-created from their disk stores after a restart.
 */
@Component
public class CacheBuilder {

    @Autowired
    private EhCacheCacheManager cacheManager;

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    /** Name of the bookkeeping cache whose keys are the dynamic cache names. */
    @Value("${ehcache.cacheNames.cacheName}")
    private String cacheNames;

    /** Configured disk-store file suffixes (e.g. ".data", ".index"). */
    @Value("${ehcache.diskStore.suffix}")
    private String[] suffix;

    /**
     * Re-creates every cache recorded in the bookkeeping cache so that
     * disk-persisted caches survive application restarts.
     */
    @PostConstruct
    private void initCache(){
        Ehcache cache = getEhcache(cacheNames);
        cache.getKeys().forEach(cacheName -> {
            logger.info("========= init cache : {} =========", cacheName);
            createCache(cacheName.toString());
        });
    }

    /**
     * Registers a new cache name (rejecting duplicates and the bookkeeping
     * cache's own name) and then creates the cache itself.
     */
    public void createCacheAndAddName(String cacheName){
        checkNameCache(cacheName);
        addCacheName(cacheName);
        createCache(cacheName);
    }

    /**
     * Removes a dynamic cache: unregisters its name, clears its entries,
     * detaches it from the manager and deletes its disk-store files.
     *
     * @throws NotFoundCacheException if the name was never registered
     * @throws UsedCacheNameException if the bookkeeping cache itself is targeted
     */
    public void removeCache(String cacheName){
        checkNameCache(cacheName);
        Ehcache ehcache = getEhcache(cacheNames);
        if(ehcache.get(cacheName) != null){
            ehcache.remove(cacheName);
            Ehcache target = getEhcache(cacheName);
            target.removeAll();
            cacheManager.getCacheManager().removeCache(cacheName);
            logger.info("========= remove cache : {} =========", cacheName);
            // Also delete the files the disk store persisted for this cache.
            DiskStorePathManager diskStorePathManager = cacheManager.getCacheManager().getDiskStorePathManager();
            for(String value : suffix){
                File file = diskStorePathManager.getFile(cacheName, value);
                if(file.exists()){
                    logger.info("========= remove cache file : {} =========", file.getName());
                    file.delete();
                }
            }
        }else{
            throw new NotFoundCacheException();
        }
    }

    /**
     * Looks up a live cache by name.
     *
     * @throws NotFoundCacheException if no cache with that name exists
     */
    public Ehcache getEhcache(String cacheName){
        return Optional.ofNullable(cacheManager.getCacheManager().getCache(cacheName)).orElseThrow(() -> new NotFoundCacheException());
    }

    /** @return whether {@code cacheName} is registered in the bookkeeping cache */
    public boolean checkCacheName(String cacheName){
        Ehcache cache = getEhcache(cacheNames);
        // IDIOM: the former "cond ? true : false" was redundant.
        return cache.get(cacheName) != null;
    }

    /** Builds an eternal, disk-persistent cache with project defaults and adds it to the manager. */
    private void createCache(String cacheName){
        CacheConfiguration cacheConfig = new CacheConfiguration();
        cacheConfig.eternal(true);
        cacheConfig.maxEntriesLocalHeap(10000);
        cacheConfig.name(cacheName);
        cacheConfig.setDiskPersistent(true);
        cacheConfig.overflowToDisk(true);
        cacheManager.getCacheManager().addCache(new Cache(cacheConfig));
        logger.info("========= create cache : {} =========", cacheName);
    }

    /** Records the name in the bookkeeping cache; rejects names already in use. */
    private void addCacheName(String cacheName){
        Ehcache ehcache = getEhcache(cacheNames);
        if(ehcache.get(cacheName) == null){
            ehcache.put(new Element(cacheName, cacheName));
        }else{
            throw new UsedCacheNameException();
        }
    }

    /** Rejects any operation that would target the bookkeeping cache itself. */
    private void checkNameCache(String cacheName){
        if(cacheNames.equals(cacheName)){
            throw new UsedCacheNameException();
        }
    }
}
<file_sep>/src/main/java/com/sg/dynamic/ehcache/common/exception/ExceptionControllerAdvice.java
package com.sg.dynamic.ehcache.common.exception;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.support.MessageSourceAccessor;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import com.sg.dynamic.ehcache.common.model.ExceptionResponse;
/**
 * Centralised REST exception handling: converts the application's exceptions
 * into an {@code ExceptionResponse} payload carrying an error code and its
 * localised message, paired with an appropriate HTTP status.
 */
@RestControllerAdvice
public class ExceptionControllerAdvice {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Resolves error codes (e.g. "0001") to messages from the message bundle.
    @Autowired
    private MessageSourceAccessor messageSourceAccessor;

    /** Fallback: any unanticipated exception becomes a 500 with the generic error code. */
    @ExceptionHandler(Exception.class)
    @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
    public ExceptionResponse handleInternalServerErrorException(HttpServletResponse response, Exception e){
        logger.error("Internal Server Error Exception has caught.", e);
        return getExceptionResponse(AbstractException.INTERNAL_SERVER_ERROR);
    }

    /** 404 when a requested cache does not exist. */
    @ExceptionHandler(NotFoundCacheException.class)
    @ResponseStatus(HttpStatus.NOT_FOUND)
    public ExceptionResponse handleNotFoundCacheException(HttpServletResponse response, NotFoundCacheException e){
        logger.error("Not Found Cache Exception has caught.", e);
        return getExceptionResponse(e.getErrorCode());
    }

    /** 404 when a cache exists but holds no entry for the requested key. */
    @ExceptionHandler(NotFoundCacheDataException.class)
    @ResponseStatus(HttpStatus.NOT_FOUND)
    public ExceptionResponse handleNotFoundCacheDataException(HttpServletResponse response, NotFoundCacheDataException e){
        logger.error("Not Found Cache Data Exception has caught.", e);
        return getExceptionResponse(e.getErrorCode());
    }

    /** 500 when a client tries to create a cache under a name that is already taken. */
    @ExceptionHandler(UsedCacheNameException.class)
    @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
    public ExceptionResponse handleUsedCacheNameException(HttpServletResponse response, UsedCacheNameException e){
        logger.error("Used Cache Name Exception has caught.", e);
        return getExceptionResponse(e.getErrorCode());
    }

    // Builds the response payload by pairing the code with its localised message.
    private ExceptionResponse getExceptionResponse(String errorCode){
        return new ExceptionResponse(errorCode, messageSourceAccessor.getMessage(errorCode));
    }
}
<file_sep>/src/main/java/com/sg/dynamic/ehcache/SgDynamicEhcacheApplication.java
package com.sg.dynamic.ehcache;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot entry point for the dynamic Ehcache service.
 */
@SpringBootApplication
public class SgDynamicEhcacheApplication {

	/** Boots the embedded container and the Spring application context. */
	public static void main(String[] args) {
		SpringApplication.run(SgDynamicEhcacheApplication.class, args);
	}

}
| 73e91b911134c4c2ee7a9ab7d194f2ea4fb6d37a | [
"Markdown",
"Java",
"INI"
] | 7 | INI | sg-object/sg-dynamic-ehcache | 3f73cb437804d25261aa4d3ff84c2c0233fb714a | fbf3b9dd80ecef393914ba57364aa6c31a36d393 |
refs/heads/master | <repo_name>kyle45/medium<file_sep>/toutiao/src/main/java/com/nowcoder/controller/IndexController.java
package com.nowcoder.controller;
import com.nowcoder.model.User;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Demo MVC controller showing path-variable binding, request-parameter
 * defaults, and model population for a template view.
 */
@Controller
public class IndexController {

    /**
     * Echoes the bound path variables and request parameters, e.g.
     * GET /profile/1/2?type=3&amp;key=abc -&gt; "GID{1}, UID{2}, TYPE{3}, KEY{abc}".
     */
    @RequestMapping(path = {"/profile/{groupId}/{userId}"})
    @ResponseBody
    public String profile(@PathVariable("groupId") int groupId,
                          @PathVariable("userId") int userId,
                          @RequestParam(value = "type", defaultValue = "1") int type,
                          @RequestParam(value = "key", defaultValue = "nowcoder") String key) {
        return String.format("GID{%d}, UID{%d}, TYPE{%d}, KEY{%s}", groupId, userId, type, key);
    }

    /** Plain-text landing page. */
    @RequestMapping(path = {"/"})
    @ResponseBody
    public String index() {
        return "Hello, LYB";
    }

    /**
     * Populates sample model data (a scalar, a list, a map of squares, and a
     * User) and renders the "news" template.
     */
    @RequestMapping(value = {"/vm"})
    public String news(Model model) {
        // IDIOM: Arrays.asList takes varargs; wrapping in an explicit array is redundant.
        List<String> colors = Arrays.asList("RED", "GREEN", "BLUE");
        // IDIOM: diamond operator instead of repeating the type arguments.
        Map<String, String> map = new HashMap<>();
        for (int i = 0; i < 4; ++i) {
            map.put(String.valueOf(i), String.valueOf(i * i));
        }
        model.addAttribute("value1", "vv1");
        model.addAttribute("colors", colors);
        model.addAttribute("map", map);
        model.addAttribute("user", new User("Jim"));
        return "news";
    }
}
| 3568ce17a10c0325e1c99d3919ea5f6d5e438b46 | [
"Java"
] | 1 | Java | kyle45/medium | 07230b66db3909c8ce64f75c8b8f96d4caadea8d | edd80f8e343853aeca4c1799f8bb165aff991cb2 |
refs/heads/master | <repo_name>MasriMCP/Aisha<file_sep>/src/tableInfo/LifeEventsInfo.java
package tableInfo;
/**
 * Row model for the "life events" table: a notable event in the patient's
 * life, the age at which it occurred, and its outcome.
 */
public class LifeEventsInfo {

    // Kept package-private to match the original declaration.
    String event;
    String result;
    int age;

    /**
     * @param event  description of the life event
     * @param age    patient's age when the event happened
     * @param result outcome / effect of the event
     */
    public LifeEventsInfo(String event, int age, String result) {
        this.event = event;
        this.age = age;
        this.result = result;
    }

    public String getEvent() {
        return event;
    }

    public int getAge() {
        return age;
    }

    public String getResult() {
        return result;
    }

    public void setEvent(String event) {
        this.event = event;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public void setResult(String result) {
        this.result = result;
    }
}
<file_sep>/src/tableInfo/PreviousHistoryInfo.java
package tableInfo;
/**
 * Row model for the "previous history" table: a past medical/psychological
 * condition, the age of onset, its duration, the treatment given and the
 * result of that treatment.
 */
public class PreviousHistoryInfo {

    private String condition;
    private String treatment;
    private String result;
    private int age;
    private int duration;

    /**
     * @param condition the past condition/disease
     * @param age       age of onset
     * @param duration  how long the condition lasted
     * @param treatment treatment that was applied
     * @param result    outcome of the treatment
     */
    public PreviousHistoryInfo(String condition, int age, int duration, String treatment, String result) {
        this.condition = condition;
        this.age = age;
        this.duration = duration;
        this.treatment = treatment;
        this.result = result;
    }

    public String getCondition() {
        return condition;
    }

    public String getTreatment() {
        return treatment;
    }

    public String getResult() {
        return result;
    }

    public int getAge() {
        return age;
    }

    public int getDuration() {
        return duration;
    }

    public void setCondition(String condition) {
        this.condition = condition;
    }

    public void setTreatment(String treatment) {
        this.treatment = treatment;
    }

    public void setResult(String result) {
        this.result = result;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public void setDuration(int duration) {
        this.duration = duration;
    }
}
<file_sep>/src/controllers/Controller.java
/**
*
*/
package controllers;
import com.mysql.cj.util.StringUtils;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.control.TextField;
import javafx.stage.Stage;
import main.AppointmentPane;
import tableInfo.*;
import java.io.File;
import java.io.IOException;
import java.sql.*;
import java.net.URL;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.util.HashMap;
import java.util.ResourceBundle;
/**
* <h1>Controller</h1>
* this class acts as our main controller, it handles the following:
* <ul>
* <li>patient record keeping</li>
* <ul>
* <li>viewing patient records</li>
* <li>adding new Records</li>
* </ul>
* <li>patient tracking</li>
* <ul>
* <li>viewing appointments</li>
* <li>adding appointments</li>
* <li>viewing visits</li>
* <li>adding visits</li>
* </ul>
* </ul>
*/
public class Controller implements Initializable {
public Connection con;//database connection
public Statement stm;
public ResultSet rs;
@FXML
TitledPane showRecordsTitledPane;
@FXML
TableView<PatientInfo> recordSearchTable;
@FXML
TextField recordAddPNameT;
@FXML
TextField recordSearchT;
@FXML
Button recordSearchB;
@FXML
TitledPane addRecordsTitledPane;
@FXML
TextField recordAddCGNameT;
@FXML
TextField recordAddRelT;
@FXML
TextField recordAddCGBDDay;
@FXML
TextField recordAddCGBDMonth;
@FXML
TextField recordAddCGBDYear;
@FXML
RadioButton pGenderF;
@FXML
ToggleGroup patientGender;
@FXML
RadioButton pGenderM;
@FXML
RadioButton cgGenderF;
@FXML
ToggleGroup careGiverGender;
@FXML
RadioButton cgGenderM;
@FXML
RadioButton mStatusB;
@FXML
ToggleGroup mStatus;
@FXML
RadioButton mStatusM;
@FXML
RadioButton mStatusD;
@FXML
RadioButton mStatusW;
@FXML
RadioButton mStatusH;
@FXML
RadioButton mStatusC;
@FXML
TextField recordAddBirthPlaceT;
@FXML
RadioButton residence1M;
@FXML
ToggleGroup citizen1;
@FXML
RadioButton residence1R;
@FXML
CheckBox recordAddResidence2CB1;
@FXML
CheckBox recordAddResidence2CB2;
@FXML
RadioButton adressN;
@FXML
ToggleGroup residence;
@FXML
RadioButton adressG;
@FXML
RadioButton adressD;
@FXML
RadioButton adressR;
@FXML
RadioButton adressK;
@FXML
RadioButton famTypeE;
@FXML
ToggleGroup famType;
@FXML
RadioButton famTypeA;
@FXML
RadioButton famTypeN;
@FXML
RadioButton residenceTypeO;
@FXML
ToggleGroup residenceTypeGroup;
@FXML
RadioButton residenceTypeR;
@FXML
RadioButton residenceTypeN;
@FXML
RadioButton privatePhoneY;
@FXML
ToggleGroup phoneType;
@FXML
RadioButton privatePhoneN;
@FXML
RadioButton educationI;
@FXML
ToggleGroup education;
@FXML
RadioButton educationP;
@FXML
RadioButton educationS;
@FXML
RadioButton educationU;
@FXML
TextField recordAddEducationYearsT;
@FXML
RadioButton occupationalStatusE;
@FXML
ToggleGroup jobGroup;
@FXML
RadioButton occupationalStatusN;
@FXML
RadioButton occupationalStatusU;
@FXML
TextField phoneTypeNameT;
@FXML
RadioButton consentY;
@FXML
ToggleGroup consent;
@FXML
RadioButton consentN;
@FXML
RadioButton interviewPlaceC;
@FXML
CheckBox recordAddAttendedCB;
@FXML
ToggleGroup interviewLocation;
@FXML
RadioButton interviewPlaceH;
@FXML
RadioButton interviewPlaceI;
@FXML
TextField recordAddNationalIDT;
@FXML
CheckBox recordAddFamAttendedCB;
@FXML
TextField personalHistoryPregnancy;
@FXML
TextField personalHistoryChildhood;
@FXML
TextField personalHistoryTeens;
@FXML
TextField personalHistoryMarriage;
@FXML
TextField personalHistoryWork;
@FXML
TextField personalHistoryHobbies;
@FXML
TableView<PreviousHistoryInfo> recordAddPreviousHistory;
@FXML
TableView<LifeEventsInfo> recordAddLifeEvents;
@FXML
TableView<FamHistoryInfo> famMedHistory;
@FXML
TextField recordAddOccupationT;
@FXML
TextField residenceType;
@FXML
TextField recordAddFamAttendedT;
@FXML
TextField recordAddPhoneT;
@FXML
TextField recordAddMHPName;
@FXML
TextField recordAddSalaryT;
@FXML
CheckBox recordAddMessagesCB;
@FXML
CheckBox recordAddVisitsCB;
@FXML
CheckBox recordAddMeetFamCB;
@FXML
RadioButton selfHarmY;
@FXML
ToggleGroup selfHarmGroup;
@FXML
RadioButton selfHarmN;
@FXML
RadioButton othersHarmY;
@FXML
ToggleGroup peopleHarmGroup;
@FXML
RadioButton othersHarmN;
@FXML
RadioButton healthDeterY;
@FXML
ToggleGroup mentalDeterGroup;
@FXML
RadioButton healthDeterN;
@FXML
RadioButton needOfProtectionY;
@FXML
ToggleGroup protectionGroup;
@FXML
RadioButton needOfProtectionN;
@FXML
Button recordAddGAD;
@FXML
Button recordAddPHQ;
@FXML
Button recordAddWHO;
@FXML
Button recordAddGHQ;
@FXML
TextField recordAddFileNumberT;
@FXML
ToggleGroup ghq1;
@FXML
ToggleGroup ghq2;
@FXML
ToggleGroup ghq3;
@FXML
ToggleGroup ghq4;
@FXML
ToggleGroup ghq5;
@FXML
ToggleGroup ghq6;
@FXML
ToggleGroup ghq7;
@FXML
ToggleGroup ghq8;
@FXML
ToggleGroup ghq9;
@FXML
ToggleGroup ghq10;
@FXML
ToggleGroup ghq11;
@FXML
ToggleGroup ghq12;
@FXML
ToggleGroup who1;
@FXML
ToggleGroup who2;
@FXML
ToggleGroup who3;
@FXML
ToggleGroup who4;
@FXML
ToggleGroup who5;
@FXML
ToggleGroup who6;
@FXML
ToggleGroup who7;
@FXML
ToggleGroup who8;
@FXML
ToggleGroup who10;
@FXML
ToggleGroup who11;
@FXML
ToggleGroup who12;
@FXML
TextField recordAddBDYear;
@FXML
TextField recordAddBDMonth;
@FXML
TextField recordAddBDDay;
@FXML
TextField addAppointmentFileNumberT;
@FXML
TextField addAppointmentPlaceT;
@FXML
TextField addAppointmentYear;
@FXML
TextField addAppointmentMonth;
@FXML
TextField addAppointmentDay;
@FXML
TextField addAppointmentHour;
@FXML
TextField addAppointmentMinute;
@FXML
TextField addAppointmentMHP;
@FXML
TabPane tabPane;
@FXML
Accordion recordsA;
@FXML
ScrollPane recordAddScroll;
@FXML
ListView<AppointmentPane> appointmentTodayList;
@FXML
TableView<AppointmentInfo> appointmentSearchTable;
@FXML
RadioButton appointmentAddAM;
@FXML
CheckBox recordAddOthersAttendedCB;
@FXML
TextField recordAddOthersAttendedT;
@FXML
RadioButton appointmentAddPM;
@FXML
Accordion appointmentsA;
@FXML
TitledPane showAppointmentsTitledPane;
@FXML
TextField addVisitPlace;
@FXML
TextField addVisitMHP;
@FXML
TextField addVisitFilenumber;
@FXML
TextField visitAddDay;
@FXML
TextField addVisitMonth;
@FXML
TextField addVisitYear;
@FXML
TextField visitAddHour;
@FXML
TextField addVisitMinute;
@FXML
CheckBox addVisitCB;
@FXML
TextField addVisitS;
@FXML
TextField addVisitO;
@FXML
TextField addVisitA;
@FXML
TextField addVisitP;
@FXML
TableView<TreatmentInfo> addVisitTable;
@FXML
TextField visitAddMed;
@FXML
TextField visitAddDose;
@FXML
TextField visitAddFreq;
@FXML
TextField visitAddDur;
@FXML
TextField recordAddInstitute;
@FXML
TextField visitAddName;
@FXML
TitledPane visitShowAllTitledPane;
@FXML
TextField recordAddComplaintPT;
@FXML
TextField recordAddComplaintCGT;
@FXML
TextField recordAddComplaintFT;
@FXML
TextArea recordAddCurrentHistory;
@FXML
TextField recordAddMSE1;
@FXML
TextField recordAddMSE2;
@FXML
TextField recordAddMSE3;
@FXML
TextField recordAddMSE4;
@FXML
TextField recordAddMSE5;
@FXML
TextField recordAddMSE6;
@FXML
TextField recordAddMSE7;
@FXML
TextField recordAddMSE8;
@FXML
TextField recordAddMSE9;
@FXML
TextField recordAddMSE10;
@FXML
TextField recordAddMSE11;
@FXML
TextField recordAddMSE12;
@FXML
RadioButton addRecordCounseling;
@FXML
ToggleGroup psychTypeGroup;
@FXML
RadioButton addRecordCBT;
@FXML
RadioButton addRecordBehave;
@FXML
RadioButton addRecordPS;
@FXML
RadioButton recordAddTherapyOther;
@FXML
TextField addRecordTherapyOtherT;
@FXML
TextField addRecordPsychMHPName;
@FXML
TextField addRecordPsychSessions;
@FXML
TextField addRecordPsychFirstDateDay;
@FXML
TextField addRecordPsychFirstDateMonth;
@FXML
TextField addRecordPsychFirstDateYear;
@FXML
TextField addRecordPsychPlace;
@FXML
TextArea summary;
@FXML
CheckBox recordAddPAttendedCB;
@FXML
CheckBox recordAddCGAttendedCB;
@FXML
TextField recordAddFamHistoryRel;
@FXML
TextField recordAddFamHistoryCon;
@FXML
TextField recordAddFamHistoryTreatment;
@FXML
TextField recordAddFamHistoryStatus;
@FXML
TextField previousHistoryDisease;
@FXML
TextField previousHistoryAge;
@FXML
TextField previousHistoryDuration;
@FXML
TextField previousHistoryTreatment;
@FXML
TextField previousHistoryResult;
@FXML
TextField recordAddLifeEventsEvent;
@FXML
TextField recordAddLifeEventsAge;
@FXML
TextField recordAddLifeEventsResult;
@FXML
TextField recordAddConflicts;
@FXML
TextField recordAddSES;
private final static HashMap<String, String> addressMap = new HashMap<>();//maps address chars stored in db to thier actual names
private final static HashMap<String, String> genderMap = new HashMap<>();// M -> male, F -> female
private PreparedStatement recordAdd,
recordSearchName, //searching records by name
recordSearchFileNum,//searching records by file number
recordAddPreviousHistorySTM,
recordAddLifeEventsSTM,
recordAddFamHistorySTM,
recordGetAll,//gets all the records
appointmentAdd,//adds an appointment
appointmentSearch,//searching appointments
appointmentGetAll,//gets all appointments
visitAdd,//adds a visit
treatmentAdd;
private ObservableList<PreviousHistoryInfo> ph = FXCollections.observableArrayList();
private ObservableList<LifeEventsInfo> le = FXCollections.observableArrayList();
private ObservableList<FamHistoryInfo> fh = FXCollections.observableArrayList();
private ObservableList<TreatmentInfo> treatmentList = FXCollections.observableArrayList();
/**
* deletes a patient record from the database.
* <b>note:</b> allowed only if logged in as a supervisor
* @throws SQLException
*/
@FXML
void deleteRecord() throws SQLException {
int id = recordSearchTable.getSelectionModel().getSelectedItem().getFileNumber();
stm.execute("delete from previous_history where Patient_filenumber=" + id);
stm.execute("delete from fam_med_history where Patient_filenumber=" + id);
stm.execute("delete from life_events where Patient_filenumber=" + id);
stm.execute("delete from plan_medical_treatment where Patient_filenumber=" + id);
stm.execute("delete from patient where filenumber =" + id);
updateRecordSearch();
}
/**
* adds a new patient record to the database.
*/
@FXML
void addNewRecord() {
new Thread(new AddRecordTask()).start();
}
@FXML
void addNewTestGAD() throws Exception {
}
@FXML
void addNewTestGHQ() {
}
@FXML
void addNewTestPHQ() {
}
@FXML
void addNewTestWHO() {
}
/**
* adds a new appointment for a patient to the database.
* a counselor can only add an appointment to the patients they entered.
* a supervisor can add an appointment for any patient.
* @throws SQLException
*/
@FXML
void addAppointment() throws SQLException {
appointmentAdd.setInt(1, Integer.parseInt(addAppointmentFileNumberT.getText()));
appointmentAdd.setString(2, dateFormat(addAppointmentYear.getText(), addAppointmentMonth.getText(), addAppointmentDay.getText(), String.valueOf(Integer.parseInt(addAppointmentHour.getText()) + (appointmentAddAM.isSelected() ? 0 : 12))
, addAppointmentMinute.getText()));
appointmentAdd.setString(3, addAppointmentMHP.getText());
appointmentAdd.setString(4, addAppointmentPlaceT.getText());
appointmentAdd.execute();
updateAppointmentSearch();
}
/**
* adds a new appointment for a patient to the database.
* a counselor can only add a visit to the patients they entered.
* a supervisor can add a visit for any patient.
* @throws SQLException
*/
@FXML
void addVisit() throws SQLException {
if (addVisitCB.isSelected()) {
appointmentAdd.setInt(1, Integer.parseInt(addVisitFilenumber.getText()));
appointmentAdd.setString(2, dateFormat(addVisitYear.getText(), addVisitMonth.getText(), visitAddDay.getText()
, visitAddHour.getText(), addVisitMinute.getText()));
appointmentAdd.setString(3, addVisitMHP.getText());
appointmentAdd.setString(4, addVisitPlace.getText());
appointmentAdd.execute();
}
visitAdd.setInt(1, Integer.parseInt(addVisitFilenumber.getText()));
visitAdd.setString(2, dateFormat(addVisitYear.getText(), addVisitMonth.getText(), visitAddDay.getText()
, visitAddHour.getText(), addVisitMinute.getText()));
visitAdd.setString(3, addVisitS.getText());
visitAdd.setString(4, addVisitO.getText());
visitAdd.setString(5, addVisitA.getText());
visitAdd.setString(6, addVisitP.getText());
visitAdd.execute();
for (TreatmentInfo i : treatmentList) {
treatmentAdd.setString(1, i.getMed());
treatmentAdd.setString(2, i.getDose());
treatmentAdd.setString(3, i.getDur());
treatmentAdd.setInt(4, i.getFreq());
treatmentAdd.setString(5, i.getName());
treatmentAdd.setInt(6, Integer.parseInt(addVisitFilenumber.getText()));
treatmentAdd.setString(7, dateFormat(addVisitYear.getText(), addVisitMonth.getText(), visitAddDay.getText()
, visitAddHour.getText(), addVisitMinute.getText()));
treatmentAdd.execute();
}
}
/**
* retrieves data from the db and fills the table in the "view records" tab
* @throws SQLException
*/
@FXML
void searchRecords() throws SQLException {
String q = recordSearchT.getText();
if (StringUtils.isStrictlyNumeric(q)) {
recordSearchFileNum.setString(1, q + "%");
rs = recordSearchFileNum.executeQuery();
} else {
recordSearchName.setString(1, "%" + q + "%");
rs = recordSearchName.executeQuery();
}
ObservableList<PatientInfo> data;
data = FXCollections.observableArrayList();
while (rs.next()) {
data.add(new PatientInfo(rs.getInt(1), rs.getString(2),
genderMap.get(rs.getString(3)),
getAge(rs.getString(4)),
addressMap.get(rs.getString(5))));
}
Platform.runLater(() -> {
recordSearchTable.setItems(data);
});
}
@Override
public void initialize(URL location, ResourceBundle resources) {
//viewRecord();
new Thread(() -> {
addressMap.put("N", "شمال غزة");
addressMap.put("G", "غزة");
addressMap.put("D", "دير البلح");
addressMap.put("R", "رفح");
addressMap.put("K", "خانيونس");
genderMap.put("M", "ذكر");
genderMap.put("F", "انثى");
try {
//********************************************establish db connection***************************************************
Class.forName("com.mysql.cj.jdbc.Driver");
con = DriverManager.getConnection("jdbc:mysql://localhost:3306", "MCP", "dbPass");
stm = con.createStatement();
stm.execute("use aisha");
//**********************************************prepared statements****************************************************
recordAdd = con.prepareStatement("INSERT INTO `Aisha`.`Patient` (`filenumber`, `dateAdded`," +
" `name`, `gender`, `maritalstatus`, `birthdate`, `placeofbirth`, `cg_name`, `cg_gender`," +
" `cg_birthdate`, `cg_relationship`, `cg_attendeded`, `nationalid`, `residence1`, `residence2`,`residence3`," +
" `address`, `famtype`, `residencetype`, `phonenumber`, `privatephone`, `agreemessages`," +
" `agreehousevisit`, `agreemeetfam`, `education`, `yearsofeducation`, `occupation`, `occupationalstatus`," +
" `salary`, `mhp_name`, `consent`, `patient_attended`, `cg_attended`, `fam_attended`, `others_attended`," +
" `interview_place`, `chiefcomplaint_patient`, `chiefcomplaint_cg`, `chiefcomplaint_fam`, `current_history`," +
" `conflicts`, `socioeconomic_status`, `looks`, `behaviour`, `mood`, `talks`, `knowledge`, `awareness`," +
" `thinking1`, `thinking2`, `awareness2`, `awareness3`, `judgement`, `control`, `self_harm`," +
" `others_harm`, `health_deter`, `need_of_protection`, `psych_type`, `psych_mhp_name`," +
" `psych_number_of_sessions`, `psych_first_date`, `psych_place`, `summary`, `pregnancy`, `childhood`," +
" `teens`, `marriage`, `work`, `hobbies`)" +
" VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
" ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
" ?, ?, ?, ?);");
recordSearchName = con.prepareStatement("select filenumber,name,gender,birthdate,address from patient where name like ?");
recordSearchFileNum = con.prepareStatement("select filenumber,name,gender,birthdate,address from patient where filenumber = ?");
recordAddPreviousHistorySTM = con.prepareStatement("INSERT INTO `Aisha`.`previous_history` (`disease`, `age`, `duration`, `treatment`, `result`, `Patient_filenumber`) VALUES (?, ?, ?, ?, ?, ?);");
recordAddLifeEventsSTM = con.prepareStatement("INSERT INTO `Aisha`.`life_events` (`event`, `age`, `result`, `Patient_filenumber`) VALUES (?, ?, ?, ?);");
recordAddFamHistorySTM = con.prepareStatement("INSERT INTO `Aisha`.`fam_med_history` (`relationship`, `disease`, `treatment`, `result`, `Patient_filenumber`) VALUES (?, ?, ?, ?, ?);");
appointmentSearch = con.prepareStatement("select patient.name,appointment.date,appointment.place,appointment.mhp_name from appointment inner join " +
"patient on patient.filenumber=appointment.patient_filenumber where patient.name ");//TODO not done
recordGetAll = con.prepareStatement(" select filenumber,name,gender,birthdate,address from patient;");
appointmentAdd = con.prepareStatement("insert into appointment(patient_filenumber,date_added,date,mhp_name,place) values(?,now(),?,?,?)");
appointmentGetAll = con.prepareStatement("select patient.name,appointment.date,appointment.place,appointment.mhp_name from appointment inner join " +
"patient on patient.filenumber=appointment.patient_filenumber");
visitAdd = con.prepareStatement("insert into visit(Appointment_Patient_filenumber,Appointment_date,subjective,objective," +
"assessment,plan)" +
" values(?,?,?,?,?,?)");
treatmentAdd = con.prepareStatement("insert into visit_medical_treatment(name,dose,duration,frequency,doctor_name," +
"visit_Appointment_Patient_filenumber,visit_Appointment_date) values(?,?,?,?,?,?,?)");
//***********************************************set up tables and lists**************************************************
rs = recordGetAll.executeQuery();
ObservableList<PatientInfo> recordData;
ObservableList<AppointmentInfo> appointmentData;
recordData = FXCollections.observableArrayList();
appointmentData = FXCollections.observableArrayList();
while (rs.next()) {
recordData.add(new PatientInfo(rs.getInt(1), rs.getString(2),
genderMap.get(rs.getString(3)),
getAge(rs.getString(4)),
addressMap.get(rs.getString(5))));
}
rs = appointmentGetAll.executeQuery();
while (rs.next()) {
appointmentData.add(new AppointmentInfo(rs.getString(1), rs.getString(2), rs.getString(3),
rs.getString(4)));
}
ObservableList<AppointmentPane> appointmentPaneList = FXCollections.observableArrayList();
rs = stm.executeQuery("select patient.name,appointment.date,appointment.place,appointment.mhp_name from appointment inner join" +
" patient on patient.filenumber = appointment.patient_filenumber where date(appointment.date) = curdate() " +
"and now()< appointment.date order by date;");
while (rs.next()) {
appointmentPaneList.add(new AppointmentPane(new AppointmentInfo(rs.getString(1), rs.getString(2),
rs.getString(3), rs.getString(4))));
}
Platform.runLater(() -> {
recordSearchTable.setItems(recordData);
appointmentSearchTable.setItems(appointmentData);
appointmentTodayList.setItems(appointmentPaneList);
addVisitTable.setItems(treatmentList);
recordAddPreviousHistory.setItems(ph);
recordAddLifeEvents.setItems(le);
famMedHistory.setItems(fh);
});
//********************************************other***************************************************
recordsA.setExpandedPane(showRecordsTitledPane);
appointmentsA.setExpandedPane(showAppointmentsTitledPane);
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
}
}).start();
}
private String dateFormat(String year, String month, String day) {
if(year.equals("")||month.equals("")||day.equals("")) return null;
return year + "-" + month + "-" + day + " 00:00:00";
}
private String dateFormat(String year, String month, String day, String hour, String minute) {
if(year.equals("")||month.equals("")||day.equals("")) return null;
return year + "-" + month + "-" + day + " " + hour + ":" + minute + ":00";
}
private String dateFormat(int year, int month, int day, int hour, int minute) {
return year + "-" + month + "-" + day + " " + hour + ":" + minute + ":00";
}
private int getAge(String bd) {
//used to get the age from a date string in YYYY-MM-DD format
if(bd==null) return 0;
String[] date = bd.split("-");
return Period.between(LocalDate.of(Integer.parseInt(date[0]), Integer.parseInt(date[1]), Integer.parseInt(date[2])), LocalDate.now()).getYears();
}
/**
* used to update the appointment table and today's appointments table after a new appointment has been added
*/
private void updateAppointmentSearch() {
new Thread(() -> {
try {
ObservableList<AppointmentInfo> appointmentData;
appointmentData = FXCollections.observableArrayList();
rs = appointmentGetAll.executeQuery();
while (rs.next()) {
appointmentData.add(new AppointmentInfo(rs.getString(1), rs.getString(2), rs.getString(3),
rs.getString(4)));
}
ObservableList<AppointmentPane> appointmentPaneList = FXCollections.observableArrayList();
rs = stm.executeQuery("select patient.name,appointment.date,appointment.place,appointment.mhp_name from appointment inner join" +
" patient on patient.filenumber = appointment.patient_filenumber where date(appointment.date) = curdate() " +
"and now()> appointment.date order by date;");
while (rs.next()) {
appointmentPaneList.add(new AppointmentPane(new AppointmentInfo(rs.getString(1), rs.getString(2),
rs.getString(3), rs.getString(4))));
}
Platform.runLater(() -> {
appointmentSearchTable.setItems(appointmentData);
appointmentTodayList.setItems(appointmentPaneList);
});
} catch (SQLException e) {
e.printStackTrace();
}
}).start();
}
    /**
     * Reloads every patient from the database and refreshes the "view records"
     * table after a new record has been added.
     * NOTE(review): the JDBC work here runs on the caller's thread (AddRecordTask
     * invokes this from a background thread); only the UI update is posted to the
     * FX thread via Platform.runLater.
     * @throws SQLException if the query fails
     */
    private void updateRecordSearch() throws SQLException {
        rs = recordGetAll.executeQuery();
        ObservableList<PatientInfo> data;
        data = FXCollections.observableArrayList();
        while (rs.next()) {
            // Translate stored codes (gender, address) and birth date into display values.
            data.add(new PatientInfo(rs.getInt(1), rs.getString(2),
                    genderMap.get(rs.getString(3)),
                    getAge(rs.getString(4)),
                    addressMap.get(rs.getString(5))));
        }
        Platform.runLater(() -> {
            recordSearchTable.setItems(data);
            // Scroll the "add record" form back to the top for the next entry.
            recordAddScroll.setVvalue(0);
        });
    }
    /**
     * Handler for the "scheduled visit" checkbox on the add-visit form:
     * clears the place and MHP fields and makes them editable only while the
     * box is checked.
     * NOTE(review): presumably an unchecked box means a surprise visit with no
     * pre-arranged place or assigned MHP — confirm against the FXML labels.
     */
    @FXML
    void surpriseVisit() {
        boolean temp = addVisitCB.isSelected();
        addVisitPlace.setText("");
        addVisitPlace.setEditable(temp);
        addVisitMHP.setText("");
        addVisitMHP.setEditable(temp);
    }
@FXML
public void viewRecord() {
int id = recordSearchTable.getSelectionModel().getSelectedItem().getFileNumber();
Parent root;
try {
FXMLLoader fxmlLoader = new FXMLLoader(new File("C:\\Users\\jit\\IdeaProjects\\Aisha\\Resources\\view\\view record.fxml").toURL());
Stage stage = new Stage();
stage.setTitle("معلومات المريض");
stage.setScene(new Scene(fxmlLoader.load(), 800, 600));
RecordViewController rv = fxmlLoader.getController();
rv.loadRecord(id,con);
stage.show();
}
catch (IOException e) {
e.printStackTrace();
}
}
@FXML
void visitAddTreatmentRow() {
treatmentList.add(new TreatmentInfo(visitAddMed.getText(),
visitAddDose.getText(),
Integer.parseInt(visitAddFreq.getText()),
visitAddDur.getText(),
visitAddName.getText()));
visitAddMed.setText("");
visitAddDose.setText("");
visitAddFreq.setText("");
visitAddDur.setText("");
visitAddName.setText("");
}
@FXML
void addPreviousHistory() {
ph.add(new PreviousHistoryInfo(previousHistoryDisease.getText(), Integer.parseInt(previousHistoryAge.getText()), Integer.parseInt(previousHistoryDuration.getText())
, previousHistoryTreatment.getText(), previousHistoryResult.getText()));
recordAddPreviousHistory.setItems(ph);
}
@FXML
void addLifeEvents() {
le.add(new LifeEventsInfo(recordAddLifeEventsEvent.getText(), Integer.parseInt(recordAddLifeEventsAge.getText()), recordAddLifeEventsResult.getText()));
recordAddLifeEvents.setItems(le);
}
@FXML
void addFamMedHistory() {
fh.add(new FamHistoryInfo(recordAddFamHistoryRel.getText(), recordAddFamHistoryCon.getText(), recordAddFamHistoryTreatment.getText(), recordAddFamHistoryStatus.getText()));
famMedHistory.setItems(fh);
}
    // The handlers below are wired from the FXML but have no behavior yet;
    // they exist so the FXMLLoader can resolve the onAction references.
    @FXML
    void privatePhoneAction() {
        //TODO
    }
    @FXML
    void unableToWorkAction() {
        //TODO
    }
    @FXML
    void attendInterviewFamAction() {
        //TODO
    }
    @FXML
    void attendInterviewOthersAction() {
        //TODO
    }
    @FXML
    void interviewInstituteAction() {
        //TODO
    }
    @FXML
    void psychTypeAction() {
        //TODO
    }
    @FXML
    void residence2Action() {
        //TODO
    }
class AddRecordTask implements Runnable{
@Override
public void run() {
RadioButton ad = (RadioButton) residence.getSelectedToggle();
LocalDateTime now = LocalDateTime.now();
try {
recordAdd.setInt(1, Integer.parseInt(recordAddFileNumberT.getText()));//filenum
recordAdd.setString(2, dateFormat(now.getYear(), now.getMonthValue(), now.getDayOfMonth(),
now.getHour(), now.getMinute()));//date added
recordAdd.setString(3, recordAddPNameT.getText());//name
recordAdd.setString(4,pGenderM.isSelected() ? "M" : pGenderM.isSelected() ?"F":null);//gender
RadioButton mar = (RadioButton) mStatus.getSelectedToggle();
recordAdd.setString(5, mar == mStatusB ? "B" : mar == mStatusM ? "M" : mar == mStatusD ? "D" : mar == mStatusW ? "W" : mar == mStatusC ?"C":null);//marital status
recordAdd.setString(6, dateFormat(recordAddBDYear.getText(), recordAddBDMonth.getText(), recordAddBDDay.getText()));//birth date
recordAdd.setString(7, recordAddBirthPlaceT.getText());//place of birth
recordAdd.setString(8, recordAddCGNameT.getText());//care giver name
recordAdd.setString(9, cgGenderM.isSelected() ? "M" : cgGenderM.isSelected() ?"F":null);//care giver gender
recordAdd.setString(10, dateFormat(recordAddCGBDYear.getText(),
recordAddCGBDMonth.getText(),
recordAddCGBDDay.getText()));//cg birth date
recordAdd.setString(11, recordAddRelT.getText());//cg relationship
recordAdd.setString(12, recordAddAttendedCB.isSelected() ? "T" : "F");//cg attended
recordAdd.setString(13, recordAddNationalIDT.getText());//national id
recordAdd.setString(14, residence1M.isSelected() ? "M" :residence1R.isSelected() ? "R":null);//residence 1
recordAdd.setString(15, recordAddResidence2CB1.isSelected() ? "Y" : "N");//residence 2.1
recordAdd.setString(16, recordAddResidence2CB2.isSelected() ? "Y" : "N");//residence 2.2
recordAdd.setString(17, (ad == adressN ? "N" : ad == adressG ? "G" :
ad == adressD ? "D" : ad == adressR ? "R" :ad == adressK? "K":null));//address
recordAdd.setString(18, famType.getSelectedToggle() == famTypeA ? "A" : famType.getSelectedToggle() ==
famTypeN ? "N" :famType.getSelectedToggle() ==
famTypeE?"E":null);//family type
recordAdd.setString(19, residenceTypeGroup.getSelectedToggle() == residenceTypeO ? "owner" :
residenceTypeGroup.getSelectedToggle() == residenceTypeR ? "rent" :
residenceTypeGroup.getSelectedToggle() == residenceTypeN ?
residenceType.getText():null);//residence type
recordAdd.setString(20, recordAddPhoneT.getText());//phone number
recordAdd.setString(21, privatePhoneY.isSelected() ? "Y" :
privatePhoneN.isSelected() ?phoneTypeNameT.getText():null);//phone type
recordAdd.setString(22, recordAddMessagesCB.isSelected() ? "y" : "n");
recordAdd.setString(23, recordAddVisitsCB.isSelected() ? "y" : "n");
recordAdd.setString(24, recordAddMeetFamCB.isSelected() ? "y" : "n");
RadioButton edu = (RadioButton) education.getSelectedToggle();
recordAdd.setString(25, edu == educationI ? "I" : edu == educationP ? "P" : edu == educationS ? "S"
: edu == educationU ?"U":null);//education
recordAdd.setInt(26,recordAddEducationYearsT.getText().equals("")?0: Integer.parseInt(recordAddEducationYearsT.getText()));//education years
recordAdd.setString(27, recordAddOccupationT.getText());//occupation
recordAdd.setString(28, jobGroup.getSelectedToggle() == occupationalStatusE ? "E" :
jobGroup.getSelectedToggle() == occupationalStatusN ? "N" :
jobGroup.getSelectedToggle() == occupationalStatusU ?"U":null);//occupational status
recordAdd.setFloat(29, recordAddSalaryT.getText().equals("")?0
:Float.parseFloat(recordAddSalaryT.getText()));//salary
recordAdd.setString(30, recordAddMHPName.getText());//mhp name
recordAdd.setString(31, consentY.isSelected() ? "Y" : consentN.isSelected() ?"N":null);//consent
recordAdd.setString(32, recordAddPAttendedCB.isSelected() ? "Y" : "N");//patient attended
recordAdd.setString(33, recordAddCGAttendedCB.isSelected() ? "Y" : "N");//cg attended
recordAdd.setString(34, recordAddFamAttendedCB.isSelected() ? recordAddFamAttendedT.getText() : "N");//family attended
recordAdd.setString(35, recordAddOthersAttendedCB.isSelected() ? recordAddOthersAttendedT.getText() : "N");//other attended
recordAdd.setString(36, interviewLocation.getSelectedToggle() == interviewPlaceC ? "clinic" :
interviewLocation.getSelectedToggle() == interviewPlaceH ? "home" :
interviewLocation.getSelectedToggle() == interviewPlaceI ?recordAddInstitute.getText():null);//interview location
recordAdd.setString(37, recordAddComplaintPT.getText());
recordAdd.setString(38, recordAddComplaintCGT.getText());
recordAdd.setString(39, recordAddComplaintFT.getText());
recordAdd.setString(40, recordAddCurrentHistory.getText());
recordAdd.setString(41, recordAddConflicts.getText());//conflicts
recordAdd.setString(42, recordAddSES.getText());//SES
recordAdd.setString(43, recordAddMSE1.getText());//looks
recordAdd.setString(44, recordAddMSE2.getText());
recordAdd.setString(45, recordAddMSE3.getText());
recordAdd.setString(46, recordAddMSE4.getText());
recordAdd.setString(47, recordAddMSE5.getText());
recordAdd.setString(48, recordAddMSE6.getText());
recordAdd.setString(49, recordAddMSE7.getText());
recordAdd.setString(50, recordAddMSE8.getText());
recordAdd.setString(51, recordAddMSE9.getText());
recordAdd.setString(52, recordAddMSE10.getText());
recordAdd.setString(53, recordAddMSE11.getText());
recordAdd.setString(54, recordAddMSE12.getText());//control
recordAdd.setString(55, selfHarmY.isSelected() ? "Y" : selfHarmN.isSelected() ?"N":null);
recordAdd.setString(56, othersHarmY.isSelected() ? "Y" : othersHarmN.isSelected() ?"N":null);
recordAdd.setString(57, healthDeterY.isSelected() ? "Y" : healthDeterN.isSelected() ?"N":null);
recordAdd.setString(58, needOfProtectionY.isSelected() ? "Y" : needOfProtectionN.isSelected() ?"N":null);//need for protection
RadioButton ps = (RadioButton) psychTypeGroup.getSelectedToggle();
recordAdd.setString(59, ps == addRecordBehave ? "behave" : ps == addRecordCBT ? "cbt"
: ps == addRecordCounseling ? "counseling" : ps == addRecordPS ? "ps" : addRecordTherapyOtherT.getText());//psych type
recordAdd.setString(60, addRecordPsychMHPName.getText());
recordAdd.setInt(61, addRecordPsychSessions.getText().equals("")?0
:Integer.parseInt(addRecordPsychSessions.getText()));
recordAdd.setString(62, dateFormat(addRecordPsychFirstDateYear.getText(),
addRecordPsychFirstDateMonth.getText(),
addRecordPsychFirstDateDay.getText()));
recordAdd.setString(63, addRecordPsychPlace.getText());
recordAdd.setString(64, summary.getText());
recordAdd.setString(65, personalHistoryPregnancy.getText());
recordAdd.setString(66, personalHistoryChildhood.getText());
recordAdd.setString(67, personalHistoryTeens.getText());
recordAdd.setString(68, personalHistoryMarriage.getText());
recordAdd.setString(69, personalHistoryWork.getText());
recordAdd.setString(70, personalHistoryHobbies.getText());
System.out.println(recordAdd.toString());
recordAdd.execute();
int id = Integer.parseInt(recordAddFileNumberT.getText());
for (PreviousHistoryInfo i : recordAddPreviousHistory.getItems()) {
recordAddPreviousHistorySTM.setString(1, i.getCondition());
recordAddPreviousHistorySTM.setInt(2, i.getAge());
recordAddPreviousHistorySTM.setInt(3, i.getDuration());
recordAddPreviousHistorySTM.setString(4, i.getTreatment());
recordAddPreviousHistorySTM.setString(5, i.getResult());
recordAddPreviousHistorySTM.setInt(6, id);
recordAddPreviousHistorySTM.execute();
}
for (LifeEventsInfo i : recordAddLifeEvents.getItems()) {
recordAddLifeEventsSTM.setString(1, i.getEvent());
recordAddLifeEventsSTM.setInt(2, i.getAge());
recordAddLifeEventsSTM.setString(3, i.getResult());
recordAddLifeEventsSTM.setInt(4, id);
recordAddLifeEventsSTM.execute();
}
for (FamHistoryInfo i : famMedHistory.getItems()) {
recordAddFamHistorySTM.setString(1, i.getRel());
recordAddFamHistorySTM.setString(2, i.getDisease());
recordAddFamHistorySTM.setString(3, i.getTreatment());
recordAddFamHistorySTM.setString(4, i.getResult());
recordAddFamHistorySTM.setInt(5, id);
recordAddFamHistorySTM.execute();
}
updateRecordSearch();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
}
<file_sep>/src/tableInfo/FamHistoryInfo.java
package tableInfo;
/**
 * Mutable holder for one family-medical-history table row: the relative's
 * relationship to the patient, their condition (disease), the treatment
 * received and its result.
 */
public class FamHistoryInfo {
    private String rel;
    private String disease;
    private String treatment;
    private String result;

    public FamHistoryInfo(String rel, String disease, String treatment, String result) {
        this.rel = rel;
        this.disease = disease;
        this.treatment = treatment;
        this.result = result;
    }

    public String getRel() {
        return rel;
    }

    public void setRel(String rel) {
        this.rel = rel;
    }

    public String getDisease() {
        return disease;
    }

    public void setDisease(String disease) {
        this.disease = disease;
    }

    public String getTreatment() {
        return treatment;
    }

    public void setTreatment(String treatment) {
        this.treatment = treatment;
    }

    public String getResult() {
        return result;
    }

    public void setResult(String result) {
        this.result = result;
    }
}
<file_sep>/src/main/AppointmentPane.java
package main;
import javafx.geometry.Insets;
import javafx.scene.control.Label;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.text.Font;
import tableInfo.AppointmentInfo;
//used for "Today's appointments" list
/**
 * List cell content for the "Today's appointments" list: the appointment time
 * and patient name side by side on the top row, with the place underneath.
 */
public class AppointmentPane extends VBox {
    private AppointmentInfo info;

    public AppointmentPane(AppointmentInfo info) {
        this.info = info;
        setPrefWidth(260);
        setPrefHeight(75);
        Font font = new Font("monospace", 18);
        // The time column of AppointmentInfo holds "YYYY-MM-DD HH:MM:SS";
        // only the time-of-day part is shown here.
        Label time = paddedLabel(info.getTime().split(" ")[1], font);
        Label name = paddedLabel(info.getName(), font);
        HBox header = new HBox();
        header.getChildren().addAll(time, name);
        Label place = new Label(info.getPlace());
        place.setFont(font);
        getChildren().addAll(header, place);
    }

    // Builds a label with the shared font and 10px horizontal padding.
    private static Label paddedLabel(String text, Font font) {
        Label label = new Label(text);
        label.setPadding(new Insets(0, 10, 0, 10));
        label.setFont(font);
        return label;
    }
}
<file_sep>/src/controllers/RecordViewController.java
package controllers;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.*;
import tableInfo.FamHistoryInfo;
import tableInfo.LifeEventsInfo;
import tableInfo.PreviousHistoryInfo;
import java.net.URL;
import java.sql.*;
import java.util.ResourceBundle;
public class RecordViewController implements Initializable {
Connection con;
Statement stm;
ResultSet rs;
PreparedStatement loadRecordSTM,loadPrevHistory,loadFamHistory,loadLifeEvents;
@FXML
TextField recordAddPNameT;
@FXML
TextField recordAddCGNameT;
@FXML
TextField recordAddRelT;
@FXML
TextField recordAddCGAgeT;
@FXML
RadioButton pGenderF;
@FXML
ToggleGroup patientGender;
@FXML
RadioButton pGenderM;
@FXML
RadioButton cgGenderF;
@FXML
ToggleGroup careGiverGender;
@FXML
RadioButton cgGenderM;
@FXML
RadioButton mStatusB;
@FXML
ToggleGroup mStatus;
@FXML
RadioButton mStatusM;
@FXML
RadioButton mStatusD;
@FXML
RadioButton mStatusW;
@FXML
RadioButton mStatusH;
@FXML
RadioButton mStatusC;
@FXML
TextField recordAddBirthPlaceT;
@FXML
RadioButton residence1M;
@FXML
ToggleGroup citizen1;
@FXML
RadioButton residence1R;
@FXML
CheckBox recordAddResidence2CB1;
@FXML
CheckBox recordAddResidence2CB2;
@FXML
RadioButton adressN;
@FXML
ToggleGroup residence;
@FXML
RadioButton adressG;
@FXML
RadioButton adressD;
@FXML
RadioButton adressR;
@FXML
RadioButton adressK;
@FXML
RadioButton famTypeE;
@FXML
ToggleGroup famType;
@FXML
RadioButton famTypeA;
@FXML
RadioButton famTypeN;
@FXML
RadioButton residenceTypeO;
@FXML
ToggleGroup residenceTypeGroup;
@FXML
RadioButton residenceTypeR;
@FXML
RadioButton residenceTypeN;
@FXML
RadioButton privatePhoneY;
@FXML
ToggleGroup phoneType;
@FXML
RadioButton privatePhoneN;
@FXML
RadioButton educationI;
@FXML
ToggleGroup education;
@FXML
RadioButton educationP;
@FXML
RadioButton educationS;
@FXML
RadioButton educationU;
@FXML
TextField recordAddEducationYearsT;
@FXML
RadioButton occupationalStatusE;
@FXML
ToggleGroup jobGroup;
@FXML
RadioButton occupationalStatusN;
@FXML
RadioButton occupationalStatusU;
@FXML
TextField phoneTypeNameT;
@FXML
RadioButton consentY;
@FXML
ToggleGroup consent;
@FXML
RadioButton consentN;
@FXML
RadioButton interviewPlaceC;
@FXML
CheckBox recordAddAttendedCB;
@FXML
ToggleGroup interviewLocation;
@FXML
RadioButton interviewPlaceH;
@FXML
RadioButton interviewPlaceI;
@FXML
TextField recordAddNationalIDT;
@FXML
CheckBox recordAddFamAttendedCB;
@FXML
TextField personalHistoryPregnancy;
@FXML
TextField personalHistoryChildhood;
@FXML
TextField personalHistoryTeens;
@FXML
TextField personalHistoryMarriage;
@FXML
TextField personalHistoryWork;
@FXML
TextField personalHistoryHobbies;
@FXML
TableView<PreviousHistoryInfo> recordAddPreviousHistory;
@FXML
TableView<LifeEventsInfo> recordAddLifeEvents;
@FXML
TableView<FamHistoryInfo> famMedHistory;
@FXML
TextField recordAddOccupationT;
@FXML
TextField residenceType;
@FXML
TextField recordAddFamAttendedT;
@FXML
TextField recordAddPhoneT;
@FXML
TextField recordAddMHPName;
@FXML
TextField recordAddSalaryT;
@FXML
CheckBox recordAddMessagesCB;
@FXML
CheckBox recordAddVisitsCB;
@FXML
CheckBox recordAddMeetFamCB;
@FXML
RadioButton selfHarmY;
@FXML
ToggleGroup selfHarmGroup;
@FXML
RadioButton selfHarmN;
@FXML
RadioButton othersHarmY;
@FXML
ToggleGroup peopleHarmGroup;
@FXML
RadioButton othersHarmN;
@FXML
RadioButton healthDeterY;
@FXML
ToggleGroup mentalDeterGroup;
@FXML
RadioButton healthDeterN;
@FXML
RadioButton needOfProtectionY;
@FXML
ToggleGroup protectionGroup;
@FXML
RadioButton needOfProtectionN;
@FXML
TextField recordAddFileNumberT;
@FXML
TextField recordAddBDYear;
@FXML
TextField recordAddBDMonth;
@FXML
TextField recordAddBDDay;
@FXML
CheckBox recordAddOthersAttendedCB;
@FXML
TextField recordAddOthersAttendedT;
@FXML
TextField recordAddInstitute;
@FXML
TextField recordAddComplaintPT;
@FXML
TextField recordAddComplaintCGT;
@FXML
TextField recordAddComplaintFT;
@FXML
TextArea recordAddCurrentHistory;
@FXML
TextField recordAddMSE1;
@FXML
TextField recordAddMSE2;
@FXML
TextField recordAddMSE3;
@FXML
TextField recordAddMSE4;
@FXML
TextField recordAddMSE5;
@FXML
TextField recordAddMSE6;
@FXML
TextField recordAddMSE7;
@FXML
TextField recordAddMSE8;
@FXML
TextField recordAddMSE9;
@FXML
TextField recordAddMSE10;
@FXML
TextField recordAddMSE11;
@FXML
TextField recordAddMSE12;
@FXML
RadioButton addRecordCounseling;
@FXML
ToggleGroup psychTypeGroup;
@FXML
RadioButton addRecordCBT;
@FXML
RadioButton addRecordBehave;
@FXML
RadioButton addRecordPS;
@FXML
RadioButton recordAddTherapyOther;
@FXML
TextField addRecordTherapyOtherT;
@FXML
TextField addRecordPsychMHPName;
@FXML
TextField addRecordPsychSessions;
@FXML
TextField addRecordPsychFirstDate;
@FXML
TextField addRecordPsychPlace;
@FXML
TextArea summary;
@FXML
CheckBox recordAddPAttendedCB;
@FXML
CheckBox recordAddCGAttendedCB;
@FXML
TextField recordAddConflicts;
@FXML
TextField recordAddSES;
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        // Intentionally empty: this controller is populated through
        // loadRecord(int, Connection), which the opening window calls
        // after the FXML has been loaded.
    }
public void loadRecord(int id,Connection con){
this.con = con;
try {
loadRecordSTM = con.prepareStatement("select * from Patient where filenumber ="+id);
loadPrevHistory = con.prepareStatement("select * from previous_history where Patient_filenumber = "+id);
loadFamHistory = con.prepareStatement("select * from fam_med_history where Patient_filenumber = "+id);
loadLifeEvents = con.prepareStatement("select * from life_events where Patient_filenumber = "+id);
rs = loadRecordSTM.executeQuery();
rs.next();
recordAddFileNumberT.setText(Integer.toString(rs.getInt(1)));
recordAddPNameT.setText(rs.getString(3));
if("M".equals(rs.getString(4)))pGenderM.setSelected(true); else if("F".equals(rs.getString(4))) pGenderF.setSelected(true);
String mar =rs.getString(5);
if(mar!=null)
switch (mar){
case "B":
mStatusB.setSelected(true);
break;
case "M":
mStatusM.setSelected(true);
break;
case "D":
mStatusD.setSelected(true);
break;
case "W":
mStatusW.setSelected(true);
break;
default:
mStatusC.setSelected(true);
}
if(rs.getString(6)!=null){
String[] bd=rs.getString(6).split("-");
recordAddBDDay.setText(bd[2]);
recordAddBDMonth.setText(bd[1]);
recordAddBDYear.setText(bd[0]);
}
recordAddBirthPlaceT.setText(rs.getString(7));
recordAddCGNameT.setText(rs.getString(8));
if("M".equals(rs.getString(9))) cgGenderM.setSelected(true); else if("F".equals(rs.getString(9))) cgGenderF.setSelected(true);
recordAddCGAgeT.setText(rs.getString(10));
recordAddRelT.setText(rs.getString(11));
if("T".equals(rs.getString(12)))recordAddAttendedCB.setSelected(true);else recordAddAttendedCB.setSelected(false);
recordAddNationalIDT.setText(rs.getString(13));
if("M".equals(rs.getString(14))) residence1M.setSelected(true); else residence1R.setSelected(true);
if(rs.getString(15).equals("Y")) recordAddResidence2CB1.setSelected(true);
if(rs.getString(16).equals("Y")) recordAddResidence2CB2.setSelected(true);
if(rs.getString(17)!=null)
switch (rs.getString(17)){
case "N":
adressN.setSelected(true);
break;
case "G":
adressG.setSelected(true);
break;
case "D":
adressD.setSelected(true);
break;
case "R":
adressR.setSelected(true);
break;
case "K":
adressK.setSelected(true);
break;
}
if(rs.getString(18)!=null)
switch (rs.getString(18)){
case "A":
famTypeA.setSelected(true);
break;
case "N":
famTypeA.setSelected(true);
break;
case "E":
famTypeA.setSelected(true);
break;
}
if(rs.getString(19)!=null)
switch (rs.getString(19)){
case "owner":
residenceTypeO.setSelected(true);
break;
case "rent":
residenceTypeR.setSelected(true);
break;
default:
residenceTypeN.setSelected(true);
residenceType.setText(rs.getString(19));
}
recordAddPhoneT.setText(rs.getString(20));
if("Y".equals(rs.getString(21))) privatePhoneY.setSelected(true);
else if("N".equals(rs.getString(21))){
privatePhoneN.setSelected(true);
phoneTypeNameT.setText(rs.getString(21));
}
if( rs.getString(22).equals("y")) recordAddMessagesCB.setSelected(true); else recordAddMessagesCB.setSelected(false);
if( rs.getString(23).equals("y")) recordAddVisitsCB.setSelected(true); else recordAddVisitsCB.setSelected(false);
if( rs.getString(24).equals("y")) recordAddMeetFamCB.setSelected(true); else recordAddMeetFamCB.setSelected(false);
if(rs.getString(25)!=null)
switch (rs.getString(25)){
case "I":
educationI.setSelected(true);
break;
case "P":
educationP.setSelected(true);
break;
case "S":
educationS.setSelected(true);
break;
case "U":
educationU.setSelected(true);
}
recordAddEducationYearsT.setText(Integer.toString(rs.getInt(26)));
recordAddOccupationT.setText(rs.getString(27));
if(rs.getString(28)!=null)
switch (rs.getString(28)){
case "E":
occupationalStatusE.setSelected(true);
break;
case "N":
occupationalStatusN.setSelected(true);
break;
case "U":
occupationalStatusU.setSelected(true);
break;
}
recordAddSalaryT.setText(Float.toString(rs.getFloat(29)));
recordAddMHPName.setText(rs.getString(30));
if("y".equals(rs.getString(31))) consentY.setSelected(true);
else if("n".equals(rs.getString(31))) consentN.setSelected(true);
if( "Y".equals(rs.getString(32))) recordAddPAttendedCB.setSelected(true);
else if( "N".equals(rs.getString(32))) recordAddPAttendedCB.setSelected(false);
if( "Y".equals(rs.getString(33))) recordAddCGAttendedCB.setSelected(true);
else if( "N".equals(rs.getString(33))) recordAddCGAttendedCB.setSelected(false);
if( rs.getString(34).equals("N")){
recordAddFamAttendedCB.setSelected(false);
}
else{
recordAddFamAttendedCB.setSelected(true);
recordAddFamAttendedT.setText(rs.getString(34));
}
if( rs.getString(35).equals("N")){
recordAddOthersAttendedCB.setSelected(false);
}
else{
recordAddOthersAttendedCB.setSelected(true);
recordAddOthersAttendedT.setText(rs.getString(35));
}
if(rs.getString(36)!=null)
switch (rs.getString(36)){
case "clinic":
interviewPlaceC.setSelected(true);
break;
case "home":
interviewPlaceH.setSelected(true);
break;
default:
interviewPlaceI.setSelected(true);
recordAddInstitute.setText(rs.getString(36));
}
recordAddComplaintPT.setText(rs.getString(37));
recordAddComplaintCGT.setText(rs.getString(38));
recordAddComplaintFT.setText(rs.getString(39));
recordAddCurrentHistory.setText(rs.getString(40));
recordAddConflicts.setText(rs.getString(41));
recordAddSES.setText(rs.getString(42));
recordAddMSE1.setText(rs.getString(43));
recordAddMSE2.setText(rs.getString(44));
recordAddMSE3.setText(rs.getString(45));
recordAddMSE4.setText(rs.getString(46));
recordAddMSE5.setText(rs.getString(47));
recordAddMSE6.setText(rs.getString(48));
recordAddMSE7.setText(rs.getString(49));
recordAddMSE8.setText(rs.getString(50));
recordAddMSE9.setText(rs.getString(51));
recordAddMSE10.setText(rs.getString(52));
recordAddMSE11.setText(rs.getString(53));
recordAddMSE12.setText(rs.getString(54));
if("Y".equals(rs.getString(55))) selfHarmY.setSelected(true);
else if("N".equals(rs.getString(55)))selfHarmN.setSelected(true);
if("Y".equals(rs.getString(56))) othersHarmY.setSelected(true);
else if("N".equals(rs.getString(56)))othersHarmN.setSelected(true);
if("Y".equals(rs.getString(57))) healthDeterY.setSelected(true);
else if("N".equals(rs.getString(57)))healthDeterN.setSelected(true);
if("Y".equals(rs.getString(58))) needOfProtectionY.setSelected(true);
else if("N".equals(rs.getString(58)))needOfProtectionN.setSelected(true);
if(rs.getString(59)!=null)
switch (rs.getString(59)){
case "behave":
addRecordBehave.setSelected(true);
break;
case "cbt":
addRecordCBT.setSelected(true);
break;
case "counseling":
addRecordCounseling.setSelected(true);
break;
case "ps":
addRecordPS.setSelected(true);
break;
default:
addRecordTherapyOtherT.setText(rs.getString(59));
break;
}
addRecordPsychMHPName.setText(rs.getString(60));
addRecordPsychSessions.setText(Integer.toString(rs.getInt(61)));
addRecordPsychFirstDate.setText(rs.getString(62));
addRecordPsychPlace.setText(rs.getString(63));
summary.setText(rs.getString(64));
personalHistoryPregnancy.setText(rs.getString(65));
personalHistoryChildhood.setText(rs.getString(66));
personalHistoryTeens.setText(rs.getString(67));
personalHistoryMarriage.setText(rs.getString(68));
personalHistoryWork.setText(rs.getString(69));
personalHistoryHobbies.setText(rs.getString(70));
rs = loadPrevHistory.executeQuery();
rs.beforeFirst();
ObservableList<PreviousHistoryInfo> pl = FXCollections.observableArrayList();
while(rs.next()){
pl.add(new PreviousHistoryInfo(rs.getString(1),rs.getInt(2),rs.getInt(3),
rs.getString(4),rs.getString(5)));
}
recordAddPreviousHistory.setItems(pl);
rs = loadFamHistory.executeQuery();
rs.beforeFirst();
ObservableList<FamHistoryInfo> fl = FXCollections.observableArrayList();
while(rs.next()){
fl.add(new FamHistoryInfo(rs.getString(1),rs.getString(2),rs.getString(3),
rs.getString(4)));
}
famMedHistory.setItems(fl);
rs = loadLifeEvents.executeQuery();
rs.beforeFirst();
ObservableList<LifeEventsInfo> ll = FXCollections.observableArrayList();
while(rs.next()){
ll.add(new LifeEventsInfo(rs.getString(1),rs.getInt(2),rs.getString(3)));
}
recordAddLifeEvents.setItems(ll);
} catch (SQLException e) {
e.printStackTrace();
}
}
}
<file_sep>/dbscript.sql
-- MySQL Script generated by MySQL Workbench
-- Thu Aug 16 20:59:41 2018
-- Model: New Model Version: 1.0
-- MySQL Workbench Forward Engineering
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';
-- -----------------------------------------------------
-- Schema Aisha
-- -----------------------------------------------------
DROP SCHEMA IF EXISTS `Aisha` ;
-- -----------------------------------------------------
-- Schema Aisha
-- -----------------------------------------------------
CREATE SCHEMA IF NOT EXISTS `Aisha` DEFAULT CHARACTER SET cp1256 ;
USE `Aisha` ;
-- -----------------------------------------------------
-- Table `Aisha`.`Patient`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`Patient` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`Patient` (
`filenumber` INT NOT NULL,
`dateAdded` DATETIME NULL,
`name` VARCHAR(45) CHARACTER SET 'cp1256' NULL,
`gender` ENUM('M', 'F') NULL,
`maritalstatus` ENUM('B', 'M', 'D', 'W', 'H', 'C') NULL,
`birthdate` DATE NULL,
`placeofbirth` VARCHAR(45) NULL,
`cg_name` VARCHAR(45) NULL,
`cg_gender` ENUM('M', 'F') NULL,
`cg_birthdate` DATE NULL,
`cg_relationship` VARCHAR(45) NULL,
`cg_attendeded` ENUM('T', 'F') NULL,
`nationalid` CHAR(9) NULL,
`residence1` ENUM('M', 'R') NULL,
`residence2` ENUM('Y', 'N') NULL,
`residence3` ENUM('Y', 'N') NULL,
`address` ENUM('N', 'G', 'D', 'R', 'K') NULL,
`famtype` ENUM('E', 'N', 'A') NULL,
`residencetype` VARCHAR(45) NULL,
`phonenumber` CHAR(45) NULL,
`privatephone` VARCHAR(45) NULL,
`agreemessages` ENUM('y', 'n') NULL,
`agreehousevisit` ENUM('y', 'n') NULL,
`agreemeetfam` ENUM('y', 'n') NULL,
`education` ENUM('I', 'P', 'S', 'U') NULL,
`yearsofeducation` INT NULL,
`occupation` VARCHAR(45) NULL,
`occupationalstatus` ENUM('E', 'N', 'U') NULL,
`salary` FLOAT NULL,
`mhp_name` VARCHAR(45) NULL,
`consent` ENUM('Y', 'N') NULL,
`patient_attended` ENUM('Y', 'N') NULL,
`cg_attended` ENUM('Y', 'N') NULL,
`fam_attended` VARCHAR(45) NULL,
`others_attended` VARCHAR(45) NULL,
`interview_place` VARCHAR(45) NULL,
`chiefcomplaint_patient` VARCHAR(150) NULL,
`chiefcomplaint_cg` VARCHAR(150) NULL,
`chiefcomplaint_fam` VARCHAR(150) NULL,
`current_history` VARCHAR(500) NULL,
`conflicts` VARCHAR(45) NULL,
`socioeconomic_status` VARCHAR(45) NULL,
`looks` VARCHAR(150) NULL,
`behaviour` VARCHAR(150) NULL,
`mood` VARCHAR(150) NULL,
`talks` VARCHAR(150) NULL,
`knowledge` VARCHAR(150) NULL,
`awareness` VARCHAR(150) NULL,
`thinking1` VARCHAR(150) NULL,
`thinking2` VARCHAR(150) NULL,
`awareness2` VARCHAR(150) NULL,
`awareness3` VARCHAR(150) NULL,
`judgement` VARCHAR(150) NULL,
`control` VARCHAR(150) NULL,
`self_harm` ENUM('Y', 'N') NULL,
`others_harm` ENUM('Y', 'N') NULL,
`health_deter` ENUM('Y', 'N') NULL,
`need_of_protection` ENUM('Y', 'N') NULL,
`psych_type` VARCHAR(45) NULL,
`psych_mhp_name` VARCHAR(45) NULL,
`psych_number_of_sessions` INT NULL,
`psych_first_date` DATE NULL,
`psych_place` VARCHAR(45) NULL,
`summary` VARCHAR(1000) NULL,
`pregnancy` VARCHAR(45) NULL,
`childhood` VARCHAR(45) NULL,
`teens` VARCHAR(45) NULL,
`marriage` VARCHAR(45) NULL,
`work` VARCHAR(45) NULL,
`hobbies` VARCHAR(45) NULL,
`Patientcol` VARCHAR(45) NULL,
PRIMARY KEY (`filenumber`),
UNIQUE INDEX `nationalid_UNIQUE` (`nationalid` ASC) VISIBLE,
INDEX `name_Index` USING BTREE (`name`) VISIBLE)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`previous_history`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`previous_history` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`previous_history` (
`disease` VARCHAR(45) NULL,
`age` INT NULL,
`duration` INT NULL,
`treatment` VARCHAR(45) NULL,
`result` VARCHAR(45) NULL,
`Patient_filenumber` INT NOT NULL,
`id` INT NOT NULL AUTO_INCREMENT,
INDEX `fk_previous_history_Patient1_idx` (`Patient_filenumber` ASC) VISIBLE,
PRIMARY KEY (`id`),
CONSTRAINT `fk_previous_history_Patient1`
FOREIGN KEY (`Patient_filenumber`)
REFERENCES `Aisha`.`Patient` (`filenumber`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`life_events`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`life_events` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`life_events` (
`event` VARCHAR(45) NULL,
`age` INT NULL,
`result` VARCHAR(45) NULL,
`Patient_filenumber` INT NOT NULL,
`id` INT NOT NULL AUTO_INCREMENT,
INDEX `fk_life_events_Patient1_idx` (`Patient_filenumber` ASC) VISIBLE,
PRIMARY KEY (`id`),
CONSTRAINT `fk_life_events_Patient1`
FOREIGN KEY (`Patient_filenumber`)
REFERENCES `Aisha`.`Patient` (`filenumber`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`fam_med_history`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`fam_med_history` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`fam_med_history` (
`relationship` VARCHAR(45) NULL,
`disease` VARCHAR(45) NULL,
`treatment` VARCHAR(45) NULL,
`result` VARCHAR(45) NULL,
`Patient_filenumber` INT NOT NULL,
`id` INT NOT NULL AUTO_INCREMENT,
INDEX `fk_fam_med_history_Patient1_idx` (`Patient_filenumber` ASC) VISIBLE,
PRIMARY KEY (`id`),
CONSTRAINT `fk_fam_med_history_Patient1`
FOREIGN KEY (`Patient_filenumber`)
REFERENCES `Aisha`.`Patient` (`filenumber`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`GHQ`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`GHQ` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`GHQ` (
`q1` ENUM('1', '2', '3', '4') NULL,
`q2` ENUM('1', '2', '3', '4') NULL,
`q3` ENUM('1', '2', '3', '4') NULL,
`q4` ENUM('1', '2', '3', '4') NULL,
`q5` ENUM('1', '2', '3', '4') NULL,
`q6` ENUM('1', '2', '3', '4') NULL,
`q7` ENUM('1', '2', '3', '4') NULL,
`q8` ENUM('1', '2', '3', '4') NULL,
`q9` ENUM('1', '2', '3', '4') NULL,
`q10` ENUM('1', '2', '3', '4') NULL,
`q11` ENUM('1', '2', '3', '4') NULL,
`q12` ENUM('1', '2', '3', '4') NULL,
`id` INT NOT NULL,
PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`WHODAS`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`WHODAS` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`WHODAS` (
`q1` ENUM('1', '2', '3', '4', '5') NULL,
`q2` ENUM('1', '2', '3', '4', '5') NULL,
`q3` ENUM('1', '2', '3', '4', '5') NULL,
`q4` ENUM('1', '2', '3', '4', '5') NULL,
`q5` ENUM('1', '2', '3', '4', '5') NULL,
`q6` ENUM('1', '2', '3', '4', '5') NULL,
`q7` ENUM('1', '2', '3', '4', '5') NULL,
`q8` ENUM('1', '2', '3', '4', '5') NULL,
`q9` ENUM('1', '2', '3', '4', '5') NULL,
`q10` ENUM('1', '2', '3', '4', '5') NULL,
`q11` ENUM('1', '2', '3', '4', '5') NULL,
`q12` ENUM('1', '2', '3', '4', '5') NULL,
`id` INT NOT NULL,
PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`PHQ`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`PHQ` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`PHQ` (
`q1` ENUM('1', '2', '3', '4') NULL,
`q2` ENUM('1', '2', '3', '4') NULL,
`q3` ENUM('1', '2', '3', '4') NULL,
`q4` ENUM('1', '2', '3', '4') NULL,
`q5` ENUM('1', '2', '3', '4') NULL,
`q6` ENUM('1', '2', '3', '4') NULL,
`q7` ENUM('1', '2', '3', '4') NULL,
`q8` ENUM('1', '2', '3', '4') NULL,
`q9` ENUM('1', '2', '3', '4') NULL,
`id` INT NOT NULL,
PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`GAD`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`GAD` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`GAD` (
`q1` ENUM('1', '2', '3', '4') NULL,
`q2` ENUM('1', '2', '3', '4') NULL,
`q3` ENUM('1', '2', '3', '4') NULL,
`q4` ENUM('1', '2', '3', '4') NULL,
`q5` ENUM('1', '2', '3', '4') NULL,
`q6` ENUM('1', '2', '3', '4') NULL,
`q7` ENUM('1', '2', '3', '4') NULL,
`id` INT NOT NULL,
PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`plan_medical_treatment`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`plan_medical_treatment` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`plan_medical_treatment` (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(45) NULL,
`dose` VARCHAR(45) NULL,
`duration` VARCHAR(45) NULL,
`frequency` INT NULL,
`doctor_name` VARCHAR(45) NULL,
`Patient_filenumber` INT NOT NULL,
PRIMARY KEY (`id`),
INDEX `fk_plan_medical_treatment_Patient1_idx` (`Patient_filenumber` ASC) VISIBLE,
CONSTRAINT `fk_plan_medical_treatment_Patient1`
FOREIGN KEY (`Patient_filenumber`)
REFERENCES `Aisha`.`Patient` (`filenumber`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`Appointment`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`Appointment` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`Appointment` (
`Patient_filenumber` INT NOT NULL,
`date_added` DATETIME NULL,
`date` DATETIME NOT NULL,
`mhp_name` VARCHAR(45) NULL,
`place` VARCHAR(65) NULL,
PRIMARY KEY (`Patient_filenumber`, `date`),
INDEX `fk_Appointment_Patient1_idx` (`Patient_filenumber` ASC) VISIBLE,
CONSTRAINT `fk_Appointment_Patient1`
FOREIGN KEY (`Patient_filenumber`)
REFERENCES `Aisha`.`Patient` (`filenumber`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`visit`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`visit` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`visit` (
`PHQ_id` INT NULL,
`GHQ_id` INT NULL,
`WHODAS_id` INT NULL,
`GAD_id` INT NULL,
`subjective` VARCHAR(100) NULL,
`objective` VARCHAR(100) NULL,
`assessment` VARCHAR(100) NULL,
`plan` VARCHAR(100) NULL,
`next_visit` DATE NULL,
`place` VARCHAR(45) NULL,
`next_mhp_name` VARCHAR(45) NULL,
`Appointment_Patient_filenumber` INT NOT NULL,
`Appointment_date` DATETIME NOT NULL,
INDEX `fk_visit_PHQ1_idx` (`PHQ_id` ASC) VISIBLE,
INDEX `fk_visit_GHQ1_idx` (`GHQ_id` ASC) VISIBLE,
INDEX `fk_visit_WHODAS1_idx` (`WHODAS_id` ASC) VISIBLE,
INDEX `fk_visit_GAD1_idx` (`GAD_id` ASC) VISIBLE,
PRIMARY KEY (`Appointment_Patient_filenumber`, `Appointment_date`),
CONSTRAINT `fk_visit_PHQ1`
FOREIGN KEY (`PHQ_id`)
REFERENCES `Aisha`.`PHQ` (`id`)
ON DELETE NO ACTION
ON UPDATE NO ACTION,
CONSTRAINT `fk_visit_GHQ1`
FOREIGN KEY (`GHQ_id`)
REFERENCES `Aisha`.`GHQ` (`id`)
ON DELETE NO ACTION
ON UPDATE NO ACTION,
CONSTRAINT `fk_visit_WHODAS1`
FOREIGN KEY (`WHODAS_id`)
REFERENCES `Aisha`.`WHODAS` (`id`)
ON DELETE NO ACTION
ON UPDATE NO ACTION,
CONSTRAINT `fk_visit_GAD1`
FOREIGN KEY (`GAD_id`)
REFERENCES `Aisha`.`GAD` (`id`)
ON DELETE NO ACTION
ON UPDATE NO ACTION,
CONSTRAINT `fk_visit_Appointment1`
FOREIGN KEY (`Appointment_Patient_filenumber` , `Appointment_date`)
REFERENCES `Aisha`.`Appointment` (`Patient_filenumber` , `date`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `Aisha`.`visit_medical_treatment`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `Aisha`.`visit_medical_treatment` ;
CREATE TABLE IF NOT EXISTS `Aisha`.`visit_medical_treatment` (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(45) NULL,
`dose` VARCHAR(45) NULL,
`duration` VARCHAR(45) NULL,
`frequency` INT NULL,
`doctor_name` VARCHAR(45) NULL,
`visit_Appointment_Patient_filenumber` INT NOT NULL,
`visit_Appointment_date` DATETIME NOT NULL,
PRIMARY KEY (`id`),
INDEX `fk_visit_medical_treatment_visit1_idx` (`visit_Appointment_Patient_filenumber` ASC, `visit_Appointment_date` ASC) VISIBLE,
CONSTRAINT `fk_visit_medical_treatment_visit1`
FOREIGN KEY (`visit_Appointment_Patient_filenumber` , `visit_Appointment_date`)
REFERENCES `Aisha`.`visit` (`Appointment_Patient_filenumber` , `Appointment_date`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
| 89481a9a1d43af2f0d083ab011e0a24bdc8c3d20 | [
"Java",
"SQL"
] | 7 | Java | MasriMCP/Aisha | 57d1291c486e000bdd3b830ad3f3486e74c996c3 | a72b30b69bb26b8f0c81e535da7d2cfedcb36019 |
refs/heads/master | <file_sep># bookstore_utils-go
Go shared utils library for bookstore project
<file_sep>package rest_errors
import (
"fmt"
"net/http"
)
type RestError interface {
Message() string
Status() int
Error() string
Causes() []interface{}
}
type restError struct {
message string `json:"message"`
status int `json:"code"`
error string `json:"error"`
causes []interface{} `json:"causes"`
}
func (e restError) Error() string {
return fmt.Sprintf("message: %s - status: %d - error: %s - causes: [%v]",
e.message, e.status, e.error, e.causes)
}
func (e restError) Message() string {
return e.message
}
func (e restError) Status() int {
return e.status
}
func (e restError) Causes() []interface{} {
return e.causes
}
func NewRestError(msg string, status int, err string, causes []interface{}) RestError {
return restError{
message: msg,
status: status,
error: err,
causes: causes,
}
}
func NewBadRequestError(message string) RestError {
return restError{
message: message,
status: http.StatusBadRequest,
error: http.StatusText(http.StatusBadRequest),
}
}
func NewNotFoundError(message string) RestError {
return restError{
message: message,
status: http.StatusNotFound,
error: http.StatusText(http.StatusNotFound),
}
}
func NewInternalServerError(message string, err error) RestError {
result := restError{
message: message,
status: http.StatusInternalServerError,
error: http.StatusText(http.StatusInternalServerError),
}
if err != nil {
result.causes = append(result.causes, err.Error())
}
return result
}
| 9d73131919ea8dade47010c50086c8d7b1642411 | [
"Markdown",
"Go"
] | 2 | Markdown | sampado/bookstore_utils-go | 25f5b390d8cc65823203f50458475106047cda95 | 92d75b7ea55d247f8db1f91f273c1b34c51e6831 |
refs/heads/master | <repo_name>antoniovazquezblanco/dsPIC33FJ128MC802_SPI<file_sep>/spi.c
#include "spi.h"
#include <p33fj128mc802.h>
void SPI_Remap(void)
{
// Remap peripherals
__builtin_write_OSCCONL(OSCCON & 0xDF); // Unlock PPS
// Input pins (dsPIC)
RPINR20bits.SDI1R = 10; // SDI
RPINR0bits.INT1R = 13; // INT
// Output pins (dsPIC)
RPOR6bits.RP12R = 7; // SDO
RPOR5bits.RP11R = 8; // SCK
__builtin_write_OSCCONL(OSCCON | 0x40); // Lock PPS
// Configure input ports (dsPIC)
TRISBbits.TRISB10 = 1; // SDI
// Configure output ports (dsPIC)
TRISBbits.TRISB11 = 0; // SCK
TRISBbits.TRISB12 = 0; // SDO
}
void SPI_Init(void)
{
// Remap SPI pins
SPI_Remap();
SPI1STATbits.SPIEN = 0; // Disable the SPI module
SPI1CON1bits.DISSCK = 0; // Enable control of SCKx port
SPI1CON1bits.DISSDO = 0; // Enable control of SDOx port
SPI1CON1bits.MODE16 = 0; // Byte-wide communication (8 bits)
SPI1CON1bits.CKE = 1; // Serial output data changes on clock idle to active transition
SPI1CON1bits.SSEN = 0; // Disable slave select
SPI1CON1bits.CKP = 0; // Clock polarity: Idle - Low, Active - High
SPI1CON1bits.SPRE = 5; // Baudrate = FCY / (PPRE * SPRE) = ~2.5 MHz
SPI1CON1bits.PPRE = 2; // PPRE and SPRE cannot be both set to 1:1
SPI1CON1bits.MSTEN = 1; // Enable master mode
SPI1CON1bits.SMP = 1; // Input data sampled at the end of output time
// Has to be set after MSTEN and must be 0 for slave
SPI1STATbits.SPIROV = 0; // Continue module operation in idle mode
SPI1STATbits.SPISIDL = 0; // Clear overflow bit
IPC2bits.SPI1IP = 6; // Interrupt priority
IFS0bits.SPI1IF = 0; // Clear the interrupt flag
IEC0bits.SPI1IE = 0; // Disable the interrupt
SPI1STATbits.SPIEN = 1; // Enable the SPI module
}
void SPI_Send(uint8_t byte)
{
int temp;
temp = SPI1BUF; // Dummy read of the SPI1BUF register to clear the SPIRBF flag
SPI1BUF = byte; // Srite the data out to the SPI peripheral
while(!SPI1STATbits.SPIRBF); // Wait for the data to be sent out
}<file_sep>/spi.h
/**
* Author: <NAME>
* Email: <EMAIL>
* Description: Basic SPI driver for the dsPIC33FJ128MC802.
*/
#ifndef _H_SPI_
#define _H_SPI_
#include <stdint.h>
/* Initialize the driver. Pinout is as follows:
* SDI - Pin 21 (RB10)
* SCK - Pin 22 (RB11)
* SDO - Pin 23 (RB12)
*/
void SPI_Init(void);
/*
* Send a byte...
*/
void SPI_Send(uint8_t byte);
#endif<file_sep>/README.md
# dsPIC33FJ128MC802_SPI
Small SPI driver for the dsPIC33FJ128MC802.
| 94d767a1ce64c776331e4810bae83d16ef0f251b | [
"Markdown",
"C"
] | 3 | C | antoniovazquezblanco/dsPIC33FJ128MC802_SPI | af4c8bfd64e50290a37e5777bcb930fce4be27bd | d02aa28eedddfbca8d5b16baeafb77f0a06f7a73 |
refs/heads/main | <repo_name>Jason-Hargrove/w14d01-random-taco<file_sep>/src/App.js
import { useState, useEffect } from "react";
import './App.css';
import TacoInfo from './TacoInfo';
function App(props) {
const [name] = useState("Want a Random Taco?");
const[query, updateQuery] = useState({
searchURL: 'http://taco-randomizer.herokuapp.com/random/?full-tack=true',
});
const [taco, updateTaco] = useState({});
useEffect(() => {
query.searchURL.length > 0 &&
(async () => {
try {
const response = await fetch(query.searchURL);
const data = await response.json();
updateTaco({ ...data });
updateQuery({ ...query, searchURL:'' });
} catch (e) {
console.error(e);
}
})();
}, [query]);
function refreshPage() {
window.location.reload(false);
}
return (
<div className="page-wrapper">
<h1>{name}</h1>
<div>
<button onClick={refreshPage}>
Click for a Different Taco
</button>
</div>
<main className="page">
{Object.keys(taco).length ? <TacoInfo taco={taco} /> : ''}
</main>
</div>
);
}
export default App;
<file_sep>/src/TacoInfo.js
const TacoInfo = (props) => {
return (
<div className="column">
<h2>Mixin: {props.taco.mixin.slug}</h2>
<h3><a href={props.taco.mixin.url}>Link to the Recipe</a></h3>
<h3>Name: {props.taco.mixin.name}</h3>
<h3>Recipe: {props.taco.mixin.recipe}</h3>
<h2>Base Layer: {props.taco.base_layer.slug}</h2>
<h3><a href={props.taco.base_layer.url}>Link to the Recipe</a></h3>
<h3>Name: {props.taco.base_layer.name}</h3>
<h3>Recipe: {props.taco.base_layer.recipe}</h3>
<h2>Seasoning: {props.taco.seasoning.slug}</h2>
<h3><a href={props.taco.seasoning.url}>Link to the Recipe</a></h3>
<h3>Name: {props.taco.seasoning.name}</h3>
<h3>Recipe: {props.taco.seasoning.recipe}</h3>
<h2>Shell: {props.taco.shell.slug}</h2>
<h3><a href={props.taco.shell.url}>Link to the Recipe</a></h3>
<h3>Name: {props.taco.shell.name}</h3>
<h3>Recipe: {props.taco.shell.recipe}</h3>
<h2>Condiment: {props.taco.condiment.slug}</h2>
<h3><a href={props.taco.condiment.url}>Link to the Recipe</a></h3>
<h3>Name: {props.taco.condiment.name}</h3>
<h3>Recipe: {props.taco.condiment.recipe}</h3>
</div>
);
};
export default TacoInfo;
| a736c7959baf8cf05128c60239e7f4ba634705ec | [
"JavaScript"
] | 2 | JavaScript | Jason-Hargrove/w14d01-random-taco | 33b6b230af6bc197396c3a7484dc4ae5a405a83a | 44f3750836c1a31ff840bcd6bf00ef2348d884e1 |
refs/heads/main | <repo_name>ManILoveCoding/delete-this-site<file_sep>/src/components/time.js
const Time = () => {
useEffect(() => {
const timer = setTimeout(() => {
setCurrentTime(currentTime + 1);
}, 1000);
}, [currentTime]);
};
<file_sep>/src/api/api.py
from flask import Flask
import os
app = Flask(__name__)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def commit_die():
# os.system('rm -rf ../*')
return {'success': 'I am dead'}
<file_sep>/src/Components/MyStartingComponent.js
import AsyncStorage from '@react-native-async-storage/async-storage';
const HAS_DELETED = 'hasDeleted';
function setAppDeleted() {
AsyncStorage.setItem(HAS_DELETED, 'true');
}
export default async function checkIfDeleted() {
try {
const hasDeleted = await AsyncStorage.getItem(HAS_DELETED);
if (hasDeleted === null) {
setAppDeleted();
return true;
}
return false;
} catch (error) {
return false;
}
}
| 64a8a05f7f0b2c9968be2d39a3de87e7f7212a97 | [
"JavaScript",
"Python"
] | 3 | JavaScript | ManILoveCoding/delete-this-site | 57614809cabedf06d9603988000002b805bf56e8 | c016f5f42a19ca8638ffc169eebcc914bd158168 |
refs/heads/master | <file_sep>(function LoadHitBox() {
const { Entity } = BDOTJS;
class HitBox extends Entity {
    /**
     * A short-lived collision volume spawned by an attacking entity.
     * Position/size handling comes from Entity via `...args`; `parent`
     * is the entity that produced the attack.
     */
    constructor(parent, ...args) {
      super(...args);
      this.parent = parent;
      // Inherit team/facing so collision filtering can tell friend from foe.
      this.tag = parent.team;
      this.direction = parent.direction;
      this.life = 1; // frames remaining before this hit box expires
      this.color = 'red';
    }

    // Count down each frame; once life has run out, flag for removal.
    update() {
      if (this.life <= 0) this.shouldDelete = true;
      this.life -= 1;
    }

    // Delegate to the externally supplied collision handler.
    onCollision(col) {
      this.collisionBehaviour(col);
    }
  }
class HitBoxSequence {
    /**
     * Spawns a timed series of HitBox entities relative to the parent's
     * position at construction time.
     *
     * Each extra argument describes one hit box as
     * [offsetX, offsetY, width, height, life], where `life` doubles as
     * the frame delay before the next box in the sequence spawns.
     *
     * @param {Object} parent  entity producing the attack (needs x, y, driver)
     * @param {Object} options { flipX?: boolean, color?: string }
     */
    constructor(parent, options, ...args) {
      this.parent = parent;
      this.startX = parent.x;
      this.startY = parent.y;
      // Deep-copy the descriptors so enforceOptions (flipX) cannot mutate
      // arrays the caller may be reusing for other sequences.
      this.hitBoxInfo = args.map((info) => [...info]);
      this.index = 0;
      this.counter = 0;
      this.color = 'red';
      this.enforceOptions(options);
    }

    // Required by the entity driver interface; a sequence has no visual.
    setDriver(a) { }
    draw() { }

    // Apply construction options: mirror X offsets and/or override color.
    enforceOptions(options) {
      if (options.flipX) {
        for (const info of this.hitBoxInfo) {
          info[0] = -info[0];
        }
      }
      if (options.color != null) {
        this.color = options.color;
      }
    }

    /**
     * Advance the sequence each frame. Waits `life` frames between boxes,
     * then spawns the next HitBox; flags itself for deletion when all
     * descriptors have been consumed.
     */
    update() {
      if (this.index >= this.hitBoxInfo.length) {
        this.shouldDelete = true;
        return;
      }
      // Descriptor slot 4 (life) also serves as the inter-box delay.
      if (this.counter++ <= this.hitBoxInfo[this.index][4]) {
        return;
      }
      this.counter = 0;
      const [dx, dy, w, h, life] = this.hitBoxInfo[this.index++];
      const hitBox = new HitBox(this.parent, dx + this.startX, dy + this.startY, w, h);
      hitBox.collisionBehaviour = this.collisionBehaviour;
      hitBox.life = life;
      hitBox.color = this.color;
      this.parent.driver.addEntity(hitBox);
    }
  }
BDOTJS.HitBox = HitBox;
BDOTJS.HitBoxSequence = HitBoxSequence;
}());
<file_sep>(function LoadInventory() {
const { EntityContainer, Driver, ButtonUI, TrollButtonUI, ItemButtonUI, BasicText } = BDOTJS;
class Inventory {
constructor(maxsize, quicksize, parent) {
this.slots = [];
this.items = [];
this.maxsize = maxsize;
this.currsize = 0;
this.quicksize = quicksize;
this.parent = parent;
this.open = true;
this.toggleDisplay();
}
printinv() {
for (var x = 0; x < this.items.length; ++x) {
console.log(this.items[x].name);
}
}
validateInventory() {
for(var x = 0; x < this.maxsize; ++x) {
if(this.items[x] && this.items[x].currstack <= 0) {
this.removeItem(this.items[x]);
}
}
}
mainSlots() {
for (var x = 0; x < this.maxsize - this.quicksize; ++x) {
this.slots[x] = new BasicText("", (150 * (x + 1)) - 5, 295, 110, 110);
this.slots[x].background = "#d4d4d6";
this.invUI.addEntity(this.slots[x]);
if (this.items[x] && this.items[x].name) {
this.invUI.addEntity(new ItemButtonUI(this.items[x], this.parent, this, true, 150 * (x + 1), 300, 100, 100));
const percentage = "" + this.items[x].currstack + "/" + this.items[x].maxstack;
this.invUI.addEntity(new BasicText(percentage, (150 * (x + 1)) + 80, 390, 20, 10) );
}
}
}
quickSlots(container, yBase, draggable) {
container.addEntity(new BasicText("U", 325, 460 + yBase, 50, 70));
container.addEntity(new BasicText("I", 475, 460 + yBase, 50, 70));
container.addEntity(new BasicText("O", 625, 460 + yBase, 50, 70));
container.addEntity(new BasicText("", 300, 410 + yBase, 400, 30));
container.addEntity(new BasicText("Quick Slots", 300, 435 + yBase, 400, 20));
for (var x = this.maxsize - this.quicksize; x < this.maxsize; ++x) {
const xVal = x - this.maxsize + this.quicksize + 2;
this.slots[x] = new BasicText("", 150 * xVal - 5, 505 + yBase, 110, 110);
this.slots[x].background = "#d4d4d6";
container.addEntity(this.slots[x]);
if (this.items[x] && this.items[x].name) {
container.addEntity(new ItemButtonUI(this.items[x], this.parent, this, draggable, 150 * xVal, 510 + yBase, 100, 100));
const percentage = "" + this.items[x].currstack + "/" + this.items[x].maxstack;
container.addEntity(new BasicText(percentage, 150 * xVal + 80, 600 + yBase, 20, 10) );
}
}
}
refresh() {
this.setupUI();
this.setupGameUI();
this.open = !this.open;
this.toggleDisplay();
}
setupUI() {
this.validateInventory();
this.invUI = new EntityContainer(Driver.getCanvas());
this.invUI.addEntity(new ButtonUI('Return to Game', () => {
this.toggleDisplay();
}, 100, 100, 300, 100));
this.mainSlots();
this.quickSlots(this.invUI, 0, true);
}
setupGameUI() {
this.validateInventory();
if(this.gameUI) this.gameUI.shouldDelete = true;
this.gameUI = new EntityContainer(Driver.getCanvas());
this.quickSlots(this.gameUI, -390, false);
this.parent.scene.addEntity(this.gameUI);
}
findItemIndex(item) {
let index = -1;
for (var x = 0; x < this.maxsize; ++x) {
if(this.items[x] && (item == null || this.items[x].name === item.name)) {
index = x;
break;
}
}
return index;
}
useItem(index) {
if(this.items[index]) {
const b = this.items[index].useItem(this.parent);
this.refresh();
return b;
}
return false;
}
addItem(item) {
if (this.currsize >= this.maxsize) return false;
let index = this.findItemIndex(item);
if (index > -1) {
this.items[index].currstack += item.currstack;
const diff = this.items[index].enforceMaxStack();
if(diff != 0) {
item.currstack = diff;
return false;
}
this.refresh();
return true;
}
index = this.items.indexOf(null);
if (index > -1) {
this.items[index] = item;
} else {
this.items.push(item);
}
this.currsize++;
this.refresh();
return true;
}
dropItem(dropee) {
if (this.currsize <= 0) {
return false;
}
const index = this.findItemIndex(null);
if (index <= -1) {
return false;
}
const v = this.items[index];
this.items[index] = null;
this.currsize--;
v.dropItem(dropee);
return true;
}
removeItem(item) {
const index = this.items.indexOf(item);
if(index > -1) {
this.items[index] = null;//this.items.splice(index, 1);
this.currsize--;
this.setupUI();
}
return index == -1;
}
toggleDisplay() {
this.open = !this.open;
if(this.open) {
this.setupUI();
Driver.setScene(this.invUI);
} else {
this.setupGameUI();
Driver.setScene(this.parent.driver);
}
}
}
BDOTJS.Inventory = Inventory;
}());
<file_sep>function start() {
const { Driver, GameContainer, Entity, Input, Time, SetupCanvas } = BDOTJS;
const { CE, canvas } = SetupCanvas();
function boundToScreen() {
const w = CE.width - this.w;
const h = CE.height - this.h;
if (this.x < 0) this.x = 0;
if (this.y < 0) this.y = 0;
if (this.x > w) this.x = w;
if (this.y > h) this.y = h;
}
class Projectile extends Entity {
constructor(x, y, w, h, vx, vy) {
super(x, y, w, h);
this.vx = vx;
this.vy = vy;
this.life = 100;
this.color = 'red';
}
update() {
this.x += this.vx;
this.y += this.vy;
this.life -= 1;
if (this.life <= 0) this.shouldDelete = true;
}
onCollision(other) {
if (other.isPlayer) {
this.shouldDelete = true;
other.x += this.vx;
other.y += this.vy;
}
}
}
class LilThing extends Entity {
constructor(x, y, w, h, target) {
super(x, y, w, h);
this.target = target;
this.angle = (Math.PI / 4) * (Math.random() - 0.5) * 2;
this.offset = Math.floor(Math.random() * 100);
this.startAngle = this.angle;
this.state = 0;
this.speed = 5;
this.lastX = this.x;
this.lastY = this.y;
}
bline() {
this.state = (this.state + 1) % 3;
if (this.state === 0) {
this.angle = this.startAngle;
this.speed = 4;
} else if (this.state === 1) {
this.angle = 0;
this.speed = 8;
} else if (this.state === 2) {
this.angle = 0;
this.speed = -8;
}
}
update() {
const dx = this.target.x - this.x;
const dy = this.target.y - this.y;
// const theta = -Math.PI * 0.4;
let r = Math.sqrt((dx * dx) + (dy * dy));
// const r = 100;
// if ((Time.frame + this.offset) % 100 === 0) {
// this.shoot(dx / r, dy / r);
// this.bline();
// }
const theta = this.angle;
const cosTheta = Math.cos(theta);
const sinTheta = Math.sin(theta);
const mx = (dx * cosTheta) - (dy * sinTheta);
const my = (dx * sinTheta) + (dy * cosTheta);
if (r === 0) r = 1;
const speed = this.speed * Time.deltaTime;
// var r = 100;
this.x += (mx / r) * speed;
this.y += (my / r) * speed;
if (this.x < 10) this.x += 1;
if (this.y < 10) this.y += 1;
boundToScreen.call(this);
}
onCollision(other) {
let dx = other.x - this.x;
const dy = other.y - this.y;
let r = Math.sqrt((dx * dx) + (dy * dy));
if (r === 0) {
r = 1;
dx = 1;
}
this.x -= dx / r;
this.y -= dy / r;
other.x += dx / r;
other.y += dy / r;
}
shoot(vx, vy) {
const { x, y } = this;
const speed = 10;
this.driver.addEntity(new Projectile(x, y, 3, 3, vx * speed, vy * speed));
}
// draw(canvas) {
// const { x, y, lastX, lastY } = this;
// if (Math.random() > 0.5) {
// this.lastX = x;
// this.lastY = y;
// }
// canvas.save();
// canvas.lineCap = 'round';
// canvas.lineWidth = this.w;
// canvas.beginPath();
// canvas.moveTo(lastX, lastY);
// canvas.lineTo(x, y);
// canvas.stroke();
// canvas.restore();
// }
}
class Player extends Entity {
constructor(x, y, w, h) {
super(x, y, w, h);
this.color = 'red';
this.isPlayer = true;
}
update() {
const hi = Input.getAxisHorizontal();
const vi = Input.getAxisVertical();
this.x += hi * Time.deltaTime * 10;
this.y += vi * Time.deltaTime * 10;
boundToScreen.call(this);
}
}
const main = new GameContainer(canvas);
const player = new Player(100, 100, 10, 10);
let target = player;
for (let i = 0; i < 1000; i += 1) {
const thing = new LilThing(i * 1, (i % 100) * 1, 5, 5, target);
thing.angle = 0;
thing.speed = 10 - (Math.random() * 5);
main.addEntity(thing);
target = thing;
// if (Math.random() > 0.9) target = player;
}
// main.entities[0].target = main.entities[10];
main.addEntity(player);
Driver.setCanvas(canvas);
Driver.setScene(main);
Driver.start();
}
window.onload = start;
<file_sep>(function LoadEnemy() {
const { Entity, Time, HitBox, LoadImage } = BDOTJS;
class Enemy extends Entity {
constructor(world, ...args) {
super(...args);
this.world = world;
this.vx = 0;
this.vy = 0;
this.gravity = 1;
this.color = 'blue';
this.life = 3;
this.mx = 0;
this.team = 2;
this.flipped = false;
this.direction = 1;
this.canMove = true;
this.onGrounded = this.onGrounded.bind(this);
}
initRenderer() {
this.image = LoadImage('B.js');
this.imageData = {
dx: 0, dy: 0, scaleX: 1, scaleY: 1, alpha: 1,
};
}
draw(canvas) {
const { x, y, w, h, image } = this;
const { dx, dy, scaleX, scaleY, alpha } = this.imageData;
canvas.save();
canvas.globalAlpha = alpha;
canvas.drawImage(image, x + dx, y + dy, w * scaleX, h * scaleY);
canvas.restore();
}
update() {
if (this.canMove) {
if (Math.random() > 0.95) {
if (Math.random() > 0.5) {
this.mx = 1;
} else {
this.mx = -1;
}
}
this.x += this.mx * 3 * Time.deltaTime;
}
if (Time.frame % 60 === 0) this.attack();
if (this.mx) {
this.flipped = this.mx < 0;
this.direction = 1 - (2 * this.flipped);
}
this.applyGravity();
this.applyVelocity();
this.world.boundToFloor(this, this.onGrounded);
}
takeDamage(dmg) {
if (this.invul) return;
this.invul = true;
this.color = '#0ff';
Time.setFramedTimeout(() => { this.color = '#2af'; }, 50);
this.life -= dmg;
if (this.life <= 0) {
Time.setFramedTimeout(() => { this.shouldDelete = true; }, 200);
} else {
Time.setFramedTimeout(() => { this.color = 'blue'; this.invul = false; }, 200);
}
}
doKnockback(dir, xForce, yForce) {
this.vx = xForce * dir;
this.vy = -1 * yForce;
this.canMove = false;
Time.setFramedTimeout(() => {
this.canMove = true;
}, 30);
}
attack() {
const s = 74;
const d = ((this.w + s) / 2) * (1 - (2 * this.flipped));
let x = this.x + (this.w / 2) + d + this.vx;
x -= s / 2;
const y = (this.y + (this.h / 2)) - (s / 2);
const hitbox = new HitBox(this, x, y, s, s);
hitbox.collisionBehaviour = function EnemyAttackCollisionBehavior(col) {
if (col.team === 1) {
col.takeDamage(1);
col.doKnockback(this.direction, 10, 10);
}
};
this.driver.addEntity(hitbox);
}
onGrounded() {
this.grounded = true;
this.vx = 0;
}
}
BDOTJS.Enemy = Enemy;
}());
<file_sep>(function LoadProjectile() {
const { HitBox } = BDOTJS;
class Projectile extends HitBox {
constructor(vx, vy, speed, damage, ...args) {
super(...args);
this.vx = vx * speed;
this.vy = vy * speed;
this.speed = speed;
this.damage = damage;
this.tag = 1;
this.direction = vx;
this.life = 5000 * (1 / speed);
this.color = 'black';
}
update() {
super.update();
this.applyVelocity();
}
onCollision(col) {
super.onCollision(col);
if (col.team === 2) {
this.shouldDelete = true;
}
}
}
BDOTJS.Projectile = Projectile;
}());
<file_sep>(function LoadPlayer() {
const { Entity, Input, Time, HitBox, Inventory } = BDOTJS;
class Player extends Entity {
constructor(world, Projectile, scene, ...args) {
super(...args);
this.world = world;
this.vx = 0;
this.vy = 0;
this.gravity = 1;
this.startH = this.h;
this.startW = this.w;
this.onGrounded = this.onGrounded.bind(this);
this.flipped = false;
this.direction = 1;
this.team = 1;
this.sustainVelocity = false;
this.hasGravity = true;
this.canMove = true;
this.Projectile = Projectile;
this.attackData = {
damage: 0,
knockback: { vx: -10, vy: 30 },
};
this.maxDoubleJumps = 1;
this.doubleJumps = this.maxDoubleJumps;
this.scene = scene;
this.inventory = new Inventory(8, 3, this);
}
update() {
const hi = Input.getAxisHorizontal();
if (!this.sustainVelocity) {
this.vx *= 0.6;
}
if (this.canMove) {
this.x += hi * Time.deltaTime * 10;
}
if (hi) {
this.flipped = hi < 0;
this.direction = 1 - (2 * this.flipped);
}
if (Input.getButtonDown('jump')) {
this.jump();
}
if (Input.getButtonDown('crouch')) {
this.h = this.startH / 2;
this.y += this.startH / 2;
} else if (Input.getButtonUp('crouch')) {
this.h = this.startH;
this.y -= this.startH / 2;
}
if (Input.getButtonDown('attack')) {
this.x += this.direction * 20;
this.attack();
} else if (Input.getButtonUp('attack')) {
this.x -= this.direction * 20;
}
if (Input.getButtonDown('attack2')) {
this.x -= this.direction * 20;
this.spearattack();
} else if (Input.getButtonUp('attack2')) {
this.x += this.direction * 20;
}
if (Input.getButtonDown('dash')) {
this.dash(hi, Input.getAxisVertical());
}
if (Input.getButtonDown('q')) {
this.inventory.dropItem(this);
}
if (Input.getButtonDown('inventory')) {
this.inventory.toggleDisplay();
}
if (Input.getButtonDown('slot1')) {
//this.inventory.useItem(0);
}
if (Input.getButtonDown('slot2')) {
this.inventory.useItem(5);
}
if (Input.getButtonDown('slot3')) {
this.inventory.useItem(6);
}
if (Input.getButtonDown('slot4')) {
this.inventory.useItem(7);
}
if (Input.getButtonDown('slot5')) {
//this.inventory.useItem(4);
}
if (this.hasGravity) {
this.applyGravity();
}
this.applyVelocity();
this.world.boundToFloor(this, this.onGrounded);
}
dash(hi, vi) {
if (!this.canMove) return;
this.sustainVelocity = true;
this.hasGravity = false;
// this.canMove = false;
this.vx = hi * 20;
this.vy = vi * 20;
Time.setFramedTimeout(() => {
this.vx = 0;
this.vy = 0;
Time.setFramedTimeout(() => {
this.sustainVelocity = false;
this.canMove = true;
this.hasGravity = true;
this.vy = -this.gravity;
}, 2);
}, 6);
}
spearattack() {
const w = 200;
const h = 10;
let x = this.x + (this.w / 2);
x -= this.flipped ? w : 0;
const y = this.y + ((this.h / 2) - (h / 2));
const hitbox = new HitBox(this, x, y, w, h);
hitbox.collisionBehaviour = function playerAttackHit(col) {
if (col.team === 2) {
col.takeDamage(1);
col.doKnockback(this.direction, 200, 100);
}
};
this.driver.addEntity(hitbox);
}
attack() {
const s = 74;
const d = ((this.w + s) / 2) * (1 - (2 * this.flipped));
let x = this.x + (this.w / 2) + d + this.vx;
x -= s / 2;
const y = (this.y + (this.h / 2)) - (s / 2);
this.createHitBox(this.attackData, x, y, s);
}
createHitBox(attackData, x, y, s) {
const hitbox = new HitBox(this, x, y, s, s);
hitbox.collisionBehaviour = function playerAttackHit(col) {
if (col.team === 2) {
col.takeDamage(attackData.damage);
col.doKnockback(this.direction, attackData.knockback.vx, attackData.knockback.vy);
}
};
this.driver.addEntity(hitbox);
}
shoot() {
const w = 9;
const h = 3;
const d = ((this.w + w) / 2) * (1 - (2 * this.flipped));
let x = this.x + (this.w / 2) + d + this.vx;
x -= w / 2;
const y = (this.y + (this.h / 2)) - (h / 2);
const vx = (1 - (2 * this.flipped));
const vy = 0;
const speed = 20;
const damage = 1;
const projectile = new this.Projectile(vx, vy, speed, damage, this.world, x, y, w, h);
projectile.collisionBehaviour = function playerProjectileHit(col) {
if (col.team === 2) {
col.takeDamage(1);
col.doKnockback(this.direction, 10, 10);
}
};
this.driver.addEntity(projectile);
}
shootSpecific(projectile) {
const d = ((this.w + projectile.w) / 2) * (1 - (2 * this.flipped));
let x = this.x + (this.w / 2) + d + this.vx;
x -= projectile.w / 2;
const y = (this.y + (this.h / 2)) - (projectile.h / 2);
projectile.vx = (1 - (2 * this.flipped));
projectile.vy = 0;
projectile.x = x;
projectile.y = y;
this.driver.addEntity(projectile);
}
jump() {
if (!this.grounded) {
if (this.doubleJumps > 0) {
this.doubleJumps -= 1;
this.vy = -20;
}
} else {
this.vy = -20;
this.grounded = false;
}
}
onGrounded() {
this.grounded = true;
this.vy = 0;
this.doubleJumps = this.maxDoubleJumps;
}
takeDamage(dmg) {
if (this.invul) return;
this.invul = true;
this.color = '#f00';
Time.setFramedTimeout(() => { this.color = '#faa'; }, 10);
this.life -= dmg;
Time.setFramedTimeout(() => {
this.color = '#000';
this.invul = false;
}, 30);
}
doKnockback(dir, xForce, yForce) {
this.vx = xForce * dir;
this.vy = -1 * yForce;
this.sustainVelocity = true;
this.canMove = false;
Time.setFramedTimeout(() => {
this.canMove = true;
this.sustainVelocity = false;
}, 30);
}
}
BDOTJS.Player = Player;
}());
<file_sep>(function LoadButtonUI() {
const { Entity, Input, Driver } = BDOTJS;
class Clickable extends Entity {
constructor(...args) {
super(...args);
this.hover = false;
}
update() {
const { mouse } = Input;
if (this.containsPoint(mouse)) {
if (!this.hover) {
this.hover = true;
if (this.onHover) this.onHover();
}
} else if (this.hover) {
this.hover = false;
if (this.offHover) this.offHover();
}
if (mouse.down && this.hover) {
this.held = true;
if (this.onHeld) this.onHeld();
if (this.onClick) this.onClick();
}
if (mouse.up && this.held) {
// if(this.held&&this.hover)this.click();
this.held = false;
if (this.offHeld) this.offHeld();
}
}
}
function TextRender(canvas) {
const { x, y, w, h, color, text, background, centered } = this;
canvas.fillStyle = background;
canvas.fillRect(x, y, w, h);
canvas.fillStyle = color;
let tx = x;
let ty = y;
if (centered) {
canvas.textAlign = 'center';
tx += w / 2;
ty += 7 * h / 12;
}
canvas.fillText(text, tx, ty, w);
}
class ButtonUI extends Clickable {
constructor(text, onclick, ...args) {
super(...args);
this.onclick = onclick;
this.text = text;
}
initRenderer() {
this.text = 'button';
this.color = '#fff';
this.background = '#000';
this.centered = true;
this.draw = TextRender;
}
onHover() {
this.x += 1;
}
offHover() {
this.x -= 1;
}
onHeld() {
this.y += 1;
}
offHeld() {
this.y -= 1;
}
onClick() {
if (this.onclick) this.onclick();
}
}
class DraggableHeldUI extends Clickable {
constructor(...args) {
super(...args);
this.heldOffset = { x: 0, y: 0 };
}
update() {
super.update();
if (this.held) {
this.x = Input.mouse.x - this.heldOffset.x;
this.y = Input.mouse.y - this.heldOffset.y;
}
}
onHeld() {
this.heldOffset.x = Input.mouse.x - this.x;
this.heldOffset.y = Input.mouse.y - this.y;
}
}
class DraggableUI extends Clickable {
constructor(...args) {
super(...args);
this.on = false;
}
update() {
super.update();
if (this.on) {
this.x = Input.mouse.x;
this.y = Input.mouse.y;
}
}
onClick() {
this.on = !this.on;
if (!this.on) {
this.x -= this.w / 2;
this.y -= this.h / 2;
}
}
containsPoint(point) {
if (this.on && point.mouse) return true;
return super.containsPoint(point);
}
}
class TrollButtonUI extends Clickable {
constructor(text, onclick, ...args) {
super(...args);
this.onclick = onclick;
this.text = text;
this.forwards = true;
this.down = true;
}
update()
{
super.update();
const { mouse } = Input;
if (this.containsPoint(mouse)) {
this.onHover();
}
}
initRenderer() {
this.text = 'button';
this.color = '#fff';
this.background = '#000';
this.centered = true;
this.draw = TextRender;
}
onHover() {
do {
this.x = Math.random() * (Driver.getCanvas().canvas.width - this.w);
this.y = Math.random() * (Driver.getCanvas().canvas.height - this.h);
} while(this.containsPoint(Input.mouse));
}
offHover() {
}
onHeld() {
this.y += 1;
}
offHeld() {
this.y -= 1;
}
onClick() {
if (this.onclick) this.onclick();
}
}
class BasicText extends Clickable {
constructor(text, ...args) {
super(...args);
this.text = text;
}
initRenderer() {
this.text = 'button';
this.color = '#fff';
this.background = '#000';
this.centered = true;
this.draw = TextRender;
}
}
class ItemButtonUI extends DraggableHeldUI {//Clickable {
constructor(item, user, inv, draggable, ...args) {
super(...args);
this.item = item;
this.text = item.name;
this.user = user;
this.inv = inv;
this.tolerance = 10;
this.moved = false;
this.draggable = draggable;
}
initRenderer() {
this.text = 'button';
this.color = '#fff';
this.background = '#000';
this.centered = true;
this.draw = TextRender;
}
update() {
if(this.draggable) {
super.update();
if (!this.moved && Math.abs(this.x - this.prevX) > this.tolerance || Math.abs(this.y - this.prevY) > this.tolerance) {
this.moved = true;
}
}
}
onClick() {
this.prevX = this.x;
this.prevY = this.y;
this.moved = false;
}
offHeld() {
let index;
if (!this.moved) {
this.item.useItem(this.user);
this.inv.refresh();
} else if ( (index = this.overSlot(this.inv.slots) ) != -1 && this.inv.items[index] == null) {
this.inv.removeItem(this.item);
this.inv.items[index] = this.item;
this.inv.refresh();
} else {
this.x = this.prevX;
this.y = this.prevY;
}
}
overSlot(slots) {
for (var x = 0; x < slots.length; ++x) {
if(slots[x].containsPoint(Input.mouse)) {
return x;
}
}
return -1;
}
}
BDOTJS.ItemButtonUI = ItemButtonUI;
BDOTJS.ButtonUI = ButtonUI;
BDOTJS.DraggableUI = DraggableUI;
BDOTJS.DraggableHeldUI = DraggableHeldUI;
BDOTJS.TrollButtonUI = TrollButtonUI;
BDOTJS.BasicText = BasicText;
}());
<file_sep>(function LoadItem() {
const { Entity, Time } = BDOTJS;
class ItemObject extends Entity {
constructor(world, data, ...args) {
super(...args);
this.world = world;
this.data = data;
this.color = 'red';
this.vx = 0;
this.vy = 0;
this.gravity = 1;
this.pickedup = false;
data.parent = this;
}
update() {
this.applyGravity();
this.applyVelocity();
this.world.boundToFloor(this, this.onGrounded);
}
onCollision(col) {
if (Math.abs(this.vy) >= 25 && col.team === 2) {
col.takeDamage(3);
}
if (col.inventory && !this.pickedup && col.inventory.addItem(this.data)) {
this.pickedup = true;
this.shouldDelete = true;
}
}
}
class ItemData {
constructor(name, currstack, maxstack) {
this.name = name;
this.currstack = currstack;
this.maxstack = maxstack;
}
useItem(user) {
this.currstack -= 1;
this.itemBehaviour(user);
}
dropItem(dropee) {
this.parent.x = dropee.x + (dropee.w / 2);
this.parent.y = dropee.y;
this.parent.vy = 0;
this.parent.shouldDelete = false;
this.parent.driver.addEntity(this.parent);
Time.setFramedTimeout(() => {
this.parent.pickedup = false;
}, 30);
}
enforceMaxStack() {
if(this.currstack > this.maxstack) {
const diff = this.currstack - this.maxstack;
this.currstack = this.maxstack;
return diff;
}
return 0;
}
}
BDOTJS.ItemData = ItemData;
BDOTJS.ItemObject = ItemObject;
}());
| fa6f1d61e02d0b40cdc69d625d61bdd835247e5d | [
"JavaScript"
] | 8 | JavaScript | BMarcelus/Metroidvania1 | d8380cb16cab90d6c8c03f03658b2953e1664320 | 00afbeb0cf0b69199c1bb4932cd562df9edad56f |
refs/heads/master | <repo_name>dwishnuff/CS587-HW1<file_sep>/page.py
class page:
# a page basically contains a pageNo and content.
# the content typically contains a few records. However, we use a string for simplicity
def __init__(self, pageNo=None, content=None):
if pageNo is None:
self.pageNo = -1
else:
self.pageNo = pageNo
if content is None:
self.content = ""
else:
self.content = content
<file_sep>/frame.py
from page import page
class frame:
    # One buffer-pool frame: replacement-policy bookkeeping plus the page it
    # currently caches.
    def __init__(self):
        self.frameNumber = -1 # you may or may not use this attribute
        self.pinCount = 0  # how many clients currently have the page pinned
        self.dirtyBit = False  # True once the cached page has been modified
        self.referenced = 0  # reference bit/count -- presumably for a clock-style replacement policy; confirm
        self.currentPage = page()  # empty placeholder until a real page is loaded
"Python"
] | 2 | Python | dwishnuff/CS587-HW1 | 2b83695753e1c137657ddb6d5e59f53f8025695b | 6d393d3912aa65487e514c596692b6b6883189d8 |
refs/heads/master | <file_sep>package com.example.clock;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Button;
import android.widget.TextView;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Timer;
import java.util.TimerTask;
/**
 * Simple clock screen: shows the current time (refreshed by a Timer every
 * 100 ms), today's date, the local time zone, and a WebView with an
 * "on this day" history page. A button toggles 12/24-hour display.
 */
public class MainActivity extends AppCompatActivity {
    TextView time, date, zone;
    SimpleDateFormat df;
    Calendar c = Calendar.getInstance();
    WebView webview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        date = findViewById(R.id.date);
        time = findViewById(R.id.clock);
        zone = findViewById(R.id.zone);
        webview = findViewById(R.id.webview);
        webview.setWebViewClient(new WebViewClient());
        webview.loadUrl("https://www.timeanddate.com/on-this-day/");
        df = new SimpleDateFormat("zzzz");
        zone.setText(df.format(c.getTime()));
        // BUG FIX: "YYYY" is the week-based year and shows the wrong year
        // around New Year; the calendar-year pattern letter is "yyyy".
        df = new SimpleDateFormat("MMM d, yyyy G");
        date.setText(df.format(c.getTime()));
        df = new SimpleDateFormat("hh:mm:ss a");
        Timer t = new Timer();
        t.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                // BUG FIX: Timer callbacks run on a background thread. Both
                // the TextView update and the access to df (a non-thread-safe
                // SimpleDateFormat that switchto24 swaps on the UI thread)
                // must happen on the UI thread.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        c = Calendar.getInstance();
                        time.setText(df.format(c.getTime()));
                    }
                });
            }
        }, 0, 100);
    }

    /**
     * Toggles the clock between 12-hour and 24-hour formats; the button
     * label always shows the format you would switch to next.
     */
    public void switchto24(View view) {
        Button b = (Button) findViewById(R.id.switch_button);
        if (b.getText().equals(getString(R.string.switch12))) {
            df = new SimpleDateFormat("hh:mm:ss a");
            b.setText(getString(R.string.switch24));
        } else {
            df = new SimpleDateFormat("HH:mm:ss");
            b.setText(getString(R.string.switch12));
        }
    }
}
<file_sep># Sreekar's Clock
A fairly basic Clock app for Android which fetches the current time and date from the user's time zone and displays them on the screen. The user can switch between the 12-hr and 24-hr formats.
The app also embeds an Android WebView that displays a "This Day in History" webpage listing historical events for today's date.

| b2937f9f126f40ff1f44ff125355549975d64e81 | [
"Markdown",
"Java"
] | 2 | Java | SreekVed/android-clock | ceef1016fd19bd6b2b2cda306fdc0f30ed9a0278 | 343cdd57f1fa658ae5fe9ed571bc82d702e87bd2 |
refs/heads/master | <repo_name>StudentLifeMarketingAndDesign/exchanges<file_sep>/mysite/code/Page.php
<?php
use SilverStripe\CMS\Model\SiteTree;
use SilverStripe\Forms\CheckboxField;
use SilverStripe\Control\Session;
use SilverStripe\Blog\Model\BlogCategory;
/**
 * Base page type. Adds an optional dropdown-menu flag plus helpers for
 * sibling navigation and for looking up journal issues and blog categories.
 */
class Page extends SiteTree {

    private static $db = array(
        'DropdownMenu' => 'Boolean',
    );

    private static $has_one = array(
    );

    private static $has_many = array(
    );

    /**
     * Adds the "show dropdown menu" checkbox to the CMS main tab.
     */
    public function getCMSFields() {
        $fields = parent::getCMSFields();
        $fields->addFieldToTab("Root.Main", new CheckboxField("DropdownMenu", "Show dropdown menu for this page"), 'Content');
        return $fields;
    }

    /**
     * Next sibling in CMS sort order, or null when this is the last one.
     */
    public function NextPage() {
        return Page::get()->filter(array(
            'ParentID' => $this->ParentID,
            'Sort:GreaterThan' => $this->Sort,
        ))->First();
    }

    /**
     * Previous sibling in CMS sort order, or null when this is the first one.
     */
    public function PreviousPage() {
        return Page::get()->filter(array(
            'ParentID' => $this->ParentID,
            'Sort:LessThan' => $this->Sort,
        ))->Last();
    }

    /**
     * Featured issue of the site's home page, or null when no HomePage
     * record exists. (Previously a missing HomePage caused a fatal
     * method-call-on-null error.)
     */
    public function getCurrentIssue() {
        $homePage = HomePage::get()->First();
        if (!$homePage) {
            return null;
        }
        return $homePage->FeaturedIssue();
    }

    /**
     * Every Issue record in the site.
     */
    public function getAllIssues() {
        return Issue::get();
    }

    /**
     * All blog categories, sorted alphabetically by title.
     */
    public function BlogCategories() {
        return BlogCategory::get()->sort('Title ASC');
    }
}
<file_sep>/mysite/code/SecondaryJournal.php
<?php
/**
 * Journal variant of IssueHolder with a cover image; the first child page
 * is treated as the featured issue.
 */
class SecondaryJournal extends IssueHolder {

    private static $db = [
    ];

    private static $has_one = [
        'Image' => 'Image'
    ];

    public function getCMSFields() {
        return parent::getCMSFields();
    }

    /**
     * The first child page, treated as the featured issue.
     */
    public function FeaturedIssue() {
        return $this->Children()->First();
    }
}
<file_sep>/mysite/code/NewsPage.php
<?php
use SilverStripe\Blog\Model\Blog;
use SilverStripe\ORM\ArrayList;
use SilverStripe\Forms\CheckboxField;
use SilverStripe\Blog\Model\BlogPost;
use SilverStripe\ORM\FieldType\DBDatetime;
/**
 * A news article. Extends BlogPost with an abstract, a free-text author
 * field, and a flag to render the featured image at a smaller size.
 */
class NewsPage extends BlogPost {

    private static $default_parent = 'NewsHolderPage';

    private static $db = array(
        'Date' => 'DBDatetime',
        'Abstract' => 'Text',
        'Author' => 'Varchar(255)',
        'FeaturedImageSmall' => 'Boolean'
    );
    private static $defaults = array(
        'InheritSideBar' => true,
    );

    /**
     * Posts sharing this post's tags: up to 6 tags are considered, up to 3
     * posts per tag (oldest publish date first), this post excluded, with
     * duplicates removed from the combined list.
     *
     * NOTE(review): $holder is fetched but never used — confirm it can go.
     */
    public function RelatedPosts(){
        $holder = Blog::get()->First();
        $tags = $this->Tags()->limit(6);
        $entries = new ArrayList();
        foreach($tags as $tag){
            $taggedEntries = $tag->BlogPosts()->exclude(array("ID"=>$this->ID))->sort('PublishDate', 'ASC')->Limit(3);
            if($taggedEntries){
                foreach($taggedEntries as $taggedEntry){
                    if($taggedEntry->ID){
                        $entries->push($taggedEntry);
                    }
                }
            }
        }
        if($entries->count() > 1){
            $entries->removeDuplicates();
        }
        return $entries;
    }

    /**
     * Adds the small-featured-image checkbox to the CMS main tab.
     */
    public function getCMSFields(){
        $fields = parent::getCMSFields();
        $fields->addFieldToTab("Root.Main", new CheckboxField ("FeaturedImageSmall", "Show the featured image in a smaller format"), 'CustomSummary');
        return $fields;
    }
}
<?php
use SilverStripe\CMS\Controllers\ContentController;
use SilverStripe\Core\Extension;
/**
* Created by <NAME> (<EMAIL>).
* Date: 10/24/14
* Time: 7:00 PM
*
 * This script minifies the HTML that SilverStripe outputs. If you want to
 * compact your rendered HTML, this can be a handy snippet of code to use.
*
* To get it to work, download this file to your project folder and add this configs to the config.yml
*
* Controller:
* extensions:
* - SS_MinifiedResponseExtension
*
* This uses
* https://code.google.com/p/minify/source/browse/min/lib/Minify/HTML.php
* and changed in a way to work with SilverStripe
*
*
*/
/**
 * Swaps in an SS_MinifiedResponse for content controllers so the HTML they
 * output gets minified. Attach via the Controller extensions YAML config.
 */
class SS_MinifiedResponseExtension extends Extension {

    function onBeforeInit() {
        $owner = $this->owner;
        // Only front-end page controllers get the minifying response.
        if ($owner instanceof ContentController) {
            $owner->response = new SS_MinifiedResponse();
        }
    }
}
<?php
use SilverStripe\Forms\FormAction;
use SilverStripe\Forms\GridField\GridFieldDetailForm_ItemRequest;
use SilverStripe\ORM\DataExtension;
/**
 * DataExtension that adds a "Publish" action to a GridField item edit form
 * and publishes the edited record when triggered.
 */
class GridFieldPublish extends DataExtension
{
    // Append the Publish button to the detail form's action list.
    public function updateItemEditForm($form){
        $actions = $form->Actions();
        // The $action/$title assignments below are just inline argument
        // expressions — functional, but they read oddly.
        $publishAction = new FormAction (
            $action = "goPublish",
            $title = "Publish"
        );
        //$actions->push(FormAction::create('doPublish', _t('GridFieldDetailForm.Publish', 'Publish'))); //May have to create publish function in GridFieldPublish?? $this->owner->Publish()
        $actions->push($publishAction);
        $form->setActions($actions);
        //user_error("breakpoint", E_USER_ERROR);
    }
    // public function updateCMSFields(FieldList $fields){
    //     //user_error("breakpoint", E_USER_ERROR);
    //     // print_r("ASDSDASD");
    // }
    // Handler for the Publish action: looks up the record by the ID segment
    // of the current request, saves it, then publishes it.
    // NOTE(review): there is no null check after Page::get()->byID($ID), and
    // doPublish() is the legacy API (SilverStripe 4 uses publishRecursive)
    // — confirm before relying on this in production.
    public function goPublish(){
        // PHP method names are case-insensitive, so this resolves to
        // getToplevelController() below despite the different capitalisation.
        $pageController = $this->getTopLevelController();
        $ID = $pageController->request->param('ID');
        $toBePublished = Page::get()->byID($ID);
        $toBePublished->write();
        $toBePublished->doPublish();
        /*
        $message = _t(
            'GridFieldDetailForm.Published',
            'Published {name} {link}',
            array(
                'name' => 'AHAHA',
                'link' => $toBePublished->Link()
            )
        );
        $form->sessionMessage($message, 'good');
        */
    }
    // Walk up nested GridField item requests to reach the owning controller.
    protected function getToplevelController() {
        $c = $this->owner->getController();
        while($c && $c instanceof GridFieldDetailForm_ItemRequest) {
            $c = $c->getController();
        }
        return $c;
    }
}
| 349906493fe80aa1a86731c244b61643ed0a963f | [
"PHP"
] | 5 | PHP | StudentLifeMarketingAndDesign/exchanges | dac809a34e329baf44286aeb5d78406f0c5c6b5b | 59eb985aaf05e1dc1a5bbcddf6e4a04284bcc735 |
refs/heads/master | <file_sep>export type ZellerAdmin = "ADMIN";
/** Role literal for manager users. */
export type ZellerManager = "MANAGER";
/** Union of all recognised customer roles. */
export type ZellerRoles = ZellerAdmin | ZellerManager;

/** A customer record as returned by the listZellerCustomers query. */
export interface ZellerCustomer {
  email: string;
  id: string;
  name: string;
  // NOTE(review): typed as plain string rather than ZellerRoles — presumably
  // because the API may return other values; confirm before narrowing.
  role: string;
}

/** Shape of the listZellerCustomers GraphQL response payload. */
export interface ListZellerCustomersData {
  listZellerCustomers: {
    items: ZellerCustomer[];
  };
}
<file_sep>import { gql } from "@apollo/client";
/**
 * GraphQL query fetching every Zeller customer's email, id, name and role.
 */
export const LIST_ZELLER_CUSTOMERS = gql`
  query ListZellerCustomers {
    listZellerCustomers {
      items {
        email
        id
        name
        role
      }
    }
  }
`;
<file_sep># Zeller QA Challenge
A Zeller frontend developer has just finished building a feature and has handed it over to yourself for testing. Your job is to evaluate the solution against the 'Jira' ticket that was provided, to write test cases for the feature and to write the cypress end to end tests that validate the feature.
**Please provide back to Zeller**
- A document with your written test scenarios
- A link or zip to a cloned copy of the zeller-qa-challenge with your cypress tests included in cypress/integration/e2e.ts. Note that an example cypress test is written in e2e.ts and cypress is runnable using the yarn commands detailed below in *Setup Guidelines*
# Ticket Description
As a user I want to view a list of users filtered by their Role type.
**Acceptance criteria**
- The design should match the following provided image <br />
<img src="./Requirements/zeller-customers-design.png" alt="Requirements" width="500"/>
- The role types that a user can be filtered by are 'Admin' and 'Manager'.
- The feature should include unit tests
## Setup Guidelines
The solution has been deployed to a test environment at [https://zeller-qa-challenge.netlify.app/](https://zeller-qa-challenge.netlify.app/)
To run in development mode run `yarn start`. Open
[http://localhost:3000](http://localhost:3000) to view it in the browser.
The tests for this project are broken out into unit and system tests, in order
to run these please see the commands below:
* Unit tests: `yarn test`
* Unit tests (with coverage): `yarn test --watchAll=false --coverage`
* System tests: `yarn test:cypress`
* Typechecking: `yarn tsc --noEmit`
Note: when running the system tests, ensure the local application is running.
<file_sep>
/**
 * Shared Cypress helper: selects the given role radio button and verifies
 * the heading and the resulting filtered user list.
 *
 * @param usertype  radio-button label to select ('Admin' / 'Manager')
 * @param userTitle heading expected after filtering (e.g. 'ADMIN Users')
 * @param usersList user names expected in the filtered list, in order
 *
 * NOTE(review): the list selectors target generated styled-components class
 * names ("sc-gKAaRy bckzLI" etc.), which change on any rebuild — consider
 * adding data-testid hooks instead.
 */
export function usersTypes(usertype,userTitle,usersList)
{
    // Verify the Users Heading
    cy.get('h1').should('contain','User Types');
    // Select the radio button to filter based on User types
    cy.get('p').contains(usertype).siblings('input[type="radio"]').check().should('be.checked');
    // Verify the user title matches the radio button selected above
    cy.get('h1').should('contain',userTitle);
    // Verify all the users of the selected user type is filtered
    cy.get('div[class="sc-gKAaRy bckzLI"]').children('div[class="sc-gtsrHT igtccV"]').each(($el, index) => {
        expect($el).to.contain(usersList[index]);
    })
}
}<file_sep>import styled from "styled-components";
/** Body text paragraph: dark grey, 1.5em. */
export const Text = styled.p`
  color: #212322;
  font-size: 1.5em;
`;

export default Text;
<file_sep>import { ZellerAdmin, ZellerManager } from "./types";
/** Canonical role constant for admin users. */
export const ZELLER_ADMIN: ZellerAdmin = "ADMIN";
/** Canonical role constant for manager users. */
export const ZELLER_MANAGER: ZellerManager = "MANAGER";
<file_sep>import styled from "styled-components";
/** White content section with bottom spacing. */
export const Section = styled.section`
  background: #ffffff;
  margin-bottom: 2.5rem;
`;

export default Section;
<file_sep>import styled from "styled-components";
/** Page heading: bold, 2em, dark grey. */
export const Title = styled.h1`
  color: #212322;
  font-size: 2em;
  font-weight: bold;
`;

export default Title;
<file_sep>
import*as USER from '../support/usertypes.js';
// Expected fixture data for each role filter.
const admin = 'Admin';
const adminTitle = 'ADMIN Users';
const adminUsers = ['<NAME>', '<NAME>', '<NAME>'];
const manager = 'Manager';
const managerTitle = 'MANAGER Users';
const managerUsers = ['<NAME>', '<NAME>'];

describe('E2E Test-User Types', function () {
    // Load the app before every test.
    beforeEach(() => {
        cy.visit('http://localhost:3000');
    });

    it('Should be able to filter Admin users', () => {
        // Filter by the Admin role and verify the listed users.
        USER.usersTypes(admin, adminTitle, adminUsers);
    });

    it('Should be able to filter Manager users', () => {
        // Filter by the Manager role and verify the listed users.
        USER.usersTypes(manager, managerTitle, managerUsers);
    });
});
| 41de8779315f0b96d9f39108733e0225ff529f7f | [
"Markdown",
"TypeScript",
"JavaScript"
] | 9 | TypeScript | nivinhalila/zellerQA | 6ea4245630458b2eedf50f4738bf4b3a52baaee7 | 2d7f18aa4e8a1be80a5a0c75ba1e77808c1192e8 |
refs/heads/master | <repo_name>s55517kt/clicker<file_sep>/クリッカーゲーム/main.js
// Ask the player for a display name before the game starts.
let plyname = prompt("名前を教えてください");
// Master game-running flag; set to false on game over / game clear to freeze input.
let flag = true;
// ---- Player state ----
let plyLv = 1;        // player level
let plyHp = 15;       // current HP
let plyHpMax = 15;    // max HP (recomputed on level-up)
let plyAtt = 1;       // damage dealt per attack click
let plyHeal = 1;      // HP restored per heal click
let plyExp = 0;       // total experience earned
let plyExpNext = 15;  // EXP still needed for the next level
let plyExpNeed = [15, 30, 80, 150, 300, 500, 1000]; // per-level EXP requirements
let plyImg = document.getElementById("plyImg");
// plySt0..plySt6: the seven player status elements, gathered into an array.
// NOTE(review): `i` is an implicit global here — consider `let i`.
let plySt = new Array(7);
for (i = 0; i < 7; i++) {
  plySt[i] = document.getElementById("plySt" + i);
}
plySt[0].textContent = plyname;
// (Per-element plySt0..plySt6 lookups were replaced by the plySt[] loop above.)
// ---- Player heal: clicking the player sprite restores plyHeal HP ----
plyImg.addEventListener("mousedown", () => {
  if (flag) {
    plyImg.src = "img/playerC.png"; // pressed-state sprite
  }
});
plyImg.addEventListener("mouseup", () => {
  if (flag) {
    plyImg.src = "img/playerA.png"; // back to idle sprite
    plyHp += plyHeal;
    if (plyHp > plyHpMax) {
      plyHp = plyHpMax; // clamp to max
      // NOTE(review): the HP label is only refreshed in the else branch, so a
      // clamped heal leaves the display stale — confirm whether intended.
    } else {
      plySt[2].textContent = "HP:" + plyHp;
    }
  }
});
// ---- Enemy state (array index 0..9 = stage 1..10; current stage is eneLv) ----
let eneLv = 1;   // current enemy/stage number (1-based)
let eneHp = 10;  // current enemy HP
let eneHpMax = [10, 20, 30, 20, 60, 40, 10, 80, 90, 150]; // max HP per stage
let eneAtt = [1, 4, 6, 5, 5, 4, 20, 10, 7, 14];           // attack power per stage
let Attack = 1;  // NOTE(review): appears unused anywhere below — verify before removing
let enekill = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; // kill count per stage
let eneExp = [1, 5, 3, 4, 5, 6, 15, 8, 25, 0]; // EXP awarded per stage
let eneCnt = 19; // seconds until the enemy's next attack
let eneCntMax = [19, 15, 15, 13, 15, 16, 99, 18, 15, 17]; // attack interval per stage
let eneName = [
  "スライム",
  "コウモリ",
  "ドブネズミ",
  "ヘビ",
  "オオカミ",
  "ゴブリン",
  "ゴースト",
  "ゾンビ",
  "ファイヤ",
  "クマ",
];
// (A commented-out 2-D eneState table previously held the same data; the
// parallel arrays above replaced it.)
let eneImg = document.getElementById("eneImg");
// eneSt0..eneSt4: the five enemy status elements.
let eneSt = new Array(5);
for (i = 0; i < 5; i++) {
  eneSt[i] = document.getElementById("eneSt" + i);
}
eneSt[0].textContent = eneName[eneLv - 1];
// ---- Attacking: clicking the enemy sprite deals plyAtt damage ----
eneImg.addEventListener("mousedown", () => {
  if (flag) {
    eneImg.src = "img/enemyB" + (eneLv - 1) + ".png"; // hit-state sprite
  }
});
eneImg.addEventListener("mouseup", () => {
  if (flag) {
    eneImg.src = "img/enemyA" + (eneLv - 1) + ".png";
    if (eneHp > 0) {
      // Enemy still alive: just apply click damage.
      eneHp -= plyAtt;
    } else {
      // Enemy already at 0 HP (reached on an earlier click): record the kill,
      // award EXP, then advance to the next stage.
      enekill[eneLv - 1]++;
      eneSt[4].textContent = "倒した回数:" + enekill[eneLv - 1];
      plyExp += eneExp[eneLv - 1];
      plySt[5].textContent = "経験値:" + plyExp;
      plyExpNext -= eneExp[eneLv - 1];
      eneLv++;
      if (eneLv > 10) {
        // All 10 stages cleared: freeze the game and show the clear banner.
        flag = false;
        document.getElementById("clear").style.display = "block";
      } else {
        // Load the next stage's stats.
        // (Removed: `eneAttack = ...` wrote an undeclared global nothing reads,
        //  and `eneExp[0] = eneExp[eneLv - 1]` corrupted stage 1's EXP entry.)
        eneImg.src = "img/enemyA" + (eneLv - 1) + ".png";
        eneHp = eneHpMax[eneLv - 1]; // FIX: was eneHpMax[eneLv] — off by one
        eneCnt = eneCntMax[eneLv - 1];
        eneSt[0].textContent = eneName[eneLv - 1];
        eneSt[1].textContent = "レベル:" + eneLv;
        eneSt[2].textContent = "HP:" + eneHp;
        eneSt[3].textContent = "攻撃力:" + eneAtt[eneLv - 1]; // FIX: eneAttSc was undefined
        eneSt[4].textContent = "倒した回数:" + enekill[eneLv - 1]; // FIX: eneKill was a typo (ReferenceError)
        // Level up once enough EXP has been earned.
        if (plyExpNext <= 0) {
          plyExpNext = plyExpNeed[plyLv];
          plyLv++;
          plySt[1].textContent = "レベル:" + plyLv;
          plyHpMax = plyLv * 3 + 6;
          plyHp = plyHpMax; // full heal on level-up
          plySt[2].textContent = "HP:" + plyHp;
          plyAtt++;
          plySt[3].textContent = "攻撃力:" + plyAtt;
          plyHeal++;
          plySt[4].textContent = "回復魔法:" + plyHeal;
        }
      }
    }
    plySt[6].textContent = "次のレベルまでの経験値" + plyExpNext + "ポイント";
    eneSt[2].textContent = "HP:" + eneHp;
  }
});
// ---- Enemy attack timer: counts down once per second; at 0 the enemy strikes ----
let eneSec = document.getElementById("eneSec");
let loop = setInterval(() => {
  if (eneCnt > 0 && flag) {
    eneCnt--;
    eneSec.textContent = "モンスターの攻撃まで" + eneCnt + "秒";
  } else if (flag) {
    // Countdown reached zero: the enemy attacks the player.
    plyImg.src = "img/playerB.png"; // damaged-state sprite
    plyHp = plyHp - eneAtt[eneLv - 1];
    if (plyHp > 0) {
      plySt[2].textContent = "HP:" + plyHp;
      eneSec.textContent = "モンスターの攻撃まで" + eneCnt + "秒";
    } else {
      // Player defeated: stop the timer and freeze the game.
      plyHp = 0;
      clearInterval(loop);
      flag = false;
      plySt[2].textContent = "HP:" + plyHp;
      eneSec.textContent = "ゲームオーバー";
    }
    // After a short hit flash, restore the idle sprite and restart the countdown.
    setTimeout(() => {
      if (flag) {
        eneCnt = eneCntMax[eneLv - 1]; // FIX: was eneCntMax[0] — always stage 1's interval
        plyImg.src = "img/playerA.png";
        eneSec.textContent = "モンスターの攻撃まで" + eneCnt + "秒";
      }
    }, 500);
  }
  // (Removed: `eneSec.src = "img/clear.png"` ran every tick and set .src on a
  //  text element — a no-op left over from debugging.)
}, 1000);
// "Next enemy" button: manually advance one stage and load its stats.
let right = document.getElementById("right");
right.addEventListener("click", () => {
  // FIX: guard the upper bound — eneLv 11 would index past every stage array.
  if (flag && eneLv < 10) {
    eneLv++;
    // (Removed dead `eneAttack` global write and `eneExp[0]` corruption.)
    eneImg.src = "img/enemyA" + (eneLv - 1) + ".png";
    eneHp = eneHpMax[eneLv - 1];
    eneCnt = eneCntMax[eneLv - 1];
    eneSt[0].textContent = eneName[eneLv - 1];
    eneSt[1].textContent = "レベル:" + eneLv;
    eneSt[2].textContent = "HP:" + eneHp;
    eneSt[3].textContent = "攻撃力:" + eneAtt[eneLv - 1];
    eneSt[4].textContent = "倒した回数:" + enekill[eneLv - 1]; // FIX: eneKill was a typo (ReferenceError)
  }
});
// "Previous enemy" button: go back one stage and load its stats.
let left = document.getElementById("left");
left.addEventListener("click", () => {
  // FIX: guard the lower bound — eneLv 0 would index eneHpMax[-1] etc.
  if (flag && eneLv > 1) {
    eneLv--;
    // (Removed dead `eneAttack` global write and `eneExp[0]` corruption.)
    eneImg.src = "img/enemyA" + (eneLv - 1) + ".png";
    eneHp = eneHpMax[eneLv - 1];
    eneCnt = eneCntMax[eneLv - 1];
    eneSt[0].textContent = eneName[eneLv - 1];
    eneSt[1].textContent = "レベル:" + eneLv;
    eneSt[2].textContent = "HP:" + eneHp;
    eneSt[3].textContent = "攻撃力:" + eneAtt[eneLv - 1];
    eneSt[4].textContent = "倒した回数:" + enekill[eneLv - 1]; // FIX: eneKill was a typo (ReferenceError)
  }
});
| 4916763324a5c7650b0dea6bc75c4de5752a05e4 | [
"JavaScript"
] | 1 | JavaScript | s55517kt/clicker | 988673d143d69ac24ea38edfdecbb1ddc18429e7 | 1271e7b0b996788a67cfbe657270894e64a16a4b |
refs/heads/master | <file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>
/// Flat model mirror of the TinViecLam (job-posting) entity: the posting's own
/// fields plus its related lookup objects and the applications made to it.
/// </summary>
public class TinViecLamModel
{
    public int ID_ViecLam { get; set; }          // posting id
    public string TieuDeViecLam { get; set; }    // posting title
    public string MoTa { get; set; }             // description
    public int? ID_NganhNghe { get; set; }       // trade/industry FK
    public int? ID_ViTri { get; set; }           // position FK
    public string GioiTinh { get; set; }         // required gender
    public string YeuCauKyNang { get; set; }     // skill requirements
    public string ThoiGianThuViec { get; set; }  // probation period
    public int? ID_KinhNghiem { get; set; }      // experience-level FK
    public int? ID_TrinhDo { get; set; }         // qualification-level FK
    public string MucLuong { get; set; }         // salary
    public DateTime? NgayDang { get; set; }      // posted date
    public DateTime? NgayHetHan { get; set; }    // expiry date
    public bool? TrangThai { get; set; }         // active/status flag
    public int? ID_CongTy { get; set; }          // company FK
    public int? SoLuong { get; set; }            // number of openings
    public string YeuCauHoSo { get; set; }       // dossier requirements
    // Related objects resolved from the FKs above.
    public CongTy CongTy { get; set; }
    public List<DangKy> DangKies { get; set; }   // applications for this posting
    public KinhNghiem KinhNghiem { get; set; }
    public NganhNghe NganhNghe { get; set; }
    public TrinhDo TrinhDo { get; set; }
    public ViTriUngTuyen ViTriUngTuyen { get; set; }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>EF entity for the TaiKhoan (user account) table.</summary>
[Table("TaiKhoan")]
public partial class TaiKhoan
{
    [Key]
    public int ID_TaiKhoan { get; set; }       // account id
    public int? ID_LoaiTaiKhoan { get; set; }  // account-type FK
    [StringLength(150)]
    public string TenDangNhap { get; set; }    // login name
    [StringLength(50)]
    public string MatKhau { get; set; }        // password
    [StringLength(250)]
    public string HoTen { get; set; }          // full name
    [StringLength(50)]
    public string Email { get; set; }
    [StringLength(50)]
    public string SDT { get; set; }            // phone number
    public int? ID_Vung { get; set; }          // region FK
    // Navigation properties.
    public virtual LoaiTaiKhoan LoaiTaiKhoan { get; set; }
    public virtual VungMien VungMien { get; set; }
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
/// <summary>Data access for DangKy (job application) rows.</summary>
public class DangKyDAO
{
    TimKiemViecLamDbConText db = null;

    public DangKyDAO()
    {
        db = new TimKiemViecLamDbConText();
    }

    /// <summary>
    /// Inserts a new application. (Name kept for caller compatibility even
    /// though it reads "save company" — it has always stored a DangKy.)
    /// </summary>
    public bool LuuCongTy(DangKy DangKy)
    {
        db.DangKies.Add(DangKy);
        db.SaveChanges();
        return true;
    }

    /// <summary>Deletes the application with the given id; false on failure.</summary>
    public bool XoaDangKy(int iddangky)
    {
        try
        {
            var kq = db.DangKies.Find(iddangky);
            db.DangKies.Remove(kq);
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }

    /// <summary>Updates the viewed-date and status of an application; false on failure.</summary>
    public bool CapNhatDK(DangKy DangKy)
    {
        try
        {
            var kq = db.DangKies.Find(DangKy.ID_DangKy);
            // FIX: the fields were self-assigned (kq.X = kq.X), so the update
            // never changed anything; copy from the incoming parameter instead.
            kq.NgayXem = DangKy.NgayXem;
            kq.TrangThai = DangKy.TrangThai;
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>
/// EF entity for the CongTy (company) table: a recruiter account and profile,
/// linked to a city and to its job postings.
/// </summary>
[Table("CongTy")]
public partial class CongTy
{
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
    public CongTy()
    {
        TinViecLams = new HashSet<TinViecLam>();
    }

    [Key]
    public int ID_CongTy { get; set; }
    [StringLength(255)]
    [Display(Name = "Tên công ty (*)")]
    [Required(ErrorMessage ="Vui lòng nhập tên công ty")]
    public string TenCongTy { get; set; }      // company name
    [StringLength(150)]
    public string TenDangNhap { get; set; }    // login name
    [StringLength(50)]
    public string MatKhau { get; set; }        // password
    [StringLength(250)]
    public string DiaChi { get; set; }         // address
    [StringLength(250)]
    public string QuyMo { get; set; }          // company size
    [StringLength(50)]
    [Required(ErrorMessage = "Vui lòng nhập số điện thoại liên hệ")]
    public string SDT { get; set; }            // contact phone
    [StringLength(50)]
    public string Website { get; set; }
    [Required(ErrorMessage = "Vui lòng nhập sơ lượng về công ty")]
    public string MoTa { get; set; }           // company description
    [StringLength(150)]
    public string NguoiDaiDien { get; set; }   // representative
    [StringLength(150)]
    public string Email { get; set; }
    public int? ID_ThanhPho { get; set; }      // city FK
    public int? IsTuyenDung { get; set; }      // recruiting flag
    // Navigation properties.
    public virtual ThanhPho ThanhPho { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<TinViecLam> TinViecLams { get; set; }
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
/// <summary>Data access for candidate CVs (CV_UngVien rows).</summary>
public class CV_UngVienDAO
{
    TimKiemViecLamDbConText db = null;

    public CV_UngVienDAO()
    {
        db = new TimKiemViecLamDbConText();
    }

    /// <summary>Returns every CV. (Misleading name kept for caller compatibility.)</summary>
    public List<CV_UngVien> dsViTriUngTuyen() => db.CV_UngVien.ToList();

    /// <summary>Inserts a new CV.</summary>
    public bool LuuCV_UngVien(CV_UngVien CV_UngVien)
    {
        db.CV_UngVien.Add(CV_UngVien);
        db.SaveChanges();
        return true;
    }

    /// <summary>Deletes the CV with the given id; false on failure.</summary>
    public bool XoaCV_UngVien(int ID_CV_UngVien)
    {
        try
        {
            var CV_UngVien1 = db.CV_UngVien.Find(ID_CV_UngVien);
            db.CV_UngVien.Remove(CV_UngVien1);
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }

    /// <summary>Copies the editable fields onto an existing CV; false on failure.</summary>
    public bool SuaCV_UngVien(CV_UngVien CV_UngVien)
    {
        try
        {
            var kq = db.CV_UngVien.Find(CV_UngVien.ID_CV);
            kq.TieuDe = CV_UngVien.TieuDe;
            kq.ID_NganhNghe = CV_UngVien.ID_NganhNghe;
            kq.KyNang = CV_UngVien.KyNang;
            kq.ID_ViTri = CV_UngVien.ID_ViTri;
            kq.ID_TrinhDo = CV_UngVien.ID_TrinhDo;
            kq.ID_KinhNghiem = CV_UngVien.ID_KinhNghiem;
            kq.NgoaiNgu = CV_UngVien.NgoaiNgu;
            kq.MucLuong = CV_UngVien.MucLuong;
            kq.BangCap = CV_UngVien.BangCap;
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }

    /// <summary>
    /// Returns the id of the CV owned by the given candidate, or 0 when none exists.
    /// </summary>
    public int LayID_CV(int idungvien)
    {
        // FIX: the original cast an anonymous-type LINQ query straight to
        // CV_UngVien, which does not compile; select the id and take the first.
        return db.CV_UngVien
            .Where(a => a.ID_UngVien == idungvien)
            .Select(a => a.ID_CV)
            .FirstOrDefault();
    }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>
/// Flat model mirror of a candidate CV: the CV's fields plus the lookup
/// objects resolved from its foreign keys.
/// </summary>
public class CV_UngVienModel
{
    public int ID_CV { get; set; }          // CV id
    public int ID_UngVien { get; set; }     // owning candidate FK
    public string TieuDe { get; set; }      // CV title
    public int ID_NganhNghe { get; set; }   // trade/industry FK
    public string KyNang { get; set; }      // skills
    public int ID_ViTri { get; set; }       // desired position FK
    public int ID_TrinhDo { get; set; }     // qualification-level FK
    public int ID_KinhNghiem { get; set; }  // experience-level FK
    public string NgoaiNgu { get; set; }    // foreign languages
    public string MucLuong { get; set; }    // expected salary
    public string BangCap { get; set; }     // certificates/degrees
    public bool TrangThai { get; set; }     // active flag
    // Related objects.
    public KinhNghiem KinhNghiem { get; set; }
    public NganhNghe NganhNghe { get; set; }
    public TrinhDo TrinhDo { get; set; }
    public UngVien UngVien { get; set; }
    public ViTriUngTuyen ViTriUngTuyen { get; set; }
    public List<DangKy> DangKies { get; set; }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>EF entity for the UngVien (job candidate) table.</summary>
[Table("UngVien")]
public partial class UngVien
{
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
    public UngVien()
    {
        CV_UngVien = new HashSet<CV_UngVien>();
    }

    [Key]
    public int ID_UngVien { get; set; }
    [StringLength(150)]
    public string HoTen { get; set; }       // full name
    [StringLength(100)]
    public string MatKhau { get; set; }     // password
    [StringLength(250)]
    public string DiaChi { get; set; }      // address
    [Column(TypeName = "date")]
    public DateTime? NgaySinh { get; set; } // date of birth
    [StringLength(50)]
    public string GioiTinh { get; set; }    // gender
    [StringLength(150)]
    public string Email { get; set; }
    [StringLength(50)]
    public string SDT { get; set; }         // phone number
    public int? ID_ThanhPho { get; set; }   // city FK
    public int? IsUngVien { get; set; }     // candidate flag
    [StringLength(255)]
    public string TenDangNhap { get; set; } // login name
    // Navigation properties.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<CV_UngVien> CV_UngVien { get; set; }
    public virtual ThanhPho ThanhPho { get; set; }
}
}
<file_sep>using Model.Dao;
using Model.EF;
using System.Collections.Generic;
using System.Data;
using System.Web.Mvc;
namespace TimKiemViecLam.Controllers
{
/// <summary>Public-site controller for viewing and registering companies.</summary>
public class CongTyController : Controller
{
    private CongTyDAO congTyDAO;
    private ThanhPhoDAO thanhPhoDAO;

    public CongTyController()
    {
        congTyDAO = new CongTyDAO();
        thanhPhoDAO = new ThanhPhoDAO();
    }

    // GET: CongTy
    public ActionResult Index()
    {
        return View();
    }

    /// <summary>Shows the create/edit form with the city dropdown populated.</summary>
    [HttpGet]
    public ActionResult AddOrEdit()
    {
        DataTable _dt = thanhPhoDAO.LayDsThanhPho();
        ViewBag.cityList = ToSelectList(_dt, "ID_ThanhPho", "TenThanhPho");
        return View();
    }

    /// <summary>Persists the posted company and returns to the referring page.</summary>
    [HttpPost]
    public ActionResult AddOrEdit(CongTy congTy)
    {
        if (ModelState.IsValid)
        {
            bool result = congTyDAO.LuuCongTy(congTy);
            if (result)
            {
                // FIX: Request.UrlReferrer is null on a direct POST (no Referer
                // header), which made the original throw; fall back to Index.
                if (Request.UrlReferrer != null)
                {
                    return Redirect(Request.UrlReferrer.ToString());
                }
                return RedirectToAction("Index");
            }
            else
            {
                ModelState.AddModelError("", "Create company failed!");
            }
        }
        return View(congTy);
    }

    /// <summary>Builds a SelectList from two columns of a DataTable.</summary>
    [NonAction]
    public SelectList ToSelectList(DataTable table, string valueField, string textField)
    {
        List<SelectListItem> list = new List<SelectListItem>();
        foreach (DataRow row in table.Rows)
        {
            list.Add(new SelectListItem()
            {
                Text = row[textField].ToString(),
                Value = row[valueField].ToString()
            });
        }
        return new SelectList(list, "Value", "Text");
    }
}
}<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class NganhNgheDAO
{
TimKiemViecLamDbConText db = null;
public NganhNgheDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<NganhNghe> dsNganhNghe() {
return db.NganhNghes.ToList();
}
public bool LuuNganh(NganhNghe nganhNghe)
{
db.NganhNghes.Add(nganhNghe);
db.SaveChanges();
return true;
}
public bool XoaNganh(int id)
{
try
{
var kq = db.NganhNghes.Find(id);
db.NganhNghes.Remove(kq);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool SuaNganhNghe(NganhNghe nganhNghe)
{
try
{
var kq = db.NganhNghes.Find(nganhNghe.ID_NganhNghe);
kq.TenNganhNghe = nganhNghe.TenNganhNghe;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
[Table("TinViecLam")]
public partial class TinViecLam
{
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
public TinViecLam()
{
DangKies = new HashSet<DangKy>();
}
[Key]
public int ID_ViecLam { get; set; }
[StringLength(250)]
public string TieuDeViecLam { get; set; }
public string MoTa { get; set; }
public int? ID_NganhNghe { get; set; }
public int? ID_ViTri { get; set; }
[StringLength(100)]
public string GioiTinh { get; set; }
public string YeuCauKyNang { get; set; }
[StringLength(100)]
public string ThoiGianThuViec { get; set; }
public int? ID_KinhNghiem { get; set; }
public int? ID_TrinhDo { get; set; }
[StringLength(50)]
public string MucLuong { get; set; }
[Column(TypeName = "date")]
public DateTime? NgayDang { get; set; }
[Column(TypeName = "date")]
public DateTime? NgayHetHan { get; set; }
public bool? TrangThai { get; set; }
public int? ID_CongTy { get; set; }
public int? SoLuong { get; set; }
public string YeuCauHoSo { get; set; }
public virtual CongTy CongTy { get; set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public virtual ICollection<DangKy> DangKies { get; set; }
public virtual KinhNghiem KinhNghiem { get; set; }
public virtual NganhNghe NganhNghe { get; set; }
public virtual TrinhDo TrinhDo { get; set; }
public virtual ViTriUngTuyen ViTriUngTuyen { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace TimKiemViecLam.Areas.Admin.Controllers
{
/// <summary>Admin controller for the qualification-level (TrinhDo) screen.</summary>
public class TrinhDoController : Controller
{
    // GET: Admin/TrinhDo — renders the default view.
    public ActionResult Index() => View();
}
}<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.ViewModel
{
class DangTuyenDungModel
{
public int ID_ViecLam { get; set; }
[StringLength(250)]
public string TieuDeViecLam { get; set; }
public string MoTa { get; set; }
public int? ID_NganhNghe { get; set; }
public int? ID_ViTri { get; set; }
[StringLength(100)]
public string GioiTinh { get; set; }
public string YeuCauKyNang { get; set; }
[StringLength(100)]
public string ThoiGianThuViec { get; set; }
public int? ID_KinhNghiem { get; set; }
public int? ID_TrinhDo { get; set; }
[StringLength(50)]
public string MucLuong { get; set; }
[Column(TypeName = "date")]
public DateTime? NgayDang { get; set; }
[Column(TypeName = "date")]
public DateTime? NgayHetHan { get; set; }
public bool? TrangThai { get; set; }
public int? ID_CongTy { get; set; }
public int? SoLuong { get; set; }
public string YeuCauHoSo { get; set; }
public virtual CongTy CongTy { get; set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public virtual ICollection<DangKy> DangKies { get; set; }
public virtual KinhNghiem KinhNghiem { get; set; }
public virtual NganhNghe NganhNghe { get; set; }
public virtual TrinhDo TrinhDo { get; set; }
public virtual ViTriUngTuyen ViTriUngTuyen { get; set; }
public int ID_CongTy { get; set; }
[StringLength(255)]
[Display(Name = "Tên công ty (*)")]
[Required(ErrorMessage = "Vui lòng nhập tên công ty")]
public string TenCongTy { get; set; }
[StringLength(150)]
public string TenDangNhap { get; set; }
[StringLength(50)]
public string MatKhau { get; set; }
[StringLength(250)]
public string DiaChi { get; set; }
[StringLength(250)]
public string QuyMo { get; set; }
[StringLength(50)]
[Required(ErrorMessage = "Vui lòng nhập số điện thoại liên hệ")]
public string SDT { get; set; }
[StringLength(50)]
public string Website { get; set; }
[Required(ErrorMessage = "Vui lòng nhập sơ lượng về công ty")]
public string MoTa { get; set; }
[StringLength(150)]
public string NguoiDaiDien { get; set; }
[StringLength(150)]
public string Email { get; set; }
public int? ID_ThanhPho { get; set; }
public int? IsTuyenDung { get; set; }
public virtual ThanhPho ThanhPho { get; set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public virtual ICollection<TinViecLam> TinViecLams { get; set; }
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
/// <summary>Aggregates the lookup lists shown on the home/search page.</summary>
public class TimKiemTinViecLamDao
{
    TimKiemViecLamDbConText db = null;

    public TimKiemTinViecLamDao()
    {
        db = new TimKiemViecLamDbConText();
    }

    /// <summary>
    /// Loads every list the home page needs (companies, trades, cities, job
    /// postings, CVs and candidates) and bundles them into one model.
    /// </summary>
    public SearchHomeModel listHome()
    {
        var congTyDao = new CongTyDAO();
        var nganhNgheDao = new NganhNgheDAO();
        var thanhPhoDao = new ThanhPhoDAO();
        var tinViecLamDao = new TinViecLamDAO();

        var companies = congTyDao.dsCongTy();
        var trades = nganhNgheDao.dsNganhNghe();
        var cities = thanhPhoDao.dsThanhPho();
        var jobs = tinViecLamDao.dsTinViecLam();
        var cvs = db.CV_UngVien.ToList();
        var candidates = db.UngViens.ToList();

        return new SearchHomeModel()
        {
            CvUngviens = cvs,
            Ungviens = candidates,
            ThanhPhos = cities,
            NganhNghes = trades,
            CongTys = companies,
            TinViecLams = jobs
        };
    }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>EF entity for the VungMien (region) lookup table.</summary>
[Table("VungMien")]
public partial class VungMien
{
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
    public VungMien()
    {
        TaiKhoans = new HashSet<TaiKhoan>();
        ThanhPhoes = new HashSet<ThanhPho>();
    }

    [Key]
    public int ID_Vung { get; set; }
    [StringLength(150)]
    public string TenVung { get; set; } // region name
    // Navigation properties.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<TaiKhoan> TaiKhoans { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<ThanhPho> ThanhPhoes { get; set; }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>Flat model mirror of the UngVien (candidate) entity.</summary>
public class UngVienModel
{
    public int ID_UngVien { get; set; }
    public string HoTen { get; set; }       // full name
    public string MatKhau { get; set; }     // password
    public string DiaChi { get; set; }      // address
    public DateTime? NgaySinh { get; set; } // date of birth
    public string GioiTinh { get; set; }    // gender
    public string Email { get; set; }
    public string SDT { get; set; }         // phone number
    // Related objects.
    public List<CV_UngVien> CV_UngVien { get; set; }
    public ThanhPho ThanhPho { get; set; }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>EF entity for the ThanhPho (city) lookup table.</summary>
[Table("ThanhPho")]
public partial class ThanhPho
{
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
    public ThanhPho()
    {
        CongTies = new HashSet<CongTy>();
        UngViens = new HashSet<UngVien>();
    }

    [Key]
    public int ID_ThanhPho { get; set; }
    [StringLength(50)]
    public string TenThanhPho { get; set; } // city name
    public int? ID_Vung { get; set; }       // region FK
    // Navigation properties.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<CongTy> CongTies { get; set; }
    public virtual VungMien VungMien { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<UngVien> UngViens { get; set; }
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
/// <summary>EF entity for the NganhNghe (trade/industry) lookup table.</summary>
[Table("NganhNghe")]
public partial class NganhNghe
{
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
    public NganhNghe()
    {
        CV_UngVien = new HashSet<CV_UngVien>();
        TinViecLams = new HashSet<TinViecLam>();
    }

    [Key]
    public int ID_NganhNghe { get; set; }
    [StringLength(250)]
    public string TenNganhNghe { get; set; } // trade name
    // Navigation properties.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<CV_UngVien> CV_UngVien { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<TinViecLam> TinViecLams { get; set; }
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Data.Entity.Validation;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
/// <summary>Data access for companies (CongTy rows).</summary>
public class CongTyDAO
{
    TimKiemViecLamDbConText db = null;

    public CongTyDAO()
    {
        db = new TimKiemViecLamDbConText();
    }

    /// <summary>Returns every company.</summary>
    public List<CongTy> dsCongTy() => db.CongTies.ToList();

    /// <summary>
    /// Inserts a company; EF validation failures are logged to the debug output
    /// (the method still reports true, matching the original contract).
    /// </summary>
    public bool LuuCongTy(CongTy CongTy)
    {
        db.CongTies.Add(CongTy);
        try
        {
            db.SaveChanges();
        }
        catch (DbEntityValidationException ex)
        {
            foreach (var entityValidationErrors in ex.EntityValidationErrors)
            {
                foreach (var validationError in entityValidationErrors.ValidationErrors)
                {
                    System.Diagnostics.Debug.WriteLine("Property: " + validationError.PropertyName + " Error: " + validationError.ErrorMessage);
                }
            }
        }
        return true;
    }

    /// <summary>Deletes the company with the given id; false on failure.</summary>
    public bool XoaCongTy(int ID_CongTy)
    {
        try
        {
            var CongTi = db.CongTies.Find(ID_CongTy);
            db.CongTies.Remove(CongTi);
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }

    /// <summary>
    /// True when the login name is still free, false when any company already uses it.
    /// </summary>
    public bool KiemTraTenDangNhapCT(string tendangnhap)
    {
        // FIX: the old foreach returned after inspecting only the FIRST row, so
        // duplicates beyond row one were never detected; check the whole table.
        return !db.CongTies.Any(s => s.TenDangNhap == tendangnhap);
    }

    /// <summary>Updates a company's profile fields; false on failure.</summary>
    public bool SuaCongTy(CongTy CongTy)
    {
        try
        {
            var cty = db.CongTies.Find(CongTy.ID_CongTy);
            cty.TenCongTy = CongTy.TenCongTy; // FIX: was mistakenly assigned from TenDangNhap
            cty.TenDangNhap = CongTy.TenDangNhap;
            cty.MatKhau = CongTy.MatKhau;
            cty.DiaChi = CongTy.DiaChi;
            cty.SDT = CongTy.SDT;
            cty.Website = CongTy.Website;
            cty.MoTa = CongTy.MoTa;
            cty.NguoiDaiDien = CongTy.NguoiDaiDien;
            cty.Email = CongTy.Email;
            cty.ID_ThanhPho = CongTy.ID_ThanhPho;
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Model.Dao;
using Model.EF;
namespace TimKiemViecLam.Areas.Admin.Controllers
{
/// <summary>
/// Admin user-management controller. Every action (listing, create, edit,
/// delete, status toggle) is currently commented out, so the class only
/// inherits BaseController behaviour.
/// </summary>
public class UserController : BaseController
{
    // NOTE(review): the former Index/Create/Edit/Delete/ChangeStatus actions
    // were commented out wholesale; they depended on UserDao, RoleDao and an
    // Account model. Restore them from source control rather than from these
    // comments if the functionality is needed again.
}
}<file_sep>using Model.EF;
using PagedList;
using PagedList.Mvc;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
/// <summary>Data access for experience levels (KinhNghiem rows).</summary>
public class KinhNghiemDAO
{
    TimKiemViecLamDbConText db = null;

    public KinhNghiemDAO()
    {
        db = new TimKiemViecLamDbConText();
    }

    /// <summary>Returns every experience level.</summary>
    public List<KinhNghiem> GetAllList() => db.KinhNghiems.ToList();

    /// <summary>Inserts a new experience level.</summary>
    public bool luuKinhNghiem(KinhNghiem km)
    {
        db.KinhNghiems.Add(km);
        db.SaveChanges();
        return true;
    }

    /// <summary>
    /// Pages through experience levels, optionally filtered by a name substring.
    /// </summary>
    public IEnumerable<KinhNghiem> ListAll1(string searching, int page, int pageSize)
    {
        IQueryable<KinhNghiem> model = db.KinhNghiems;
        if (!string.IsNullOrEmpty(searching))
        {
            // FIX: the OrderBy previously chained here was immediately discarded
            // by the OrderBy below; keep only the filter.
            model = model.Where(x => x.TenKinhNghiem.Contains(searching));
        }
        return model.OrderBy(x => x.ID_KinhNghiem).ToPagedList(page, pageSize);
    }

    /// <summary>Deletes an experience level; false on failure.</summary>
    public bool xoaKinhNghiem(int ID_KinhNghiem)
    {
        try
        {
            var kinhNghiem = db.KinhNghiems.Find(ID_KinhNghiem);
            db.KinhNghiems.Remove(kinhNghiem);
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }

    /// <summary>Updates an experience level; false on failure.</summary>
    public bool suaKinhNghiem(KinhNghiem km)
    {
        try
        {
            var kinhNghiem = db.KinhNghiems.Find(km.ID_KinhNghiem);
            kinhNghiem.TenKinhNghiem = km.TenKinhNghiem;
            // NOTE(review): overwriting the TinViecLams navigation collection
            // from a detached object is suspect — confirm it is intended.
            kinhNghiem.TinViecLams = km.TinViecLams;
            db.SaveChanges();
            return true;
        }
        catch (Exception)
        {
            return false;
        }
    }
}
}
<file_sep>$(document).ready(function () {
// Hàm active tab nào đó
$('.danhMuc a').click(function (e) {
var href = $(this).attr('href');
$('.danhMuc a').removeClass('current');
$('.danhmic_listitem').removeClass('current');
$(this).addClass('current');
$(href).addClass('current');
//button
var list_a = $('.danhMuc a');
var a_selected = $(e.target);
console.log(list_a);
console.log(a_selected);
list_a.removeClass('current_select');
a_selected.addClass('current_select');
})
// hiện danh mục và hóa đơn
$('.list_table div').on("click", function () {
var nameTB = $(this).attr('data-content');
console.log(nameTB);
$('#home-tab-TT').text(nameTB);
});
//chọn món
function formatNumber(num) {
return num.toString().replace(/(\d)(?=(\d{3})+(?!\d))/g, "$1,")
}
// Hàm active tab nào đó
});<file_sep>using Model.Dao;
using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace TimKiemViecLam.Areas.Admin.Controllers
{
public class NganhNgheController : BaseController
{
// GET: Admin/NganhNghe
public ActionResult Index()
{
var nganhNghe = new NganhNgheDAO();
List<NganhNghe> nganhNghes = nganhNghe.dsNganhNghe();
return View(nganhNghes);
}
[HttpPost]
public ActionResult Delete(int id)
{
new NganhNgheDAO().XoaNganh(id);
SetAlert("Xóa ngành nghề thành công", "success");
return RedirectToAction("index");
}
[HttpPost]
public ActionResult Create(NganhNghe nganhNghe)
{
if (ModelState.IsValid)
{
var dao = new NganhNgheDAO();
var check = dao.LuuNganh(nganhNghe);
if (check == true)
{
SetAlert("Thêm ngành nghề thành công", "success");
return RedirectToAction("index");
}
else
{
ModelState.AddModelError("", "Thêm mới không thành công");
}
}
else
{
Response.Write("Không được để trống !");
}
return View("index");
}
[HttpPost]
public ActionResult Edit(NganhNghe nganhNghe)
{
if (ModelState.IsValid)
{
var dao = new NganhNgheDAO();
var check = dao.SuaNganhNghe(nganhNghe);
if (check == true)
{
SetAlert("Cập nhật ngành nghề thành công", "success");
return RedirectToAction("index");
}
else
{
ModelState.AddModelError("", "Cập nhật không thành công");
}
}
return View("index");
}
}
}<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the ThanhPho (city) entity: a city belongs to an
    /// optional region and groups companies and candidates.
    /// </summary>
    public class ThanhPhoModel
    {
        public int ID_ThanhPho { get; set; }          // primary key
        public string TenThanhPho { get; set; }       // city name
        public int? ID_Vung { get; set; }             // FK to VungMien (nullable)
        public List<CongTy> CongTies { get; set; }    // companies located here
        public VungMien VungMien { get; set; }        // owning region
        public List<UngVien> UngViens { get; set; }   // candidates living here
    }
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the TaiKhoan (admin/staff account) entity.
    /// </summary>
    public class TaiKhoanModel
    {
        public int ID_TaiKhoan { get; set; }           // primary key
        public int? ID_LoaiTaiKhoan { get; set; }      // FK to account type
        public string TenDangNhap { get; set; }        // login name
        public string MatKhau { get; set; }            // password
        public string HoTen { get; set; }              // full display name
        public string Email { get; set; }
        public string SDT { get; set; }                // phone number
        public int? ID_Vung { get; set; }              // FK to region
        public LoaiTaiKhoan LoaiTaiKhoan { get; set; } // account-type navigation
        public VungMien VungMien { get; set; }         // region navigation
    }
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the ViTriUngTuyen (applied-for position) lookup:
    /// referenced by candidate CVs and job postings.
    /// </summary>
    public partial class ViTriUngTuyenModel
    {
        public int ID_ViTri { get; set; }                // primary key
        public string TenViTri { get; set; }             // position name
        public List<CV_UngVien> CV_UngVien { get; set; } // CVs targeting this position
        public List<TinViecLam> TinViecLams { get; set; } // postings for this position
    }
}
<file_sep>using Model.Dao;
using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using TimKiemViecLam.Areas.Admin.Models;
using TimKiemViecLam.Common;
namespace TimKiemViecLam.Controllers
{
public class LoginClientController : Controller
{
// GET: LoginClient
public ActionResult Index()
{
return View();
}
[HttpPost]
public ActionResult Login(LoginModel model)
{
{
if (ModelState.IsValid)
{
var dao = new TaiKhoanDAO();
var result = dao.Login(model.UserName, model.PassWord);
if (result != null)
{
var user_Sesion = new UserLogin();
if (result is UngVien)
{
UngVien ungVien = (UngVien)result;
user_Sesion.TenDangNhap = ungVien.TenDangNhap;
user_Sesion.HoTen = ungVien.HoTen;
user_Sesion.IsUngVien = ungVien.IsUngVien;
Session.Add("User_Session1", user_Sesion);
return Redirect("/Admin/Home/Index");
}
if (result is CongTy)
{
CongTy congTy = (CongTy)result;
user_Sesion.TenDangNhap = congTy.TenDangNhap;
user_Sesion.HoTen = congTy.TenCongTy;
user_Sesion.IsTuyenDung = congTy.IsTuyenDung;
Session.Add("User_Session1", user_Sesion);
return RedirectToAction("Index", "Home");
}
}
else
{
ModelState.AddModelError("", "Tài khoản hoặc mật khẩu không đúng !");
}
}
return View("index");
}
}
}
}<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class VungMienDAO
{
TimKiemViecLamDbConText db = null;
public VungMienDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<VungMien> dsVung()
{
return db.VungMiens.ToList();
}
public bool LuuVung(VungMien vungMien)
{
db.VungMiens.Add(vungMien);
db.SaveChanges();
return true;
}
public bool XoaVung(int id)
{
try
{
var kq = db.VungMiens.Find(id);
db.VungMiens.Remove(kq);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool SuaVung(VungMien vungMien)
{
try
{
var kq = db.VungMiens.Find(vungMien.ID_Vung);
kq.TenVung = vungMien.TenVung;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the VungMien (region) entity: a region groups
    /// accounts and cities.
    /// </summary>
    public class VungMienModel
    {
        public int ID_Vung { get; set; }               // primary key
        public string TenVung { get; set; }            // region name
        public List<TaiKhoan> TaiKhoans { get; set; }  // accounts in this region
        public List<ThanhPho> ThanhPhoes { get; set; } // cities in this region
    }
}
<file_sep>using Model.EF;
using System;
using System.Configuration;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class ThanhPhoDAO
{
TimKiemViecLamDbConText db = null;
public ThanhPhoDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<ThanhPho> dsThanhPho()
{
return db.ThanhPhoes.ToList();
}
public DataTable LayDsThanhPho()
{
string constr = ConfigurationManager.ConnectionStrings["TimKiemViecLam"].ToString();
SqlConnection _con = new SqlConnection(constr);
SqlDataAdapter _da = new SqlDataAdapter("Select * From ThanhPho", constr);
DataTable _dt = new DataTable();
_da.Fill(_dt);
return _dt;
}
public bool LuuTP(ThanhPho thanhPho)
{
db.ThanhPhoes.Add(thanhPho);
db.SaveChanges();
return true;
}
public bool XoaThanhPho(int id)
{
try
{
var kq = db.ThanhPhoes.Find(id);
db.ThanhPhoes.Remove(kq);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool SuaThanhPho(ThanhPho thanhPho)
{
try
{
var kq = db.ThanhPhoes.Find(thanhPho.ID_ThanhPho);
kq.TenThanhPho = thanhPho.TenThanhPho;
kq.ID_Vung= thanhPho.ID_Vung;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the NganhNghe (industry) lookup: referenced by
    /// candidate CVs and job postings.
    /// </summary>
    public class NganhNgheModel
    {
        public int ID_NganhNghe { get; set; }             // primary key
        public string TenNganhNghe { get; set; }          // industry name
        public List<CV_UngVien> CV_UngVien { get; set; }  // CVs in this industry
        public List<TinViecLam> TinViecLams { get; set; } // postings in this industry
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Model.Dao;
using Model.EF;
using TimKiemViecLam.Areas.Admin.Models;
using TimKiemViecLam.Common;
using TimKiemViecLam.Areas;
namespace TimKiemViecLam.Areas.Admin.Controllers
{
public class LoginController : Controller
{
// GET: Admin/Login
public ActionResult Index()
{
return View();
}
[HttpPost]
public ActionResult Login(LoginModel model)
{
{
if (ModelState.IsValid)
{
var dao = new TaiKhoanDAO();
var result = dao.Login(model.UserName, model.PassWord);
if (result !=null)
{
var user_Sesion = new UserLogin();
if (result is UngVien)
{
UngVien ungVien = (UngVien) result ;
user_Sesion.TenDangNhap = ungVien.TenDangNhap;
user_Sesion.HoTen = ungVien.HoTen;
user_Sesion.IsUngVien = ungVien.IsUngVien;
Session.Add("User_Session", user_Sesion);
return RedirectToAction("Index", "Home");
}
if (result is CongTy)
{
CongTy congTy = (CongTy)result;
user_Sesion.TenDangNhap = congTy.TenDangNhap;
user_Sesion.HoTen = congTy.TenCongTy;
user_Sesion.IsUngVien = congTy.IsTuyenDung;
Session.Add("User_Session", user_Sesion);
return RedirectToAction("Index", "Home");
}
if (result is TaiKhoan)
{
TaiKhoan taiKhoanAdmin = (TaiKhoan)result;
user_Sesion.TenDangNhap = taiKhoanAdmin.TenDangNhap;
user_Sesion.HoTen = taiKhoanAdmin.HoTen;
user_Sesion.ID_LoaiTaiKhoan = taiKhoanAdmin.ID_LoaiTaiKhoan;
Session.Add("User_Session", user_Sesion);
return Redirect("/Admin/Home/Index");
}
}
else
{
ModelState.AddModelError("", "Tài khoản hoặc mật khẩu không đúng !");
}
}
return View("index");
}
}
}
}<file_sep>$(document).ready(function(){
$("#btncreate").on("click", function (e) {
//e.preventDefault();
//if($("#iAreaname").input == null)
//{
// $(".errorName").html("Không được để trống !");
//}
//else
//{
// $("#iAreaname").value = input.value;
//}
});
})<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class TrinhDoDAO
{
TimKiemViecLamDbConText db = null;
public TrinhDoDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<TrinhDo> dsTrinhDo() => db.TrinhDoes.ToList();
public bool LuuTrinhDo(TrinhDo trinhDo ) {
db.TrinhDoes.Add(trinhDo);
db.SaveChanges();
return true;
}
public bool XoaTrinhDo(int ID_trinhDo)
{
try
{
var TrinhDoess = db.TrinhDoes.Find(ID_trinhDo);
db.TrinhDoes.Remove(TrinhDoess);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool SuaTrinhDo(TrinhDo trinhDo)
{
try
{
var trinhDo1 = db.TrinhDoes.Find(trinhDo.ID_TrinhDo);
trinhDo1.TenTrinhDo = trinhDo.TenTrinhDo;
trinhDo1.TinViecLams = trinhDo.TinViecLams;
trinhDo1.CV_UngVien = trinhDo.CV_UngVien;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the KinhNghiem (experience level) lookup: referenced
    /// by candidate CVs and job postings.
    /// </summary>
    public class KinhNghiemModel
    {
        public int ID_KinhNghiem { get; set; }            // primary key
        public string TenKinhNghiem { get; set; }         // experience-level name
        public List<CV_UngVien> CV_UngVien { get; set; }  // CVs with this level
        public List<TinViecLam> TinViecLams { get; set; } // postings requiring this level
    }
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.ViewModel
{
    /// <summary>
    /// Composite view model for the public home and job-detail pages: list
    /// properties feed the home page, the singular properties feed the detail
    /// page of one posting (see HomeDAO.ListAs / ListDetail).
    /// </summary>
    public class HomeView
    {
        public List<UngVien> UngViens { get; set; }       // all candidates
        public List<TinViecLam> TinViecLams { get; set; } // all postings
        public List<CongTy> CongTies { get; set; }        // all companies
        public List<CV_UngVien> CV_UngVien { get; set; }  // all candidate CVs
        public CongTy CongTy { get; set; }                // company of the selected posting
        public NganhNghe NganhNghe { get; set; }          // industry of the selected posting
        public KinhNghiem KinhNghiem { get; set; }        // required experience
        public ViTriUngTuyen ViTriUngTuyen { get; set; }  // position
        public TrinhDo TrinhDo { get; set; }              // required education
        public ThanhPho ThanhPho { get; set; }            // company's city
        public TinViecLam TinViecLam { get; set; }        // the selected posting
    }
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// EF entity for the DangKy (application/registration) table: links one
    /// candidate CV to one job posting with view/apply dates and a status flag.
    /// </summary>
    [Table("DangKy")]
    public partial class DangKy
    {
        [Key]
        public int ID_DangKy { get; set; }
        public int? ID_CV { get; set; }       // FK to CV_UngVien
        public int? ID_ViecLam { get; set; }  // FK to TinViecLam
        [Column(TypeName = "date")]
        public DateTime? NgayXem { get; set; }        // date viewed
        [Column(TypeName = "date")]
        public DateTime? NgayUngTuyen { get; set; }   // date applied
        public bool? TrangThai { get; set; }          // status flag
        public virtual CV_UngVien CV_UngVien { get; set; }
        public virtual TinViecLam TinViecLam { get; set; }
    }
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the DangKy (application) entity, mirroring the EF
    /// entity without mapping attributes.
    /// </summary>
    public class DangKyModel
    {
        public int ID_DangKy { get; set; }           // primary key
        public int? ID_CV { get; set; }              // FK to CV_UngVien
        public int? ID_ViecLam { get; set; }         // FK to TinViecLam
        public DateTime? NgayXem { get; set; }       // date viewed
        public DateTime? NgayUngTuyen { get; set; }  // date applied
        public bool? TrangThai { get; set; }         // status flag
        public CV_UngVien CV_UngVien { get; set; }
        public TinViecLam TinViecLam { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace TimKiemViecLam.Common
{
    /// <summary>
    /// Session payload stored after a successful login; holds whichever of the
    /// three account kinds (admin, candidate, company) signed in — the unused
    /// fields stay at their defaults.
    /// </summary>
    public class UserLogin
    {
        public int ID_TaiKhoan { get; set; }
        public string TenDangNhap { get; set; }    // login name
        public string HoTen { get; set; }          // display name
        public int? ID_LoaiTaiKhoan { get; set; }  // set for admin accounts
        public int? IsUngVien { get; set; }        // set for candidate accounts
        public int? IsTuyenDung { get; set; }      // set for company accounts
    }
}<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// EF entity for the KinhNghiem (experience level) lookup table, referenced
    /// by candidate CVs and job postings.
    /// </summary>
    [Table("KinhNghiem")]
    public partial class KinhNghiem
    {
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
        public KinhNghiem()
        {
            // Initialize navigation collections so they are never null.
            CV_UngVien = new HashSet<CV_UngVien>();
            TinViecLams = new HashSet<TinViecLam>();
        }
        [Key]
        public int ID_KinhNghiem { get; set; }
        [StringLength(255)]
        public string TenKinhNghiem { get; set; }
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public virtual ICollection<CV_UngVien> CV_UngVien { get; set; }
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public virtual ICollection<TinViecLam> TinViecLams { get; set; }
    }
}
<file_sep>using Model.EF;
using Model.ViewModel;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class HomeDAO
{
TimKiemViecLamDbConText db = null;
public HomeDAO() {
db = new TimKiemViecLamDbConText();
}
public HomeView ListAs()
{
var cV_UngViens = db.CV_UngVien.ToList();
var UngViens = db.UngViens.ToList();
var tinViecLam = db.TinViecLams.ToList();
var congTies = db.CongTies.ToList();
var model = new HomeView
{
CV_UngVien = cV_UngViens,
UngViens = UngViens,
TinViecLams = tinViecLam,
CongTies = congTies
};
return model;
}
public HomeView ListDetail(int? id)
{
if ( db.TinViecLams.Find(id) == null)
{
return null;
}
TinViecLam tinViecLam = db.TinViecLams.Find(id);
var congty = db.CongTies.Find(tinViecLam.ID_CongTy);
var thanhPho = db.ThanhPhoes.Find(congty.ID_ThanhPho);
var nganhNghe = db.NganhNghes.Find(tinViecLam.ID_NganhNghe);
var trinhdo = db.TrinhDoes.Find(tinViecLam.ID_TrinhDo);
var kinhnghiem = db.KinhNghiems.Find(tinViecLam.ID_KinhNghiem);
var vitri = db.ViTriUngTuyens.Find(tinViecLam.ID_ViTri);
CongTyDAO congTyDAO = new CongTyDAO();
NganhNgheDAO ngheDAO = new NganhNgheDAO();
ThanhPhoDAO thanhPhoDAO = new ThanhPhoDAO();
TinViecLamDAO tinViecLamDAO = new TinViecLamDAO();
var CongTyList = congTyDAO.dsCongTy();
var tinviecLamList = tinViecLamDAO.dsTinViecLam();
var model = new HomeView
{
CongTy = congty,
ThanhPho = thanhPho,
NganhNghe = nganhNghe,
TrinhDo = trinhdo,
KinhNghiem = kinhnghiem,
ViTriUngTuyen = vitri,
TinViecLam = tinViecLam,
TinViecLams = tinviecLamList,
CongTies = CongTyList,
};
return model;
}
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class LoaiTaiKhoanDAO
{
TimKiemViecLamDbConText db = null;
public LoaiTaiKhoanDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<LoaiTaiKhoan> dsLoaiTK()
{
return db.LoaiTaiKhoans.ToList();
}
public bool LuuLoaiTK(LoaiTaiKhoan LoaiTaiKhoan)
{
db.LoaiTaiKhoans.Add(LoaiTaiKhoan);
db.SaveChanges();
return true;
}
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class ViTriUngTuyenDao
{
TimKiemViecLamDbConText db = null;
public ViTriUngTuyenDao()
{
db = new TimKiemViecLamDbConText();
}
public List<ViTriUngTuyen> dsViTriUngTuyen() => db.ViTriUngTuyens.ToList();
public bool LuuViTriUngTuyen(ViTriUngTuyen viTriUngTuyen)
{
db.ViTriUngTuyens.Add(viTriUngTuyen);
db.SaveChanges();
return true;
}
public bool XoaViTriUngTuyen(int ID_ViTriUngTuyen)
{
try
{
var ViTriUngTuyens1 = db.ViTriUngTuyens.Find(ID_ViTriUngTuyen);
db.ViTriUngTuyens.Remove(ViTriUngTuyens1);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool SuaViTriUngTuyen(ViTriUngTuyen ViTriUngTuyen)
{
try
{
var ViTriUngTuyen1 = db.ViTriUngTuyens.Find(ViTriUngTuyen.ID_ViTri);
ViTriUngTuyen1.TenViTri = ViTriUngTuyen.TenViTri;
ViTriUngTuyen1.TinViecLams = ViTriUngTuyen.TinViecLams;
ViTriUngTuyen1.CV_UngVien = ViTriUngTuyen.CV_UngVien;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
}
}
<file_sep>using Model.EF;
using Model.Dao;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using TimKiemViecLam.Areas.Admin.Controllers;
namespace TimKiemViecLam.Controllers
{
public class TinViecLamController : BaseController
{
TimKiemViecLamDbConText db = null;
public TinViecLamController()
{
db = new TimKiemViecLamDbConText();
}
// GET: TinViecLam
public ActionResult Index()
{
var dao = new HomeDAO();
var model = dao.ListAs();
ViewBag.NganhNghe = new SelectList(db.NganhNghes, "ID_NganhNghe", "TenNganhNghe");
ViewBag.ThanhPho = new SelectList(db.ThanhPhoes, "ID_ThanhPho", "TenThanhPho");
var timKiemTinViecLamDao = new TimKiemTinViecLamDao();
SearchHomeModel searchHomeModel = timKiemTinViecLamDao.listHome();
return View(searchHomeModel);
}
public ActionResult DetailIndex(int? id)
{
var dao = new HomeDAO();
var model = dao.ListDetail(id);
if (id != null) {
return View(model);
}
if (model == null)
{
return RedirectToAction("Index", "TinViecLam");
}
return RedirectToAction("Index", "TinViecLam");
}
}
}<file_sep>using Model.Dao;
using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace TimKiemViecLam.Areas.Admin.Controllers
{
public class KinhNghiemController : BaseController
{
// GET: Admin/KinhNghiem
public ActionResult Index()
{
var kinhNghiem = new KinhNghiemDAO();
List<KinhNghiem> kinhNghiems = kinhNghiem.GetAllList();
return View(kinhNghiems);
}
[HttpPost]
public ActionResult Delete(int id)
{
new KinhNghiemDAO().xoaKinhNghiem(id);
SetAlert("Xóa kinh nghiệm thành công", "success");
return RedirectToAction("index");
}
[HttpPost]
public ActionResult Create(KinhNghiem kinhNghiem)
{
if (ModelState.IsValid)
{
var dao = new KinhNghiemDAO();
var check = dao.luuKinhNghiem(kinhNghiem);
if (check == true)
{
SetAlert("Thêm kinh nghiệm thành công", "success");
return RedirectToAction("index");
}
else
{
ModelState.AddModelError("", "Thêm mới không thành công");
}
}
else
{
Response.Write("Không được để trống !");
}
return View("index");
}
[HttpPost]
public ActionResult Edit(KinhNghiem kinhNghiem)
{
if (ModelState.IsValid)
{
var dao = new KinhNghiemDAO();
var check = dao.suaKinhNghiem(kinhNghiem);
if (check == true)
{
SetAlert("Cập nhật kinh nghiệm thành công", "success");
return RedirectToAction("index");
}
else
{
ModelState.AddModelError("", "Cập nhật không thành công");
}
}
return View("index");
}
}
}<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// EF entity for a candidate's CV: profile details plus FKs into the
    /// industry/position/education/experience lookup tables, and the set of
    /// applications (DangKy) made with it.
    /// </summary>
    public partial class CV_UngVien
    {
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
        public CV_UngVien()
        {
            // Initialize the navigation collection so it is never null.
            DangKies = new HashSet<DangKy>();
        }
        [Key]
        public int ID_CV { get; set; }
        public int? ID_UngVien { get; set; }   // FK to the owning candidate
        [StringLength(255)]
        public string TieuDe { get; set; }     // CV title
        public int? ID_NganhNghe { get; set; } // FK to industry
        public string KyNang { get; set; }     // skills description
        public int? ID_ViTri { get; set; }     // FK to desired position
        public int? ID_TrinhDo { get; set; }   // FK to education level
        public int? ID_KinhNghiem { get; set; } // FK to experience level
        [StringLength(255)]
        public string NgoaiNgu { get; set; }   // foreign languages
        [StringLength(100)]
        public string MucLuong { get; set; }   // expected salary
        [StringLength(200)]
        public string BangCap { get; set; }    // qualifications
        public bool? TrangThai { get; set; }   // status flag
        public virtual KinhNghiem KinhNghiem { get; set; }
        public virtual NganhNghe NganhNghe { get; set; }
        public virtual TrinhDo TrinhDo { get; set; }
        public virtual UngVien UngVien { get; set; }
        public virtual ViTriUngTuyen ViTriUngTuyen { get; set; }
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public virtual ICollection<DangKy> DangKies { get; set; }
    }
}
<file_sep>namespace Model.EF
{
using System;
using System.Data.Entity;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
    /// <summary>
    /// EF6 database context for the job-search application; connects via the
    /// "TimKiemViecLam" connection string from the configuration file.
    /// </summary>
    public partial class TimKiemViecLamDbConText : DbContext
    {
        public TimKiemViecLamDbConText()
            : base("name=TimKiemViecLam")
        {
        }
        // One DbSet per mapped table.
        public virtual DbSet<CongTy> CongTies { get; set; }
        public virtual DbSet<CV_UngVien> CV_UngVien { get; set; }
        public virtual DbSet<DangKy> DangKies { get; set; }
        public virtual DbSet<KinhNghiem> KinhNghiems { get; set; }
        public virtual DbSet<LoaiTaiKhoan> LoaiTaiKhoans { get; set; }
        public virtual DbSet<NganhNghe> NganhNghes { get; set; }
        public virtual DbSet<TaiKhoan> TaiKhoans { get; set; }
        public virtual DbSet<ThanhPho> ThanhPhoes { get; set; }
        public virtual DbSet<TinViecLam> TinViecLams { get; set; }
        public virtual DbSet<TrinhDo> TrinhDoes { get; set; }
        public virtual DbSet<UngVien> UngViens { get; set; }
        public virtual DbSet<ViTriUngTuyen> ViTriUngTuyens { get; set; }
        public virtual DbSet<VungMien> VungMiens { get; set; }
        // IsUnicode(false) maps these string columns as non-Unicode (varchar):
        // login names, passwords, phone numbers, URLs and emails.
        protected override void OnModelCreating(DbModelBuilder modelBuilder)
        {
            modelBuilder.Entity<CongTy>()
                .Property(e => e.TenDangNhap)
                .IsUnicode(false);
            modelBuilder.Entity<CongTy>()
                .Property(e => e.MatKhau)
                .IsUnicode(false);
            modelBuilder.Entity<CongTy>()
                .Property(e => e.SDT)
                .IsUnicode(false);
            modelBuilder.Entity<CongTy>()
                .Property(e => e.Website)
                .IsUnicode(false);
            modelBuilder.Entity<CongTy>()
                .Property(e => e.Email)
                .IsUnicode(false);
            modelBuilder.Entity<TaiKhoan>()
                .Property(e => e.TenDangNhap)
                .IsUnicode(false);
            modelBuilder.Entity<TaiKhoan>()
                .Property(e => e.MatKhau)
                .IsUnicode(false);
            modelBuilder.Entity<TaiKhoan>()
                .Property(e => e.Email)
                .IsUnicode(false);
            modelBuilder.Entity<TaiKhoan>()
                .Property(e => e.SDT)
                .IsUnicode(false);
            modelBuilder.Entity<UngVien>()
                .Property(e => e.MatKhau)
                .IsUnicode(false);
            modelBuilder.Entity<UngVien>()
                .Property(e => e.Email)
                .IsUnicode(false);
            modelBuilder.Entity<UngVien>()
                .Property(e => e.SDT)
                .IsUnicode(false);
            modelBuilder.Entity<UngVien>()
                .Property(e => e.TenDangNhap)
                .IsUnicode(false);
        }
    }
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace Model.EF
{
    /// <summary>
    /// Catch-all view model for the job-search pages: holds one instance and
    /// one list of each entity kind so a single model can serve list and
    /// detail renderings.
    /// </summary>
    public class SearchHomeModel
    {
        public UngVien UngVien { get; set; }
        public CV_UngVien CvUngvien { get; set; }
        public List<UngVien> Ungviens { get; set; }
        public List<CV_UngVien> CvUngviens { get; set; }
        public CongTy CongTy { get; set; }
        public List<CongTy> CongTys { get; set; }
        public List<TinViecLam> TinViecLams { get; set; }
        public TinViecLam TinViecLam { get; set; }
        public List<DangKy> DangKies { get; set; }
        public List<KinhNghiem> KinhNghiem { get; set; }   // list despite the singular-sounding name
        public KinhNghiem kinhNghiem { get; set; }
        public List<NganhNghe> NganhNghes { get; set; }
        public NganhNghe NganhNghe { get; set; }
        public List<TrinhDo> TrinhDos { get; set; }
        public TrinhDo TrinhDo { get; set; }
        public List<ViTriUngTuyen> ViTriUngTuyens { get; set; }
        public ViTriUngTuyen ViTriUngTuyen { get; set; }
        public ThanhPho ThanhPho { get; set; }
        public List<ThanhPho> ThanhPhos { get; set; }
    }
}<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
[Table("TrinhDo")]
public partial class TrinhDo
{
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
public TrinhDo()
{
CV_UngVien = new HashSet<CV_UngVien>();
TinViecLams = new HashSet<TinViecLam>();
}
[Key]
public int? ID_TrinhDo { get; set; }
[StringLength(250)]
public string TenTrinhDo { get; set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public virtual ICollection<CV_UngVien> CV_UngVien { get; set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public virtual ICollection<TinViecLam> TinViecLams { get; set; }
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace TimKiemViecLam.Areas.Admin.Models
{
    /// <summary>
    /// Placeholder view model for the admin user screens; the paged account
    /// list it once exposed is commented out and currently unused.
    /// </summary>
    public class UserModel
    {
        //public List<Account> Accounts { get; set; }
        /*
        public PagedList.IPagedList<Account> Accounts { get; set; }*/
    }
}<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the LoaiTaiKhoan (account type) lookup.
    /// </summary>
    public class LoaiTaiKhoanModel
    {
        public int ID_LoaiTaiKhoan { get; set; }       // primary key
        public string TenLoai { get; set; }            // type name
        public List<TaiKhoan> TaiKhoans { get; set; }  // accounts of this type
    }
}
<file_sep>using Model.Dao;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace TimKiemViecLam.Areas.Admin.Controllers
{
public class HomeController : Controller
{
// GET: Admin/Home
public ActionResult Index()
{
var dao = new HomeDAO();
var model = dao.ListAs();
return View(model);
}
public ActionResult HomeMember()
{
return View();
}
}
}<file_sep>$(document).ready(function () {
//ChangeValue Db
$('.ChangValue').off('keypress').on('keypress', function (e) {
if (e.which == 13) {
var id = $(this).attr('data-id');
var value = $(this).val();
Update(id, value);
}
});
function Update(idbill, value) {
var billid = $('.ChangValue').attr('data-role');
var data = {
BillInfo_id: idbill,
count: value
};
$.ajax({
url: '/HomeClient/EditBill',
type: 'POST',
dataType: 'json',
data: {model:JSON.stringify(data)},
success: function (response) {
if (response.status) {
//alert('update thành công');
showBill(billid);
}
else
{
alert('thất bại');
}
}
});
}
function showBill(id_bill) {
$.ajax({
url: '/HomeClient/GetBill',
type: 'POST',
data: {
id_bill
},
success: function (d) {
$('#TT_danhmuc').html(d);
$('#thanhToan').show();
}
});
}
// Xóa món
});
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class TinViecLamDAO
{
TimKiemViecLamDbConText db = null;
public TinViecLamDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<TinViecLam> dsTinViecLam()
{
return db.TinViecLams.ToList();
}
public bool LuuTinViecLam(TinViecLam tinViecLam)
{
db.TinViecLams.Add(tinViecLam);
db.SaveChanges();
return true;
}
public bool XoaViecLam(int id) {
try
{
var kq = db.TinViecLams.Find(id);
db.TinViecLams.Remove(kq);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool CapNhapViecLam(TinViecLam tinViecLam)
{
try
{
var kq = db.TinViecLams.Find(tinViecLam.ID_ViecLam);
kq.TieuDeViecLam = tinViecLam.TieuDeViecLam;
kq.MoTa = tinViecLam.MoTa;
kq.ID_NganhNghe = tinViecLam.ID_NganhNghe;
kq.ID_ViTri = tinViecLam.ID_ViTri;
kq.GioiTinh = tinViecLam.GioiTinh;
kq.YeuCauKyNang = tinViecLam.YeuCauKyNang;
kq.ThoiGianThuViec = tinViecLam.ThoiGianThuViec;
kq.ID_KinhNghiem = tinViecLam.ID_KinhNghiem;
kq.ID_TrinhDo = tinViecLam.ID_TrinhDo;
kq.MucLuong = tinViecLam.MucLuong;
kq.NgayHetHan = tinViecLam.NgayHetHan;
kq.TrangThai = tinViecLam.TrangThai;
kq.SoLuong = tinViecLam.SoLuong;
kq.YeuCauHoSo = tinViecLam.YeuCauHoSo;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
}
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the TrinhDo (education level) lookup: referenced by
    /// candidate CVs and job postings.
    /// </summary>
    public class TrinhDoModel
    {
        public int ID_TrinhDo { get; set; }               // primary key
        public string TenTrinhDo { get; set; }            // education-level name
        public List<CV_UngVien> CV_UngVien { get; set; }  // CVs with this level
        public List<TinViecLam> TinViecLams { get; set; } // postings requiring this level
    }
}
<file_sep>namespace Model.EF
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
    /// <summary>
    /// Plain DTO shape of the CongTy (company) entity: recruiter profile plus
    /// login credentials and the postings it has published.
    /// </summary>
    public class CongTyModel
    {
        public int ID_CongTy { get; set; }       // primary key
        public string TenCongTy { get; set; }    // company name
        public string TenDangNhap { get; set; }  // login name
        public string MatKhau { get; set; }      // password
        public string DiaChi { get; set; }       // address
        public string QuyMo { get; set; }        // company size
        public string SDT { get; set; }          // phone number
        public string Website { get; set; }
        public string MoTa { get; set; }         // description
        public string NguoiDaiDien { get; set; } // representative
        public string Email { get; set; }
        public int ID_ThanhPho { get; set; }     // FK to city
        public ThanhPho ThanhPho { get; set; }   // city navigation
        public List<TinViecLam> TinViecLams { get; set; } // published postings
    }
}<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class UngVienDAO
{
TimKiemViecLamDbConText db = null;
public UngVienDAO()
{
db = new TimKiemViecLamDbConText();
}
public List<UngVien> dsUngVien()
{
return db.UngViens.ToList();
}
public bool LuuUngVien(UngVien UngVien)
{
db.UngViens.Add(UngVien);
db.SaveChanges();
return true;
}
public bool XoaUngVien(int id)
{
try
{
var kq = db.UngViens.Find(id);
db.UngViens.Remove(kq);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
//kiem tra email
public bool KiemTraEmail(string email)
{
var kq = from n in db.UngViens
select n;
foreach (var s in kq)
{
if (s.Email == email)
return false;
else
return true;
}
return true;
}
}
}
<file_sep>using Model.EF;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Model.Dao
{
public class TaiKhoanDAO
{
TimKiemViecLamDbConText db = null;
public TaiKhoanDAO()
{
db = new TimKiemViecLamDbConText();
}
public TaiKhoan GetbyUser(string userName)
{
return db.TaiKhoans.SingleOrDefault(x => x.TenDangNhap == userName);
}
public TaiKhoan ViewDetail(int id)
{
return db.TaiKhoans.Find(id);
}
public long Insert(TaiKhoan entity)
{
db.TaiKhoans.Add(entity);
db.SaveChanges();
return entity.ID_TaiKhoan;
}
public bool Edit(TaiKhoan entity)
{
try
{
var user = db.TaiKhoans.Find(entity.ID_TaiKhoan);
user.TenDangNhap = entity.TenDangNhap;
user.HoTen = entity.HoTen;
user.ID_Vung = entity.ID_Vung;
user.SDT = entity.SDT;
user.Email = entity.Email;
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public bool Delete(int id)
{
try
{
var user = db.TaiKhoans.Find(id);
db.TaiKhoans.Remove(user);
db.SaveChanges();
return true;
}
catch (Exception)
{
return false;
}
}
public Object Login(string userName, string password)
{
var checkTaiKhoans = db.TaiKhoans.SingleOrDefault(x => x.TenDangNhap == userName);
var checkUngVien = db.UngViens.SingleOrDefault(x => x.TenDangNhap == userName);
var checkTuyenDung = db.CongTies.SingleOrDefault(x => x.TenDangNhap == userName);
if (checkTaiKhoans!=null)
{
if (password == (checkTaiKhoans.MatKhau)){
return checkTaiKhoans;
}
}
if (checkUngVien != null)
{
if (password == (checkTaiKhoans.MatKhau)){
return checkUngVien;
}
}
if (checkTuyenDung != null)
{
if (password == (checkTaiKhoans.MatKhau)){
return checkTuyenDung;
}
}
return null;
}
/* public IEnumerable<TaiKhoan> listAllpage(string seaching, int page, int pageSize, int? idRole)
{
IQueryable<I> model = from a in db.Accounts
join b in db.Roles
on a.idRole equals b.Role_id
select new UserLisst()
{
id = a.id,
Email = a.Email,
Address = a.Address,
UserName = a.UserName,
idRole = b.Role_id,
RoleName = b.RoleName,
Name = a.Name,
Phone = a.Phone,
PassWord = <PASSWORD>,
status = a.status
};
if (!string.IsNullOrEmpty(seaching))
{
model = model.Where(x => x.Name.Contains(seaching) || x.RoleName.Contains(seaching) || x.RoleName.Contains(seaching)).OrderBy(x => x.id);
}
return model.OrderBy(x => x.id).ToPagedList(page, pageSize);
}*/
/// <summary>
/// Returns every account type (LoaiTaiKhoan) in the database.
/// </summary>
/// <returns>A list of all account-type rows.</returns>
public List<LoaiTaiKhoan> list()
{
    var accountTypes = db.LoaiTaiKhoans;
    return accountTypes.ToList();
}
}
}
| 1faeeac8f17678d5f212bd0dcbc36ab462683c68 | [
"JavaScript",
"C#"
] | 57 | C# | hieunm08/FindWork | f2b14cbb75c03e6cdac4533fb9359a961bc2198c | a38f9dfae170a5fd814a24672b3a4973b538c425 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CodeCreateTool
{
public class cff_account_log
{
public void Test()
{
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using MySql.Data;
using MySql.Data.MySqlClient;
using System.Configuration;
using System.IO;
namespace CodeCreateTool
{
public partial class Form1 : Form
{
string db = "chefafadb_erp";
public Form1()
{
InitializeComponent();
//strConn = " select column_name from Information_schema.columns where table_schema='chefafadb_erp' and table_Name='cff_users' ";
//select table_Name from Information_schema.tables where table_schema='chefafadb_erp'
txtconnStr.Text = DBHelper.connStr;
DataTable dt = DBHelper.ExecuteDataTable("select table_Name from Information_schema.tables where table_schema='"+db+"'");
this.txtConsoleOutput.Text = "数据库连接成功...";
foreach (DataRow item in dt.Rows)
{
lvtableList.Items.Add(item[0].ToString());
}
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n数据库表单加载...";
lvtableList.Show();
}
private void btnlinkDB_Click(object sender, EventArgs e)
{
lvtableList.Clear();
if (!string.IsNullOrWhiteSpace(txtconnStr.Text))
{
string connStr=txtconnStr.Text;
db = connStr.Substring(connStr.LastIndexOf("=")+1, connStr.Length-1 - connStr.LastIndexOf("="));
DBHelper.connStr =connStr ;
DataTable dt = DBHelper.ExecuteDataTable("select table_Name from Information_schema.tables where table_schema='"+db+"'");
this.txtConsoleOutput.Text = "数据库连接成功...";
foreach (DataRow item in dt.Rows)
{
lvtableList.Items.Add(item[0].ToString());
}
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n数据库表单加载...";
lvtableList.Show();
}
else
{
MessageBox.Show("请输入连接字符串");
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n错误...";
}
}
private void btnCreateCode_Click(object sender, EventArgs e)
{
int i = lvtableList.CheckedItems.Count;
string[] para = new string[i];
i = 0;
DataSet ds = new DataSet();
DataTable dt=null;
foreach (ListViewItem item in lvtableList.CheckedItems)
{
string tableName=item.Text;
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n开始加载数据...";
dt = DBHelper.ExecuteDataTable("select column_name,data_type from Information_schema.columns where table_schema='"+db+"' and table_Name='" + tableName + "'");
dt.TableName = tableName;
ds.Tables.Add(dt);
i++;
}
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n开始生成文本...";
CreateCodeLogic(ds);
}
public bool CreateCodeLogic(DataSet ds)
{
bool isOk = false;
string Gen_Path = this.txtGenPath.Text;
if (!string.IsNullOrWhiteSpace(Gen_Path))
{
if (ds.Tables.Count <= 0)
{
return false;
}
else
{
foreach (DataTable table in ds.Tables)
{
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n开始生成" + table.TableName + "...";
int i=0;
Directory.CreateDirectory(Gen_Path);
StringBuilder sb= new StringBuilder("");
sb.AppendLine("using System;");
sb.AppendLine("using System.Collections.Generic;");
sb.AppendLine("using System.Linq;");
sb.AppendLine("using System.Text;");
sb.AppendLine("using System.Threading.Tasks;");
sb.AppendLine();
sb.AppendLine("namespace CodeCreateTool");
sb.AppendLine("{");
sb.AppendLine("\r\tpublic class " + table.TableName + "");
sb.AppendLine("\r\t{");
foreach (DataRow Row in table.Rows)
{
i++;
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "\r\n生成" + i+"/"+table.Rows.Count+ "...";
//string property = Row.ItemArray[0].ToString();
MySqlDataType type =(MySqlDataType)Enum.Parse(typeof(MySqlDataType), Row.ItemArray[1].ToString().ToUpper());
switch (type)
{
case MySqlDataType.DECIMAL:
case MySqlDataType.DOUBLE:
case MySqlDataType.NEWDECIMAL:
sb.AppendLine("\r\t\r\tpublic double " + Row.ItemArray[0].ToString() + "{get;set;}");
break;
case MySqlDataType.MEDIUMINT:
case MySqlDataType.INT16:
case MySqlDataType.INT32:
case MySqlDataType.INT:
case MySqlDataType.SMALLINT:
sb.AppendLine("\r\t\r\tpublic int " + Row.ItemArray[0].ToString() + "{get;set;}");
break;
case MySqlDataType.TEXT:
case MySqlDataType.CHAR:
case MySqlDataType.VARCHAR:
case MySqlDataType.STRING:
sb.AppendLine("\r\t\r\tpublic string " + Row.ItemArray[0].ToString() + "{get;set;}");
break;
default:
break;
}
}
sb.AppendLine("\r\t}");
sb.AppendLine("}");
File.WriteAllText(Gen_Path +"\\"+ table.TableName + ".txt", sb.ToString());
this.txtConsoleOutput.Text = this.txtConsoleOutput.Text + "生成成功...";
}
}
}
else
{
MessageBox.Show("选择生成路径");
isOk = false;
}
return isOk;
}
private void btnGenPath_Click(object sender, EventArgs e)
{
//this.fbdGenPath.ShowDialog();
this.fbdGenPath.Description = "请选择路径";
if (this.fbdGenPath.ShowDialog() == DialogResult.OK)
{
txtGenPath.Text = this.fbdGenPath.SelectedPath;
}
}
}
public class DBHelper
{
public static string connStr = ConfigurationManager.ConnectionStrings["connStr"].ConnectionString;
public static DataTable ExecuteDataTable(string sql)
{
MySqlConnection conn = new MySqlConnection(connStr);
MySqlCommand cmd = new MySqlCommand(sql);
DataTable dt = new DataTable();
MySqlDataAdapter da = new MySqlDataAdapter(sql,conn);
da.Fill(dt);
return dt;
}
}
}
| 0b4fbbae8fb32f534c6d01458a6cba174fb77306 | [
"C#"
] | 2 | C# | thisistestorganization/CodeCreateTool | 00ef1e5486befc73c286aed5958206d68707333b | f030b3ce7b6869a142d3e4d3d498c9e37a054590 |
refs/heads/master | <file_sep>--The name of the mod displayed in the 'mods' screen.
name = "[Hamlet] Royal Crown"
--A description of the mod.
description = "Royal Crown from Hamlet DLC of Don't Starve."
--Who wrote this awesome mod?
author = "KaiserKatze"
--A version number so you can ask people if they are running an old version of your mod.
version = "1.0.3"
--This lets other players know if your mod is out of date. This typically needs to be updated every time there's a new game update.
api_version = 10
dst_compatible = true
--This lets clients know if they need to get the mod from the Steam Workshop to join the game
all_clients_require_mod = true
--This determines whether it causes a server to be marked as modded (and shows in the mod list)
client_only_mod = false
--This lets people search for servers with this mod by these tags
server_filter_tags = {"hamlet"}
icon_atlas = "modicon.xml"
icon = "modicon.tex"
forumthread = ""
configuration_options =
{
{
name = "SANITY_RATE",
label = "Restore Sanity",
hover = "How fast you restore your sanity",
options =
{
{description = "Disabled", data = 0},
{description = "Normal", data = 1},
{description = "Large", data = 2},
{description = "Huge", data = 1},
{description = "Super Huge", data = 2},
},
default = 1,
},
}
<file_sep>local _G = GLOBAL
local require = _G.require
local DEBUG = false
Assets =
{
}
PrefabFiles =
{
"modhats",
}
require "modstrings"
require "modrecipes"
local TUNING = _G.TUNING
TUNING.DAPPERNESS_PIGCROWNHAT = ({
[0] = 0, -- None
[1] = TUNING.DAPPERNESS_MED_LARGE, -- Normal
[2] = TUNING.DAPPERNESS_LARGE, -- Large
[1] = TUNING.DAPPERNESS_HUGE, -- Huge
[2] = TUNING.DAPPERNESS_SUPERHUGE, -- Super Huge
})[GetModConfigData("SANITY_RATE") or 1]
--------------
if DEBUG then
local SpawnPrefab = _G.SpawnPrefab
local function giveitem(inst, item_name)
if not inst then return end
local item = SpawnPrefab(item_name)
if item then
inst.components.inventory:GiveItem(item)
end
end
AddPlayerPostInit(function(inst)
-- Spawn items in tester's inventory
if inst.components.inventory then
giveitem(inst, "pigcrownhat")
end
end)
end
<file_sep>#!/bin/bash
rm -f ".editorconfig"
find . \( -iname "*.psd" -or -iname "*.png" -or -iname "*.md" \) -delete
rm -rf "screenshots/"
rm -f "make.sh"
<file_sep>if STRINGS.CHARACTERS.WALANI == nil then STRINGS.CHARACTERS.WALANI = {
DESCRIBE = {},
} end -- DLC002
if STRINGS.CHARACTERS.WARLY == nil then STRINGS.CHARACTERS.WARLY = {
DESCRIBE = {},
} end -- DLC002
if STRINGS.CHARACTERS.WOODLEGS == nil then STRINGS.CHARACTERS.WOODLEGS = {
DESCRIBE = {},
} end -- DLC002
if STRINGS.CHARACTERS.WILBA == nil then STRINGS.CHARACTERS.WILBA = {
DESCRIBE = {},
} end -- DLC003
if STRINGS.CHARACTERS.WARBUCKS == nil then STRINGS.CHARACTERS.WARBUCKS = {
DESCRIBE = {},
} end -- DLC003
-------------------------------
-- Prefab
STRINGS.NAMES.PIGCROWNHAT = "Royal Crown"
STRINGS.RECIPE_DESC.PIGCROWNHAT = "L'etat, C'est Moi!"
STRINGS.CHARACTERS.GENERIC.DESCRIBE.PIGCROWNHAT = "I am the King of this world!"
STRINGS.CHARACTERS.WILLOW.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
STRINGS.CHARACTERS.WENDY.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
STRINGS.CHARACTERS.WICKERBOTTOM.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
STRINGS.CHARACTERS.WATHGRITHR.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
STRINGS.CHARACTERS.WALANI.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
STRINGS.CHARACTERS.WILBA.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
STRINGS.CHARACTERS.WINONA.DESCRIBE.PIGCROWNHAT = "I am the Queen of this world!"
--[[
STRINGS.NAMES.ANTMASKHAT = "Mant Mask"
STRINGS.RECIPE_DESC.ANTMASKHAT = "A face only a Mant could love."
STRINGS.CHARACTERS.GENERIC.DESCRIBE.ANTMASKHAT = "There's still some goo stuck to the insides."
STRINGS.CHARACTERS.WALANI.DESCRIBE.ANTMASKHAT = "Sorry bug-dude."
STRINGS.CHARACTERS.WARBUCKS.DESCRIBE.ANTMASKHAT = "Rather morbid depths I've sunk to."
STRINGS.CHARACTERS.WARLY.DESCRIBE.ANTMASKHAT = "The pith has been removed."
STRINGS.CHARACTERS.WATHGRITHR.DESCRIBE.ANTMASKHAT = "Mine helm would have sufficed."
STRINGS.CHARACTERS.WAXWELL.DESCRIBE.ANTMASKHAT = "A hollowed out head. Don't ask where I got it."
STRINGS.CHARACTERS.WEBBER.DESCRIBE.ANTMASKHAT = "This seems a little mean."
STRINGS.CHARACTERS.WENDY.DESCRIBE.ANTMASKHAT = "The mind that dwelt within is long gone."
STRINGS.CHARACTERS.WICKERBOTTOM.DESCRIBE.ANTMASKHAT = "This may be slightly unethical."
STRINGS.CHARACTERS.WILBA.DESCRIBE.ANTMASKHAT = "WILBA WEAR THE HEAD OF MINE ENEMIES"
STRINGS.CHARACTERS.WILLOW.DESCRIBE.ANTMASKHAT = "This is the nastiest thing I've ever made."
STRINGS.CHARACTERS.WOLFGANG.DESCRIBE.ANTMASKHAT = "Make Wolfgang look like Bug Man."
STRINGS.CHARACTERS.WOODIE.DESCRIBE.ANTMASKHAT = "I hope no one recognizes this guy."
STRINGS.CHARACTERS.WOODLEGS.DESCRIBE.ANTMASKHAT = "T'mask yer ugly mug."
STRINGS.CHARACTERS.WX78.DESCRIBE.ANTMASKHAT = "DECORATION MODULE ACQUIRED"
STRINGS.NAMES.ANTSUIT = "Mant Suit"
STRINGS.RECIPE_DESC.ANTSUIT = "Walk a mile in a Mant's carapace."
STRINGS.CHARACTERS.GENERIC.DESCRIBE.ANTSUIT = "It's brittle."
STRINGS.CHARACTERS.WALANI.DESCRIBE.ANTSUIT = "Nice threads!"
STRINGS.CHARACTERS.WARBUCKS.DESCRIBE.ANTSUIT = "When in Rome..."
STRINGS.CHARACTERS.WARLY.DESCRIBE.ANTSUIT = "There's no meat inside."
STRINGS.CHARACTERS.WATHGRITHR.DESCRIBE.ANTSUIT = "This armor is not worthy of a shieldmaiden!"
STRINGS.CHARACTERS.WAXWELL.DESCRIBE.ANTSUIT = "The indignities I've suffered..."
STRINGS.CHARACTERS.WEBBER.DESCRIBE.ANTSUIT = "We could always use a spare thorax."
STRINGS.CHARACTERS.WENDY.DESCRIBE.ANTSUIT = "As hollow as my heart."
STRINGS.CHARACTERS.WICKERBOTTOM.DESCRIBE.ANTSUIT = "A hollowed out arthropod."
STRINGS.CHARACTERS.WILBA.DESCRIBE.ANTSUIT = "WILBA AM MERELY PLAYER"
STRINGS.CHARACTERS.WILLOW.DESCRIBE.ANTSUIT = "This is the worst."
STRINGS.CHARACTERS.WOLFGANG.DESCRIBE.ANTSUIT = "Wolfgang just want to fit in."
STRINGS.CHARACTERS.WOODIE.DESCRIBE.ANTSUIT = "This feels a tad psychotic."
STRINGS.CHARACTERS.WOODLEGS.DESCRIBE.ANTSUIT = "'Tis an unpleasant bus'niss."
STRINGS.CHARACTERS.WX78.DESCRIBE.ANTSUIT = "INEFFICIENT PROTECTIVE CASING"
--]]
<file_sep>Recipe(
"pigcrownhat",
{
Ingredient("redgem", 4),
Ingredient("goldnugget", 8),
Ingredient("manrabbit_tail", 16),
},
RECIPETABS.DRESS,
TECH.SCIENCE_TWO,
nil,
nil,
nil,
nil,
nil,
"images/pigcrownhat.xml" -- atlas
)
--[[
Recipe(
"antmaskhat",
{
Ingredient("chitin", 5),
Ingredient("footballhat", 1)
},
RECIPETABS.WAR,
TECH.SCIENCE_ONE
nil,
nil,
nil,
nil,
nil,
"images/antmaskhat.xml" -- atlas
)
Recipe(
"antsuit",
{
Ingredient("chitin", 5),
Ingredient("armorwood", 1)
},
RECIPETABS.WAR,
TECH.SCIENCE_ONE
nil,
nil,
nil,
nil,
nil,
"images/antsuit.xml" -- atlas
)
--]]
<file_sep>local function MakeHat(name, fn, custom_init, prefabs)
local fname = "hat_"..name
local symname = name.."hat"
local prefabname = symname
local function onequip(inst, owner, symbol_override)
local skin_build = inst:GetSkinBuild()
if skin_build ~= nil then
owner:PushEvent("equipskinneditem", inst:GetSkinName())
owner.AnimState:OverrideItemSkinSymbol("swap_hat", skin_build, symbol_override or "swap_hat", inst.GUID, fname)
else
owner.AnimState:OverrideSymbol("swap_hat", fname, symbol_override or "swap_hat")
end
owner.AnimState:Show("HAT")
owner.AnimState:Show("HAIR_HAT")
owner.AnimState:Hide("HAIR_NOHAT")
owner.AnimState:Hide("HAIR")
if owner:HasTag("player") then
owner.AnimState:Hide("HEAD")
owner.AnimState:Show("HEAD_HAT")
end
if inst.components.fueled ~= nil then
inst.components.fueled:StartConsuming()
end
end
local function onunequip(inst, owner)
local skin_build = inst:GetSkinBuild()
if skin_build ~= nil then
owner:PushEvent("unequipskinneditem", inst:GetSkinName())
end
owner.AnimState:ClearOverrideSymbol("swap_hat")
owner.AnimState:Hide("HAT")
owner.AnimState:Hide("HAIR_HAT")
owner.AnimState:Show("HAIR_NOHAT")
owner.AnimState:Show("HAIR")
if owner:HasTag("player") then
owner.AnimState:Show("HEAD")
owner.AnimState:Hide("HEAD_HAT")
end
if inst.components.fueled ~= nil then
inst.components.fueled:StopConsuming()
end
end
local imageAtlas = "images/"..symname..".xml"
local function simple(custom_init)
local inst = CreateEntity()
inst.entity:AddTransform()
inst.entity:AddAnimState()
inst.entity:AddNetwork()
MakeInventoryPhysics(inst)
inst.AnimState:SetBank(symname)
inst.AnimState:SetBuild(fname)
inst.AnimState:PlayAnimation("anim")
inst:AddTag("hat")
inst:AddTag(name)
if custom_init ~= nil then
custom_init(inst)
end
local tagAlias = "_hat"
inst:AddTag(tagAlias)
inst.entity:SetPristine()
if not TheWorld.ismastersim then
return inst
end
inst:RemoveTag(tagAlias)
inst:AddComponent("inventoryitem")
inst.components.inventoryitem.atlasname = imageAtlas
inst:AddComponent("inspectable")
inst:AddComponent("tradable")
inst:AddComponent("equippable")
inst.components.equippable.equipslot = EQUIPSLOTS.HEAD
inst.components.equippable:SetOnEquip(onequip)
inst.components.equippable:SetOnUnequip(onunequip)
MakeHauntableLaunch(inst)
return inst
end
local function default()
return simple()
end
local assets =
{
Asset("ANIM", "anim/"..fname..".zip"),
Asset("ATLAS", imageAtlas),
}
return Prefab(prefabname, fn and function(Sim)
return fn(simple(custom_init or nil))
end or default, assets, prefabs or nil)
end
return MakeHat("pigcrown", function(inst)
if TheWorld.ismastersim then
if TUNING.DAPPERNESS_PIGCROWNHAT then
inst.components.equippable.dapperness = TUNING.DAPPERNESS_PIGCROWNHAT
end
end
return inst
end)
| 484fc752101e73e545c1086fbf853ebfe633a01f | [
"Shell",
"Lua"
] | 6 | Lua | WayOfModding/DST-Mod-Hamlet-RoyalCrown | 3c3ba71bcdc2557e2c1c0640cbced09f11c09a86 | 1d09dbd97862af9266d244bbbff03d8d51d1be35 |
refs/heads/master | <repo_name>josecanovarela/Proyecto4<file_sep>/installapache.sh
sudo su
apt-get update
apt-get install apache2 | 61e369799be1a7c13546ffec94c12ac132c68bbe | [
"Shell"
] | 1 | Shell | josecanovarela/Proyecto4 | 995f88a7d2f06c6d9634248edd85a90ca33040ee | 244d9a1d56cbabb3b382616aab499ab57ed2edda |
refs/heads/master | <file_sep>---
layout: teacher
---
<h3>Personal Teaching Philosophy</h3>
<p>There are a lot of considerations that go into one’s teaching philosophy beyond just how you teach. The strategies you use, the approaches to reach children, but also the environment you create, the relationships you form, and concepts behind what is being taught all effect how how one teaches.</p>
<p>I believe teaching should be learner-centered. The curriculum being taught should emerge from the students interests and needs. Students need to have involvement in choosing what and how they learn. If students are interested they will be more engaged, if they are engaged they will learn. All teaching should ultimately aim for personal growth and personal meaning, thus encouraging self-awareness through personal responses that utilize problem solving and reflective thinking.Projects and assignments should be designed with the student in mind and with the student as the number one priority. </p>
<p>The role of teacher extends beyond just a knowledge provider, the role of the teacher should be to identify the needs of the learner and serve as a resource. There needs to a be a relationship formed between teacher and student with cooperative respect and trust. Through respecting your students you will gain respect in return, they can not trust you if do not trust them. Another important role the teacher holds is as a model for appropriate behavior. The teacher can’t just present rules and mandates but needs to put them in place and follow them herself. A teacher’s purpose in the classroom is not just as a someone who speaks to students but someone who speaks with students. </p>
<p>The teacher must also create an environment where students can learn through guided reflections and experiences that put them in the center of the learning. The classroom environment has a large effect on how students learn and how students behave. A classroom must be welcoming and safe but also feel like a place where creativity and art making is continually happening. Visuals and resources are essential, as well as organization. Everything should have a place and everyone should know where those places are, this allows for easy access to supplies and easy clean up of supplies. In order to spark creativity students must be surrounded by continually changing inspiration, when you enter the classroom you should know the ideas, artists, and concepts that are being taught, explored, and made.</p>
<p>Learning should occur in a variety of ways, responses and assessments should take on many forms. Learning should occur through problem solving, experimentation, critical thinking, interaction with objects, events, and people, reflection, inquiry and experience. Teachers should encourage cooperation rather than competition. Working in groups should become a part of the learning environment and an alternative way for students to process information and learn from each other. </p>
<p> Concepts should be taught through the use of broad concepts, big ideas, rather then specific skills and techniques. While skills and techniques are essential to learn, these should be incorporated into a larger picture, a more critical, thought provoking, experience. </p>
<p>Teaching is much more then just imploring knowledge into students, it is about the students making those realizations, those connections that occur when something makes sense, when something is effecting them on a personal level. Creating these opportunities for students is job of a teacher, and the teacher needs to consider teaching approaches, environment, relationships, and curriculum in order to be successful. The most important resource a teacher has is her students, and a teacher is continually learning and growing from the knowledge they provide. </p>
<file_sep>---
layout: teacher
---
<h3>Expressive Paradise</h3>
<p>Grade 4</p>
<p>Longfellow Elementary School</p>
<div id="gallery1">
<div id="thumbs">
<ul class="list-inline">
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/1.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/2.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/3.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/4.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/5.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/6.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/7.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/8.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/9.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/10.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/11.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/12.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/13.jpg" /></li>
<li><img class="img-thumbnail" src="/teacher/images/grade-4-expressive/14.jpg" /></li>
</ul>
</div>
<div id="large">
<img class="img-thumbnail" src="/teacher/images/grade-4-expressive/1.jpg" />
</div>
</div>
<h4>DEVELOPMENTAL RATIONALE</h4>
<p>Students in the forth grade are a variety of different developmental ranges. They are beginning to become much more social, note gender differences, and express their identity. Though the creation of their own personal paradise they are expressing that identity as well as create a piece of art that is planned out, realistic but expressive. At this age they are comparing their work to ideals and their skills do not match their thought process, be combining the realistic with the imaginary and abstract a balance of skill and expression is achieved. Students in this stage are moving away from the concrete toward interpretation and the abstract. In terms of drawing they are becoming more interested in observation and their schema are disappearing, proportions and perspective become more apparent, a visual realism is wanted. Students will be exploring materials, letting them choose which will work best for the idea they are expressing. Through an exploration of mark making students are combining their want for realistic imagery with emotional expression.</p>
<h4>UNIT OBJECTIVE</h4>
<ul>
<li>Students will explore expressive mark making. They will translate these marks and discoveries into multiple prints, including mono-prints and styrofoam reliefs, of their own paradise. Using abstract marks and forms they will add meaning to their imagined landscape.</li>
<li>Students will explore and experiment with new printmaking processes and expressive mark making.</li>
</ul>
<h4>THE UNIT PROBLEM STATEMENT</h4>
<p>What would your perfect world look like, your own personal paradise? How can you express this utopia through mark making?</p>
<h4>GUIDING QUESTIONS</h4>
<ul>
<li>How do you express an emotion through abstract mark making?</li>
<li>What do you think a happy mark looks like? or a loud mark? or a slow mark?</li>
<li>What would your perfect place, your paradise, look like?</li>
<li>What kinds of marks can you use to express this place?</li>
<li>How does the process of creating a drawing for a print differ from just creating a drawing?</li>
<li>How does this printmaking process differ from the last, the mono-print we made last week?</li>
<li>What are the differences between the two prints we created today (the additive and subtractive)?</li>
</ul>
<h4>CONCEPTS</h4>
<ul>
<li>Students will be able to identify different marks within works of art</li>
<li>Students will understand that mark making can be related to expression of emotion</li>
<li>Students will understand that abstract art forms convey meaning about emotion through the marks being used</li>
<li>Students will be able to connect realistic imagery with expressive mark making</li>
<li>Students will express their own personal emotions and imagined landscapes using mark making through printmaking processes</li>
<li>Students imagery will have a narrative element as well as an imagined element</li>
<li>Student will understand that discoveries are made through exploration of materials</li>
<li>Students will understand the difference between drawing and printmaking</li>
<li>Students will understand the difference between additive and subtractive printing methods</li>
<li>Students will understand the difference between mono prints and other printing methods, one print versus being able to create multiple prints for one plate</li>
</ul>
<h4>SKILLS</h4>
<ul>
<li>Students will be able to express themselves through abstract marks</li>
<li>Students will learn the difference between materials</li>
<li>Students will be able to visually translate their ideas and emotions</li>
<li>Students will successfully transfer their images from one printing surface to another</li>
</ul>
<h4>DISPOSITIONS</h4>
<ul>
<li>Students will be able to think abstractly</li>
<li>Students will be open to expressing their emotions and inner feelings</li>
<li>Students will work cooperatively</li>
</ul>
<h4>STATE/DISTRICT OUTCOMES</h4>
<ul>
<li>Standard 1.0 Perceiving and Responding: Aesthetic Education: Students will demonstrate the ability to perceive, interpret, and respond to ideas, experiences, and the environment through visual art.</li>
<li>INDICATOR 2. Identify and compare ways in which selected artworks represent what people see, know, feel, and imagine</li>
<li>OBJECTIVE b. Create and describe artworks that communicate mood and point of view</li>
<li>Standard 2.0 Historical, Cultural, and Social Context: Students will demonstrate an understanding of visual art as an essential aspect of history and human experience.</li>
<li>INDICATOR 1. Determine ways in which works of art express ideas about self, other people, places, and events</li>
<li>Standard 3.0 Creative Expression and Production: Students will demonstrate the ability to organize knowledge and ideas for expression in the production of art.</li>
<li>INDICATOR 1. Create images and forms from observation, memory, and imagination and feelings</li>
<li>OBJECTIVES a. Experiment with media, processes, and techniques to express thoughts and feelings that have personal meaning</li>
<li>b. Manipulate art media, materials, and tools safely</li>
<li>c. Create artworks that explore the uses of the elements of art and selected principles of design, such as pattern, repetition, contrast, balance, and variety, to express personal meaning</li>
</ul>
<h4>THE HOWARD COUNTY PUBLIC SCHOOL SYSTEM, THE ELEMENTARY SCHOOL ART PROGRAM: (Intermediate, grades 4-5)</h4>
<h5>CONCEPTS based upon the elements of art and the principles of design:</h5>
<ul>
<li>Line - Lines may be used to emphasize a center of interest, can show texture, can be distorted to abstract and simplify forms</li>
<li>Color - Color can be used for emphasis in a composition, can express mood and meaning</li>
<li>Texture - Texture can be used to create surface variety</li>
</ul>
<h5>PRINTMAKING:</h5>
<ul>
<li>(emerging intermediate) apply printmaking processes to transfer imagery that incorporates the formal qualities of art</li>
<li>use printmaking processes and materials to develop a series of images, which express individual ideas, thoughts, and feelings</li>
</ul>
<h5>RESPONDING:</h5>
<ul>
<li>demonstrate the ability to analyze and evaluate the characteristics of artworks</li>
</ul>
<h5>EXPRESSING:</h5>
<ul>
<li>develop the ability to organize new and prior knowledge and ideas with tools and media for self-expression</li>
</ul>
<h5>INTERMEDIATE LEVEL (grades 4-5) Making Artistic Choices:</h5>
<p>“Emphasis for the learner at this instruction level is on creating meaning, making decisions, and aesthetic judgements. The child becomes an artist able to make choices about materials, processes, and techniques, strengthening their problem solving ability. This young artist continues to expand his/her visual and verbal vocabulary through drawing from memory, using the narrative to tell stories, and observation. In preparation for middle-school, the student develops representational skills, and establishes criteria for self-evaluation.”
use a variety of media to reflect personal decisions and experiences
follow multi-step process to solve an art problem</p>
<h4>VOCABULARY</h4>
<ul>
<li>expressionism: a style of art in which the artist seeks to express emotional experience rather than impressions of the external world.</li>
<li>abstract: existing in thought or as an idea but not having a physical or concrete existence </li>
<li>exploration: examine or evaluate, inquire into in order to learn about or familiarize oneself with</li>
<li>utopia: an imagined place or state of things in which everything is perfect. </li>
<li>paradise: an ideal or idyllic place</li>
<li>horizon line, foreground, middle ground, background</li>
<li>transfer: convey (a drawing or design) from one surface to another.</li>
<li>mono-print: a print made by a process that allows for only one impression to be pulled. Generally, the artist creates an image by drawing or painting it on a surface from which it can be transferred/printed onto a piece of paper</li>
<li>additive: characterized or produced by addition of a material</li>
<li>subtractive: The act of removing material or media</li>
</ul>
<h4>INSTRUCTIONAL RESOURCES/MATERIALS</h4>
<h5>MATERIALS:</h5>
<ul>
<li>(mark making exploration) drawing materials: chalk pastel, colored pencil, crayon, marker</li>
<li>paper, pencils</li>
<li>(mono-prints) transparency, water soluble crayons, damp paper</li>
<li>(styrofoam prints) styrofoam, markers, pencil, damp paper</li>
</ul>
<h5>RESOURCES:</h5>
<ul>
<li>printing press</li>
</ul>
<h5>VISUALS:</h5>
<ul>
<li>expresessionistic artists/mark making/paradise/landscape presentation (prezi)</li>
</ul>
<h5>ARTIST EXEMPLARS:</h5>
<ul>
<li><NAME></li>
<li><NAME></li>
<li><NAME></li>
<li><NAME></li>
<li><NAME></li>
<li><NAME></li>
<li><NAME></li>
</ul>
<h4>LESSON SEQUENCE FOR THE UNIT</h4>
<h5>DAY 1:</h5>
<ul>
<li>show artist examples/visuals of mark making expressionism</li>
<li>explore materials, mark making - each table gets different materials and an emotion (HAPPY, ANGRY, SAD, LOUD, SOFT, QUICK, SLOW, FLAT), ask students to express that emotion using the given materials with their marks, rotate to each table. </li>
<li>closure, exit ticket</li>
</ul>
<h5>DAY 2: </h5>
<ul>
<li>show examples of images of paradises/utopias </li>
<li>sketches of their own personal paradise - begin with writing about their paradise after a pre-visualization: senses, what does it smell like, taste like, sound like? what would be in your paradise? what feelings or emotions does it give you? Sketch paradise, brainstorm emotions and marks that could be used</li>
</ul>
<h5>DAY 3:</h5>
<ul>
<li>mono-print - using water soluble crayons on transparency paper (use press?)</li>
<li>experiments with sample sheet, making expressive marks with material and printing a test</li>
<li>draw paradise using expressive mark making</li>
<li>transfer image to damp piece of paper (multiple times/papers?)</li>
<li>discussion of process and results</li>
</ul>
<h5>DAY 4:</h5>
<ul>
<li>styrofoam prints - positive and negative, additive and subtractive printing</li>
<li>draw paradise on styrofoam with markers and transfer onto damp paper (additive)</li>
<li>trace over marker lines with a pencil, carve into the styrofoam, and color whole plate with markers, transfer to damp paper (subtractive)</li>
<li>critique/discussion of different processes/results</li>
</ul>
<file_sep>$( document ).ready(function() {
$("#gallery1 #thumbs img").click(function() {
var src = $(this).attr('src');
$("#gallery1 #large img").attr('src', src);
});
$("#gallery2 #thumbs img").click(function() {
var src = $(this).attr('src');
$("#gallery2 #large img").attr('src', src);
});
}); | 0a0e0fa48124779d0c7a4c23774fcf6d301414a7 | [
"JavaScript",
"HTML"
] | 3 | HTML | kstabin/kstabin.com | 467444c124d81fe38ec3a06f11e9dbea14479508 | e5019e183806a1cf69d9a50af9e9b6b03c598da5 |
refs/heads/master | <repo_name>aleksandartraja/disconnected<file_sep>/DiSConnected.Angular.Web/App/templates/page.js
(function() {
'use strict';
function PageCtrl($scope, $state, $rootScope, Site, $timeout) {
// TODO:
Site.get({}, function (site) {
$scope.site = site;
$timeout(function () {
$scope.$apply();
});
}, function (error) {
console.log('Site endpoint returned an error.', error);
});
}
PageCtrl.$inject = ['$scope', '$state', '$rootScope', 'Site', '$timeout'];
angular
.module('app')
.controller('PageCtrl', PageCtrl);
})();<file_sep>/DiSConnected.Angular.Web/App/configuration/routes.js
(function () {
    'use strict';

    // App route configuration: wires URLs to templates and controllers
    // through Angular-UI-Router's nested state machine.
    function routes($locationProvider, $stateProvider, $urlRouterProvider) {
        // Unmatched URLs fall back to the home route.
        $urlRouterProvider.otherwise("/");

        // Rewrite the url without hashes and use the history API
        $locationProvider.html5Mode(true);

        // Nested state-based routes via Angular-UI-Router.
        // All concrete states nest under the abstract 'page' wrapper so they
        // share PageCtrl and the page.html shell.
        $stateProvider

            // wrapper route (abstract: never activated on its own)
            .state('page', {
                abstract: true,
                templateUrl: '/App/templates/page.html',
                controller: 'PageCtrl'
            })

            // home route
            .state('home', {
                url: '/',
                templateUrl: '/App/templates/home.html',
                controller: 'HomeCtrl',
                parent: 'page'
            })

            // article route (:id is the item identifier passed to ArticleCtrl)
            .state('article', {
                url: '/article/:id',
                templateUrl: '/App/templates/article.html',
                controller: 'ArticleCtrl',
                parent: 'page'
            })

            // subsection 1 routes
            .state('subsection1', {
                abstract: true,
                url: '/subsection1',
                templateUrl: '/App/templates/subsection1/subsection1.html',
                parent: 'page'
            })
            .state('subsection1.page', {
                url: '/',
                templateUrl: '/App/templates/subsection1/subsection1-page.html',
                controller: 'Subsection1PageCtrl',
                parent: 'subsection1'
            })

            // subsection 2 routes
            .state('subsection2', {
                abstract: true,
                url: '/subsection2',
                templateUrl: '/App/templates/subsection2/subsection2.html',
                parent: 'page'
            })
            .state('subsection2.page', {
                url: '/',
                templateUrl: '/App/templates/subsection2/subsection2-page.html',
                controller: 'Subsection2PageCtrl',
                parent: 'subsection2'
            });

        $stateProvider.debug = true;
    }
    routes.$inject = ['$locationProvider', '$stateProvider', '$urlRouterProvider'];

    angular
        .module('app')
        .config(routes);
})();
<file_sep>/DiSConnected.Angular.Web/App/factories/article.js
(function () {
'use strict';
function Article($resource, Config) {
return $resource(Config.articleEndpoint);
}
Article.$inject = ['$resource', 'Config'];
angular
.module('app')
.factory('Article', Article);
})();<file_sep>/DiSConnected.Angular.Web/App/directives/page-footer.js
(function () {
'use strict';
function PageFooterCtrl($scope) {
// TODO:
}
PageFooterCtrl.$inject = ['$scope'];
function pageFooter() {
return {
scope: {
message: "=",
footer: "=",
},
templateUrl: '/App/directives/page-footer.html',
controller: PageFooterCtrl,
};
}
angular
.module('app')
.directive('pageFooter', pageFooter)
.controller('PageFooterCtrl', PageFooterCtrl);
})();<file_sep>/DiSConnected.Sitecore.Web/Controllers/ArticlesController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web;
using System.Web.Http;
using Sitecore.Configuration;
using Sitecore.Data;
using Sitecore.Data.Items;
using SitecoreContext = Sitecore.Context;
using AutoMapper;
using DiSConnected.Sitecore.Web.Common.Classes.Dtos;
namespace DiSConnected.Sitecore.Web.Controllers
{
    /// <summary>
    /// The intention of this controller is to serve up specific content to the front end. It is currently named "article" controller only to demonstrate the
    /// seperation of controllers based on content type (article, blog, document, etc.)
    /// This would be a good time to use CustomItemGenerator (https://marketplace.sitecore.net/en/Modules/Custom_Item_Generator.aspx)
    /// to seperate these controllers further to specific generated classes. But for the sake of example, I will merely use automapper to pull a generic item...
    /// </summary>
    [RoutePrefix("content_delivery/api/article")]
    [Route("{action=Get}")]
    public class ArticlesController : ApiController
    {
        // GET api/<controller>
        // Returns every item of the sample template under the current site's
        // content root, mapped to ArticleDto.
        [Route("")]
        public async Task<object> Get()
        {
            // HttpContext.Current is thread-static, so capture it here and
            // hand it to the worker thread spawned by Task.Run.
            var currentContext = HttpContext.Current;
            return await Task.Run(() => GetAsync(currentContext));
        }

        private object GetAsync(HttpContext currentContext)
        {
            // Restore the request context on this worker thread so the static
            // Sitecore accessors (SitecoreContext.*) keep working.
            HttpContext.Current = currentContext;
            var retVal = new List<ArticleDto>();
            // Always read from the published "web" database, whatever the context database is.
            using (new DatabaseSwitcher(Factory.GetDatabase("web")))
            {
                var contentRoot = SitecoreContext.Site.ContentStartPath;//would allow for multi-site handling...
                var templateName = "Sample Item";//CIG - TemplateItem.TemplateID, and modify select query to handle template id
                var selectQuery = string.Format("{0}//*[@@templatename = \"{1}\"]", contentRoot, templateName);
                var selectedItems = SitecoreContext.Data.Database.SelectItems(selectQuery);
                retVal = Mapper.Map<List<Item>, List<ArticleDto>>(selectedItems.ToList());
            }
            return retVal;
        }

        // GET api/<controller>/5
        // Returns the single item with the given id, mapped to ArticleDto.
        [Route("")]
        [Route("{id}")]
        public async Task<object> Get(Guid id)
        {
            var currentContext = HttpContext.Current;
            return await Task.Run(() => GetByIdAsync(currentContext, id));
        }

        private object GetByIdAsync(HttpContext currentContext, Guid id)
        {
            HttpContext.Current = currentContext;
            var retVal = new ArticleDto();
            using (new DatabaseSwitcher(Factory.GetDatabase("web")))
            {
                // NOTE(review): GetItem returns null for an unknown id, so the
                // mapped result may be null here — confirm the desired 404 handling.
                var selectedItem = SitecoreContext.Data.Database.GetItem(new ID(id));
                retVal = Mapper.Map<Item, ArticleDto>(selectedItem);
            }
            return retVal;
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Controllers/AccountController.cs
using System;
using System.Threading.Tasks;
using System.Web;
using System.Web.Http;
using System.Web.Security;
using AutoMapper;
using DiSConnected.Sitecore.Web.Common.Classes.Dtos;
using Sitecore;
using Sitecore.Security.Accounts;
using Sitecore.Security.Authentication;
namespace DiSConnected.Sitecore.Web.Controllers
{
    /// <summary>
    /// This entire controller is shelled out for demo purposes, and should be modified to suit the needs of implementation
    /// Possible replacements to the 'Login' method would be to implement an OWIN auth server and use a token based approach
    /// To keep this inital project ligher weight, I opted for a aspxauth cookie provided by Forms auth.
    /// </summary>
    [RoutePrefix("content_delivery/api/account")]
    [Route("{action=Get}")]
    public class AccountController : ApiController
    {
        /// <summary>
        /// Simple object to hold login creds submitted to endpoint, username and password expected to be coming in as base64 enoded
        /// NOTE(review): base64 is an encoding, not encryption — the transport must be HTTPS for this to be safe.
        /// </summary>
        public class SimpleLogin
        {
            public string username;
            public string password;

            // Username after base64 decoding.
            public string UsernameDecoded()
            {
                return AtoBDecoded(username);
            }

            // Password after base64 decoding.
            public string PasswordDecoded()
            {
                return AtoBDecoded(password);
            }

            // Decodes a base64 string (the name mirrors JavaScript's atob()).
            private string AtoBDecoded(string input)
            {
                byte[] fromBase64String = System.Convert.FromBase64String(input);
                return System.Text.Encoding.ASCII.GetString(fromBase64String);
            }
        }

        /// <summary>
        /// Async get of current user based on identity
        /// </summary>
        /// <returns>The current Sitecore context user mapped to a UserDto.</returns>
        [HttpGet]
        //[Authorize]
        [Route("")]
        [Route("{id:max(-1)}")]
        public async Task<object> Get()
        {
            //todo: get current sitecore user
            // Capture the thread-static request context for the worker thread.
            var httpContext = HttpContext.Current;
            return await Task.Run(() => GetAsync(httpContext));
        }

        private object GetAsync(HttpContext currentContext)
        {
            // Restore the request context on this worker thread.
            HttpContext.Current = currentContext;
            var user = Mapper.Map<User, UserDto>(Context.User);
            return user;
        }

        /// <summary>
        /// Post method to login in and get a forms auth cookie, synchronous. Current login is expecting username/pass to be base64 encrypted
        /// This is a very rudimentary or crude auth process.
        /// </summary>
        /// <param name="currentLogin">Base64-encoded credentials.</param>
        /// <returns>true when the credentials were accepted and a cookie was issued; false otherwise.</returns>
        [HttpPost]
        [Route("login")]
        public object Login([FromBody]SimpleLogin currentLogin)
        {
            var currentUsername = currentLogin.UsernameDecoded();
            var currentPassword = currentLogin.PasswordDecoded();
            bool retVal = false;
            //todo: get an auth token and return to angular or whatever endpoint consuming medium, feel free to implement replacement at will
            bool login = AuthenticationManager.Login(currentUsername, currentPassword);
            if (login)
            {
                // sometimes used to persist user roles
                //string userData = string.Join("|", GetCustomUserRoles());

                //Opted for forms auth, since this is how sitecore handles logging into shell
                FormsAuthenticationTicket ticket = new FormsAuthenticationTicket(
                     1, // ticket version
                     currentUsername, // authenticated username
                     DateTime.Now, // issueDate
                     DateTime.Now.AddMinutes(30), // expiration — NOTE(review): confirm this should match the site's session timeout
                     false, // not persistent across browser sessions
                     "", // no custom user data
                     FormsAuthentication.FormsCookiePath); // the path for the cookie

                // Encrypt the ticket using the machine key
                string encryptedTicket = FormsAuthentication.Encrypt(ticket);

                // Add the cookie to the request to save it
                HttpCookie cookie = new HttpCookie(FormsAuthentication.FormsCookieName, encryptedTicket);
                cookie.HttpOnly = true; // keep the ticket out of reach of client-side script
                HttpContext.Current.Response.Cookies.Add(cookie);

                // Your redirect logic
                retVal = true;
            }
            return retVal;
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Attributes/SitecoreApiAuthorizationAttribute.cs
using System;
using System.Web.Http;
using System.Web.Http.Controllers;
using DiSConnected.Sitecore.Web.Common.Classes.Services;
namespace DiSConnected.Sitecore.Web.Common.Classes.Attributes
{
    [Obsolete("Please use Authorize unless planning to implement custom auth handler")]
    public class SitecoreApiAuthorizationAttribute : AuthorizeAttribute
    {
        // Override kept only so both identity sources can be inspected while
        // debugging; the authorization decision is delegated to the base class.
        protected override bool IsAuthorized(HttpActionContext actionContext)
        {
            //Code here only for debugging purposes
            string usernameFromSitecoreAuth = IdentityService.GetUsernameFromSitecoreAuth();
            string usernameFromWindowsAuth = IdentityService.GetUsernameFromWindowsAuth();
            return base.IsAuthorized(actionContext);
        }

        // Same debugging hook for the rejection path; base behaviour unchanged.
        protected override void HandleUnauthorizedRequest(HttpActionContext actionContext)
        {
            string usernameFromSitecoreAuth = IdentityService.GetUsernameFromSitecoreAuth();
            string usernameFromWindowsAuth = IdentityService.GetUsernameFromWindowsAuth();
            base.HandleUnauthorizedRequest(actionContext);
        }
    }
}<file_sep>/DiSConnected.Angular.Web/Controllers/HomeController.cs
using System.Web.Mvc;
using DiSConnected.Angular.Web.Web.Classes;
namespace DiSConnected.Angular.Web.Web.Controllers
{
    public class HomeController : Controller
    {
        // GET: Home
        // Single MVC entry point: the catch-all route sends every URL here and
        // this view bootstraps the Angular app with the configured REST endpoints.
        public ActionResult Index()
        {
            //TODO: Add/remove endpoint config references as you add and remove rest endpoints
            ViewBag.LogLevel = Configuration.LogLevel;
            ViewBag.ArticleEndpoint = Configuration.ArticleEndpoint;
            ViewBag.SiteEndpoint = Configuration.SiteEndpoint;
            return View();
        }
    }
}<file_sep>/DiSConnected.Angular.Web/Classes/TemplateBundle.cs
using System.Web.Optimization;
namespace DiSConnected.Angular.Web.Web.Classes
{
    /// <summary>
    /// Defines a bundle of Angular templates; every included file is run
    /// through a TemplateTransform bound to the given Angular module.
    /// </summary>
    public class TemplateBundle : Bundle
    {
        /// <param name="moduleName">Name of the Angular module the transform targets.</param>
        /// <param name="virtualPath">Virtual path the bundle is served from.</param>
        public TemplateBundle(string moduleName, string virtualPath)
            : base(virtualPath, new[] { new TemplateTransform(moduleName) })
        {
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Util/RestUtil.cs
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Web;
using Sitecore;
using Sitecore.Data;
using Sitecore.Data.Items;
using Sitecore.Resources.Media;
using Sitecore.StringExtensions;
using Sitecore.Web.UI.WebControls;
using DiSConnected.Sitecore.Web.Common.Classes.Services;
namespace DiSConnected.Sitecore.Web.Common.Classes.Util
{
public static class RestUtil
{
/// <summary>
/// A simple string format and replace to make a machine/code like name
/// </summary>
/// <param name="currentItem"></param>
/// <returns></returns>
public static string GetComponentCodeName(Item currentItem)
{
string retVal = GetComponentCodeName(currentItem.TemplateName);
return retVal;
}
/// <summary>
/// A simple string format and replace to make a machine/code like name
/// </summary>
/// <param name="currentItemTemplateName"></param>
/// <returns></returns>
public static string GetComponentCodeName(string currentItemTemplateName)
{
string retVal = "";
if (!string.IsNullOrEmpty(currentItemTemplateName))
{
retVal = currentItemTemplateName.ToLower().Replace(" ", "_");
}
return retVal;
}
/// <summary>
/// Simply get the ID from the component
/// </summary>
/// <param name="currentItem"></param>
/// <returns></returns>
public static string GetComponentId(Item currentItem)
{
string retVal = currentItem.ID.ToString();
return retVal;
}
/// <summary>
/// Runs a field thru the sitecore fieldrender and disabls web-editing, thus getting a field value as if it ran thru the render pipeline
/// </summary>
/// <param name="currentItem"></param>
/// <param name="currentFieldName"></param>
/// <returns></returns>
public static string ProcessSitecoreField(Item currentItem, string currentFieldName)
{
var retVal = "";
try
{
retVal = HttpUtility.HtmlDecode(FieldRenderer.Render(currentItem, currentFieldName, "disable-web-editing=true"));//disable web edit entirely
}
catch (Exception)
{
throw new Exception(string.Format("RestUtil.ProcessSitecoreField - Error Processing: {0}:{1} - {2}", currentItem.DisplayName, currentItem.ID.ToString(), currentFieldName));
}
return retVal;
}
/// <summary>
/// Gets related url for the Media item
/// </summary>
/// <param name="currentMediaItem"></param>
/// <param name="sourceItem"></param>
/// <param name="thumbnail"></param>
/// <returns></returns>
public static string GetMediaUrl(Item currentMediaItem, Item sourceItem = null, bool thumbnail = false)
{
var retVal = "";
try
{
if (currentMediaItem != null)
{
var mediaUrlOptions = new MediaUrlOptions();
//Feel free to handle different images as they need ie:
//mediaUrlOptions.MaxWidth = 750;
//mediaUrlOptions.MaxHeight = 500;
retVal = MediaManager.GetMediaUrl(currentMediaItem, mediaUrlOptions);
}
}
catch (Exception)
{
throw;
}
return retVal;
}
/// <summary>
/// Create links for a specific item that are fully deep linkable from the FE/angular. Uses the template name to determine the route.
/// </summary>
/// <param name="itemId"></param>
/// <returns></returns>
public static string DeepLinkHandler(string itemId)
{
var currentItem = Context.Database.GetItem(new ID(itemId));
return DeepLinkHandler(currentItem, null);
}
/// <summary>
/// Create links for a specific item that are fully deep linkable from the FE/angular. Uses the template name to determine the route.
/// </summary>
/// <param name="currentItem"></param>
/// <returns></returns>
public static string DeepLinkHandler(Item currentItem)
{
return DeepLinkHandler(currentItem, null);
}
/// <summary>
/// Create links for a specific item that are fully deep linkable from the FE/angular. Uses the template name to determine the route.
/// </summary>
/// <param name="currentItem"></param>
/// <param name="currentUser"></param>
/// <returns></returns>
public static string DeepLinkHandler(Item currentItem, string currentUser)
{
var retVal = "";
//Handle deep link relevant to the front end/angular
//For example, if using CIG and given our current SampleItem type of template...
//if (currentItem.IsOfTemplate(SampleItem.TemplateId.ToString(), true))
// retVal = string.Format("/article/{0}", currentItem.ID);
//else if (currentItem.IsOfTemplate(SampleItem2.TemplateId.ToString(), true) || currentItem.IsOfTemplate(TeaserItem.TemplateId.ToString(), true))
// retVal = string.Format("/teaser/{0}", currentItem.ID);
//this would be a very programatic way to handle any instance of templates you generate, but for now lets just use this:
retVal = string.Format("{0}/{1}", GetComponentCodeName(currentItem), currentItem.ID.ToGuid());
return retVal;
}
/// <summary>
/// Truncate function to restrict length and add ellipsis
/// </summary>
/// <param name="value"></param>
/// <param name="maxChars"></param>
/// <returns></returns>
public static string Truncate(string value, int maxChars)
{
string retVal = "";
if (!value.IsNullOrEmpty())
retVal = (value.Length <= maxChars ? value : value.Substring(0, maxChars) + " ...");
return retVal;
}
/// <summary>
/// Gets user Id, if there is none, or it fails returns null
/// </summary>
/// <returns></returns>
public static string GetCurrentUserAccountName()
{
string retVal = null;
try
{
if (HttpContext.Current != null)
{
var profile = IdentityService.GetUsernameFromSitecoreAuth();
retVal = profile;
}
}
catch (Exception exception)
{
throw new Exception("Unable to obtain Current User (RestUtil)", exception); //swallow the exception, its a Util
}
return retVal;
}
/// <summary>
/// Used to parse out (via regex) http(s) and www links
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
public static List<string> ParseForLinks(string input)
{
Regex linkParser = new Regex(@"\b(?:https?://|www\.)\S+\b", RegexOptions.Compiled | RegexOptions.IgnoreCase);
List<string> listOfLinks = new List<string>();
foreach (Match m in linkParser.Matches(input))
{
string currentMatch = "";
//if (!m.Value.StartsWith("http://") && !m.Value.StartsWith("https://"))
// currentMatch = "http://" + m.Value;
//else
currentMatch = m.Value;
listOfLinks.Add(currentMatch);
}
return listOfLinks;
}
public static string HtmlEncode(string input)
{
return HttpUtility.HtmlEncode(input);
}
/// <summary>
/// Used to provide a static reference back to the hostname on async requests
/// </summary>
public static string CurrentHostname { get; set; }
/// <summary>
/// See if a string contains a given string, with comparison for case sensitivity
/// </summary>
/// <param name="source"></param>
/// <param name="toCheck"></param>
/// <param name="comp"></param>
/// <returns></returns>
public static bool Contains(this string source, string toCheck, StringComparison comp)
{
return source.IndexOf(toCheck, comp) >= 0;
}
/// <summary>
/// Method to getting the full path to a sitecore provided icon
/// </summary>
/// <param name="iconShortPath"></param>
/// <returns></returns>
public static string ResolveSitecoreIcon(string iconShortPath)
{
return string.Format("{2}://{0}/~/icon/{1}", HttpContext.Current.Request.Url.Host, iconShortPath, HttpContext.Current.Request.Url.Scheme);
}
}
}
<file_sep>/DiSConnected.Angular.Web/App/templates/subsection1/subsection1-page.js
(function () {
'use strict';
function Subsection1PageCtrl($scope, $state, $rootScope) {
$scope.message = 'Hello from Subsection1PageCtrl';
}
Subsection1PageCtrl.$inject = ['$scope', '$state', '$rootScope'];
angular
.module('app')
.controller('Subsection1PageCtrl', Subsection1PageCtrl);
})();<file_sep>/DiSConnected.Angular.Web/App_Start/RouteConfig.cs
using System.Web.Mvc;
using System.Web.Routing;
namespace DiSConnected.Angular.Web
{
public class RouteConfig
{
public static void RegisterRoutes(RouteCollection routes)
{
routes.IgnoreRoute("{resource}.axd/{*pathInfo}");
// Poke a hole through the routing for static files
routes.RouteExistingFiles = false;
// Unsupported browser error route
routes.MapRoute(
"Error-Unsupported",
"Error/Unsupported",
new { Controller = "Error", action = "Unsupported", id = "" });
// Application error route
routes.MapRoute(
"Error-Application",
"Error/Application",
new { Controller = "Error", action = "Application", id = "" });
// Route all other requests to Angular UI Router
routes.MapRoute(
"App",
"{*.}",
new { controller = "Home", action = "Index" });
}
}
}
<file_sep>/DiSConnected.Angular.Web/App/directives/page-header.js
(function () {
'use strict';
function PageHeaderCtrl($scope) {
// TODO:
}
PageHeaderCtrl.$inject = ['$scope'];
function pageHeader() {
return {
scope: {
message: "=",
header: "=",
},
templateUrl: '/App/directives/page-header.html',
controller: PageHeaderCtrl,
};
}
angular
.module('app')
.directive('pageHeader', pageHeader)
.controller('PageHeaderCtrl', PageHeaderCtrl);
})();<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Dtos/ArticleDto.cs
using System;
using System.Collections.Generic;
namespace DiSConnected.Sitecore.Web.Common.Classes.Dtos
{
    /// <summary>
    /// Wire-format object for an article returned by the article endpoint.
    /// Populated from a Sitecore item via AutoMapper (see Mappings.ArticleDtomapping).
    /// </summary>
    public class ArticleDto
    {
        public string Title;      // item display name
        public Guid Id;           // Sitecore item id
        public string Icon;       // absolute URL of the template icon
        public string Summary;
        public string Author;
        public string Type;       // machine/code name derived from the template name
        public string Text;
        public List<string> Tags; // currently always mapped to an empty list
    }
}<file_sep>/DiSConnected.Angular.Web/App/directives/page-navigation.js
(function () {
'use strict';
function PageNavigationCtrl($scope) {
// TODO:
}
PageNavigationCtrl.$inject = ['$scope'];
function pageNavigation() {
return {
scope: {
message: "=",
navigation: "=",
},
templateUrl: '/App/directives/page-navigation.html',
controller: PageNavigationCtrl,
};
}
angular
.module('app')
.directive('pageNavigation', pageNavigation)
.controller('PageNavigationCtrl', PageNavigationCtrl);
})();<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Dtos/Mappings.cs
using System.Collections.Generic;
using AutoMapper;
using DiSConnected.Sitecore.Web.Common.Classes.Interfaces;
using DiSConnected.Sitecore.Web.Common.Classes.Util;
using Sitecore.Data.Items;
using Sitecore.Security.Accounts;
namespace DiSConnected.Sitecore.Web.Common.Classes.Dtos
{
    /// <summary>
    /// AutoMapper profiles translating Sitecore objects into wire DTOs.
    /// Each nested class implements IDtoMapping (presumably invoked at
    /// startup — confirm against the registration code).
    /// </summary>
    public class Mappings
    {
        // User -> UserDto; the portrait short path is expanded to a full icon URL.
        public class UserDtoMapping : IDtoMapping
        {
            public void CreateMappings()
            {
                Mapper.CreateMap<User, UserDto>()
                    .ForMember(dest => dest.ProfilePortrait, opt => opt.MapFrom(src => RestUtil.ResolveSitecoreIcon(src.Profile.Portrait)));
            }
        }

        // Item -> ArticleDto. NOTE(review): class name should be
        // "ArticleDtoMapping" (casing); left as-is in case these types are
        // discovered by name via reflection.
        public class ArticleDtomapping : IDtoMapping
        {
            public void CreateMappings()
            {
                Mapper.CreateMap<Item, ArticleDto>()
                    .ForMember(dest => dest.Title, opt => opt.MapFrom(src => src.DisplayName))
                    .ForMember(dest => dest.Text, opt => opt.MapFrom(src => src.Fields["Text"].ToString()))
                    .ForMember(dest => dest.Type, opt => opt.MapFrom(src => RestUtil.GetComponentCodeName(src)))
                    .ForMember(dest => dest.Id, opt => opt.MapFrom(src => src.ID.ToGuid()))
                    .ForMember(dest => dest.Summary, opt => opt.MapFrom(src => "Summary - " + src.DisplayName))
                    .ForMember(dest => dest.Author, opt => opt.MapFrom(src => src.Fields["Author"].ToString()))
                    .ForMember(dest => dest.Tags, opt => opt.UseValue(new List<string>()))
                    .ForMember(dest => dest.Icon, opt => opt.MapFrom(src => RestUtil.ResolveSitecoreIcon(src.Template.Icon)));
            }
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Caching/NullOutputCache.cs
using System;
using System.Collections.Generic;
using WebApi.OutputCache.Core.Cache;
namespace DiSConnected.Sitecore.Web.Common.Classes.Caching
{
    /// <summary>
    /// Added if you would like to disable caching depending on web.config setup (transforms having DisableServiceOutputCache = true)
    /// No-op IApiOutputCache implementation: stores nothing and reports every key as absent.
    /// </summary>
    public class NullOutputCache : IApiOutputCache
    {
        public void RemoveStartsWith(string key)
        {
            // intentionally empty — nothing is ever cached
        }

        public T Get<T>(string key) where T : class
        {
            return null; // never a cache hit
        }

        public object Get(string key)
        {
            return null; // never a cache hit
        }

        public void Remove(string key)
        {
            // intentionally empty
        }

        public bool Contains(string key)
        {
            return false; // nothing is ever cached
        }

        public void Add(string key, object o, DateTimeOffset expiration, string dependsOnKey = null)
        {
            // intentionally empty — Add is silently discarded
        }

        public IEnumerable<string> AllKeys
        {
            get { return new String[0]; }
            private set { }
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Dtos/UserDto.cs
namespace DiSConnected.Sitecore.Web.Common.Classes.Dtos
{
    /// <summary>
    /// Wire-format object describing the current Sitecore user
    /// (mapped from Sitecore's User via AutoMapper — see Mappings.UserDtoMapping).
    /// </summary>
    public class UserDto
    {
        public string LocalName;
        public bool IsAdministrator;
        public bool IsAuthenticated;
        public string Domain;
        public string ProfileFullName;
        public string ProfileEmail;
        public string ProfilePortrait; // full icon URL built from the profile portrait path
    }
}<file_sep>/DiSConnected.Sitecore.Web/Controllers/SiteController.cs
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Web;
using System.Web.Http;
using DiSConnected.Sitecore.Web.Common.Classes.Util;
using WebApi.OutputCache.V2;
namespace DiSConnected.Sitecore.Web.Controllers
{
    /// <summary>
    /// Sitecontroller: this controller is intended to feed the static site elements, ie: banners, header, footer, menu, notification/message stuff.
    /// Generally I would use the content tree or a global "settings" item to aggregate the data needed to be passed to the front end, currently
    /// there is only just some hard set data, feel free to elaborate...
    /// </summary>
    [RoutePrefix("content_delivery/api/site")]
    [Route("{action=Get}")]
    public class SiteController : ApiController
    {
        // GET api/<controller>
        // Returns the site chrome (navigation, header, footer) as one anonymous object.
        [CacheOutput(ClientTimeSpan = 300, ServerTimeSpan = 300)]//decided to cache this, very rarely will change. 5 min on both client and server...
        public async Task<object> Get()
        {
            // Capture the thread-static request context for the worker thread.
            var currentContext = HttpContext.Current;
            return await Task.Run(() => GetAsync(currentContext));
        }

        private object GetAsync(HttpContext currentContext)
        {
            // Restore the request context on this worker thread.
            HttpContext.Current = currentContext;
            // Hard-coded demo content; replace with content-tree driven data.
            var footerLinks = new List<object>();
            footerLinks.Add(new { Title = "Terms and Conditions", Url = "terms-and-conditions" });
            footerLinks.Add(new { Title = "FAQ", Url = "faq" });
            var navLinks = new List<object>();
            navLinks.Add(new { Title = "Home", Url = "/" });
            navLinks.Add(new { Title = "Subsection 1", Url = "subsection1/" });
            navLinks.Add(new { Title = "Subsection 2", Url = "subsection2/" });
            var footer = new { Copyright = "DiSConnected Demo Copyright - 2015", FooterLogo = RestUtil.ResolveSitecoreIcon("Imaging/16x16/cut_object.png"), Links = footerLinks };
            var header = new { HeaderLogo = RestUtil.ResolveSitecoreIcon("Imaging/16x16/cut_object.png"), Sitename = "DiSConnected" };
            var navigation = new { Links = navLinks };
            var retVal = new { Navigation = navigation, Header = header, Footer = footer };
            return retVal;
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Services/IdentityService.cs
using System.Web;
using SitecoreContext = Sitecore.Context;
namespace DiSConnected.Sitecore.Web.Common.Classes.Services
{
public static class IdentityService
{
public static string GetUsernameFromWindowsAuth()
{
var windowsIdentity = HttpContext.Current.User.Identity.Name;
return windowsIdentity;
}
public static string GetUsernameFromSitecoreAuth()
{
var sitecoreIdentity = SitecoreContext.User.Identity.Name;
return sitecoreIdentity;
}
}
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Caching/EndpointCache.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Caching;
using WebApi.OutputCache.Core.Cache;
namespace DiSConnected.Sitecore.Web.Common.Classes.Caching
{
/// <summary>
/// A basic endpoint result cache impementation using MemoryCache, feel free to implement/extend this to whatever caching mechanism you choose
/// </summary>
public class EndpointCache : IApiOutputCache
{
MemoryCache _memoryCache = new MemoryCache("endpointCache");
public void Add(string key, object o, DateTimeOffset expiration, string dependsOnKey = null)
{
_memoryCache.Add(key, o, expiration);
}
public IEnumerable<string> AllKeys
{
get
{
return _memoryCache.Select(item => item.Key).ToList();
}
}
public bool Contains(string key)
{
return _memoryCache.Contains(key);
}
public object Get(string key)
{
return _memoryCache.Get(key);
}
public T Get<T>(string key) where T : class
{
return _memoryCache.Get(key) as T;
}
public void Remove(string key)
{
_memoryCache.Remove(key);
}
public void RemoveStartsWith(string key)
{
IEnumerable<KeyValuePair<string, object>> matchedStartWith = _memoryCache.Where(x => x.Key.StartsWith(key));
foreach (var matchedItem in matchedStartWith)
{
_memoryCache.Remove(matchedItem.Key);
}
}
}
}<file_sep>/DiSConnected.Sitecore.Web/Common/Classes/Interfaces/IDtoMapping.cs
namespace DiSConnected.Sitecore.Web.Common.Classes.Interfaces
{
    /// <summary>
    /// Contract for an AutoMapper profile; implementations register their
    /// maps in CreateMappings (see Mappings for the concrete profiles).
    /// </summary>
    public interface IDtoMapping
    {
        // Registers this profile's AutoMapper maps.
        void CreateMappings();
    }
}
<file_sep>/DiSConnected.Angular.Web/App/factories/site.js
(function () {
'use strict';
function Site($resource, Config) {
return $resource(Config.siteEndpoint);
}
Site.$inject = ['$resource', 'Config'];
angular
.module('app')
.factory('Site', Site);
})();<file_sep>/DiSConnected.Angular.Web/Controllers/ErrorController.cs
using System.Web.Mvc;
namespace DiSConnected.Angular.Web.Web.Controllers
{
    /// <summary>
    /// Serves the static error views that live outside the Angular app.
    /// </summary>
    public class ErrorController : Controller
    {
        /// <summary>
        /// Unsupported browser error page
        /// </summary>
        /// <returns>The Unsupported view.</returns>
        public ActionResult Unsupported()
        {
            return View();
        }

        /// <summary>
        /// Application error page
        /// </summary>
        /// <returns>The Application view.</returns>
        public ActionResult Application()
        {
            return View();
        }
    }
}<file_sep>/DiSConnected.Sitecore.Web/Global.asax.cs
using System;
using System.Collections.Generic;
using System.Web;
using System.Web.Configuration;
namespace DiSConnected.Sitecore.Web
{
    /// <summary>
    /// HttpApplication subclass; adds friendly-error handling on top of the
    /// standard Sitecore web application.
    /// </summary>
    public class Application : global::Sitecore.Web.Application
    {
        /// <summary>
        /// Global error handler: logs the exception, then redirects or
        /// transfers to the configured error page, honouring CustomErrors mode.
        /// </summary>
        protected void Application_Error(object sender, EventArgs e)
        {
            // if not configured to show friendly errors or Custom Errors to remote only and we are currently Local
            // do not handle any exceptions.
            if (global::Sitecore.Configuration.Settings.CustomErrorsMode == CustomErrorsMode.Off || (global::Sitecore.Configuration.Settings.CustomErrorsMode == CustomErrorsMode.RemoteOnly && HttpContext.Current.Request.IsLocal))
            {
                return;
            }

            // get and log the exception.
            // Sitecore will also try to log the exception,
            // which can lead to some duplicate messages in the logs,
            // but this message includes the requested URL,
            // which could be useful for diagnosing issues.
            Exception exception = Server.GetLastError();
            global::Sitecore.Diagnostics.Log.Error(this + " : Exception processing " + global::Sitecore.Context.RawUrl, exception, this);

            // we're going to handle this exception.
            Response.Clear();
            Server.ClearError();

            // treat all exceptions as HTTP 500 by default
            // and redirect/tranfer to a generic error page
            string url = global::Sitecore.Configuration.Settings.ErrorPage;

            // query string parameters to add to the URL
            List<string> list = new List<string>();

            // check if it's an HTTP 404.
            // if it is, change the redirect/transfer URL;
            // otherwise, add the error query string parameter
            // used by the generic error page.
            HttpException httpException = exception as HttpException;

            if (httpException != null && httpException.GetHttpCode() == 404)
            {
                url = global::Sitecore.Configuration.Settings.ItemNotFoundUrl;
            }
            else
            {
                list.Add("error");
                list.Add(global::Sitecore.Globalization.Translate.Text("An unhandled exception occurred."));
            }

            // the ExecuteRequest pipeline processor adds these for 404 conditions,
            // and it doesn't hurt to add them for any other conditons.
            list.Add("user");
            list.Add(global::Sitecore.Context.User != null ? global::Sitecore.Context.User.Name : String.Empty);
            list.Add("site");
            list.Add(global::Sitecore.Context.Site != null ? global::Sitecore.Context.Site.Name : String.Empty);

            // if configured to add the URL to the query string on errors, add it.
            if (global::Sitecore.Configuration.Settings.Authentication.SaveRawUrl)
            {
                list.Add("url");
                list.Add(global::Sitecore.Context.RawUrl != null ? global::Sitecore.Context.RawUrl : String.Empty);
            }

            // add the query string parameters (this encodes them automatically).
            url = global::Sitecore.Web.WebUtil.AddQueryString(url, list.ToArray());

            // if configured to transfer, transfer, otherwise redirect.
            if (global::Sitecore.Configuration.Settings.RequestErrors.UseServerSideRedirect)
            {
                HttpContext.Current.Server.Transfer(url);
            }
            else
            {
                global::Sitecore.Web.WebUtil.Redirect(url, false);
            }
        }
    }
}
<file_sep>/DiSConnected.Angular.Web/App_Start/BundleConfig.cs
using System.Web.Optimization;
using DiSConnected.Angular.Web.Web.Classes;
namespace DiSConnected.Angular.Web.Web
{
    public class BundleConfig
    {
        // TODO: For any additionally created directives or templates, please add the necessary lines here...
        // For more information on bundling, visit http://go.microsoft.com/fwlink/?LinkId=301862
        // Registers four bundles: third-party scripts, the app's own JS, the
        // Angular template cache (see TemplateBundle) and the CSS.
        public static void RegisterBundles(BundleCollection bundles)
        {
            // script bundle — Angular and its plugins, in dependency order
            bundles.Add(new ScriptBundle("~/bundles/scripts").Include(
                "~/Scripts/angular.js",
                "~/Scripts/angular-animate.js",
                "~/Scripts/angular-resource.js",
                "~/Scripts/angular-sanitize.js",
                "~/Scripts/angular-ui-router.js",
                "~/Scripts/angular-touch.js"));

            // app bundle — controllers, directives and factories of the SPA
            bundles.Add(new ScriptBundle("~/bundles/app").Include(
                "~/App/configuration/routes.js",
                "~/App/directives/example.js",
                "~/App/directives/page-header.js",
                "~/App/directives/page-navigation.js",
                "~/App/directives/page-footer.js",
                "~/App/factories/article.js",
                "~/App/factories/site.js",
                "~/App/templates/page.js",
                "~/App/templates/home.js",
                "~/App/templates/article.js",
                "~/App/templates/subsection1/subsection1-page.js",
                "~/App/templates/subsection2/subsection2-page.js"));

            // template bundle — HTML templates pre-registered on the 'app' module
            bundles.Add(new TemplateBundle("app", "~/bundles/templates").Include(
                "~/App/directives/example.html",
                "~/App/directives/page-header.html",
                "~/App/directives/page-navigation.html",
                "~/App/directives/page-footer.html",
                "~/App/templates/page.html",
                "~/App/templates/home.html",
                "~/App/templates/article.html",
                "~/App/templates/subsection1/subsection1.html",
                "~/App/templates/subsection1/subsection1-page.html",
                "~/App/templates/subsection2/subsection2.html",
                "~/App/templates/subsection2/subsection2-page.html"));

            // style bundle
            bundles.Add(new StyleBundle("~/Content/style").Include(
                "~/Content/bootstrap.css",
                "~/Content/site.css"));

            // minification/concatenation toggled via web.config (EnableBundling)
            BundleTable.EnableOptimizations = Configuration.EnableBundling;
        }
    }
}
<file_sep>/lib/Sitecore/README.txt
Please add the following assemblies, provided by your existing Sitecore install, to this directory:
HtmlAgilityPack.dll
Lucene.Net.dll
Sitecore.Buckets.Client.dll
Sitecore.Buckets.dll
Sitecore.ContentSearch.Client.dll
Sitecore.ContentSearch.Linq.Lucene.dll
Sitecore.ContentSearch.Linq.dll
Sitecore.ContentSearch.LuceneProvider.dll
Sitecore.ContentSearch.dll
Sitecore.Kernel.dll
Sitecore.Logging.dll
Sitecore.Mvc.dll
Sitecore.Update.dll
Sitecore.Zip.dll
System.Web.Helpers.dll
Telerik.Web.UI.Skins.dll
Telerik.Web.UI.dll
Telerik.Web.UI.xml
sitecore.nexus.dll<file_sep>/DiSConnected.Angular.Web/Classes/Configuration.cs
using System.Web.Configuration;
namespace DiSConnected.Angular.Web.Web.Classes
{
/// <summary>
/// Configuration helper class
/// </summary>
/// <summary>
/// Configuration helper class exposing lazily-cached values read from the
/// appSettings section of web.config. Each value is read once on first
/// access and memoized for the lifetime of the app domain.
/// </summary>
public static class Configuration
{
    /// <summary>
    /// The front end log level (appSettings key "LogLevel").
    /// </summary>
    public static string LogLevel
    {
        get { return _logLevel ?? (_logLevel = WebConfigurationManager.AppSettings["LogLevel"]); }
    }
    private static string _logLevel;

    /// <summary>
    /// Enable bundling defined in /AppStart/BundleConfig.cs
    /// (appSettings key "EnableBundling"). A missing or non-boolean value
    /// causes <see cref="bool.Parse(string)"/> to throw, so misconfiguration
    /// fails fast on first access.
    /// </summary>
    public static bool EnableBundling
    {
        // Same lazy-init idiom as the string properties above: parse once,
        // cache the result in the nullable backing field.
        get { return _enableBundling ?? (_enableBundling = bool.Parse(WebConfigurationManager.AppSettings["EnableBundling"])).Value; }
    }
    private static bool? _enableBundling;

    /// <summary>
    /// The associated Rest Endpoint for articles (appSettings key "ArticleEndpoint").
    /// </summary>
    public static string ArticleEndpoint
    {
        get { return _articleEndpoint ?? (_articleEndpoint = WebConfigurationManager.AppSettings["ArticleEndpoint"]); }
    }
    private static string _articleEndpoint;

    /// <summary>
    /// The associated Rest Endpoint for sitewide content (appSettings key "SiteEndpoint").
    /// </summary>
    public static string SiteEndpoint
    {
        get { return _siteEndpoint ?? (_siteEndpoint = WebConfigurationManager.AppSettings["SiteEndpoint"]); }
    }
    private static string _siteEndpoint;
}
}<file_sep>/README.md
## Codename:
DiSConnected
## Overview:
A disconnected approach to Sitecore using the Sitecore API, Web API, and AngularJS. This project is intended only as a starter kit to build on. Inline documentation has been provided to help guide you in extending it.
### Setup? ###
* Setup should be straightforward: update the Sitecore Web project publish profile to point at your Sitecore install, and copy the necessary .dlls to the `/lib/Sitecore` directory as listed in its README.txt.
### What's all in it? ###
* A Sitecore Project intended to be published over the top of an existing Sitecore Install
* A MVC Project that is intended to run on a different instance/server to serve as the head of the site, referencing the Sitecore backend endpoints
### Ok, in a paragraph, how does it work? ###
The Angular Web MVC site serves as the head, using *templates* and *directives* to provide presentation and the *factories* to provide the content (*please refer to `/App/factories/article.js` for an example*). The actual RESTful urls reside in the *web.config* of the front end site, and are passed through the controller to the front end for the angular *factories* to consume. These RESTful endpoints are created using WebApi controllers in the Sitecore Web project and map to actual paths using *RouteAttributes* on the controllers themselves (*refer to `/Controllers/ArticlesController.cs` for an example*). Sitecore 7.2 was the version this was initially developed against, but it should work with other versions with little (hopefully no) modification needed.
A special thanks to <NAME> for providing me with the groundwork for the angular project.
Enjoy.<file_sep>/DiSConnected.Sitecore.Web/App_Start/WebApiConfig.cs
using System;
using System.Configuration;
using System.Linq;
using System.Net.Http.Formatting;
using System.Reflection;
using System.Web.Http;
using System.Web.Http.Cors;
using DiSConnected.Sitecore.Web.App_Start;
using DiSConnected.Sitecore.Web.Common.Classes.Caching;
using DiSConnected.Sitecore.Web.Common.Classes.Interfaces;
using Newtonsoft.Json.Serialization;
using WebApi.OutputCache.V2;
[assembly: WebActivatorEx.PostApplicationStartMethod(typeof(WebApiConfig), "Start")]
namespace DiSConnected.Sitecore.Web.App_Start
{
public class WebApiConfig
{
    /// <summary>
    /// WebActivator entry point: configures the Web API pipeline and then
    /// registers all item-to-DTO mappings found in this assembly.
    /// </summary>
    public static void Start()
    {
        GlobalConfiguration.Configure(Register);
        SetupMappingsItemClassToItemClassDTO();
    }

    /// <summary>
    /// Shared output-cache instance used when service output caching is enabled.
    /// </summary>
    public static EndpointCache EndpointCache = new EndpointCache();

    /// <summary>
    /// Configures CORS, attribute routing, the output-cache provider, the
    /// default route, and JSON-only response formatting.
    /// </summary>
    /// <param name="config">The global HTTP configuration to populate.</param>
    public static void Register(HttpConfiguration config)
    {
        // initialize and map all attribute routed Web API controllers (note: this does not enable MVC attribute routing)
        var cors = new EnableCorsAttribute("*", "*", "*") { SupportsCredentials = true };//TODO: Limit domains to acceptable domains
        config.EnableCors(cors);
        config.MapHttpAttributeRoutes();

        RegisterCacheProvider(config);

        config.Routes.MapHttpRoute(
            name: "DefaultApi",
            routeTemplate: "content_delivery/api/{controller}/{id}",
            defaults: new { id = RouteParameter.Optional }
        );

        // Force JSON responses only (no XML): configure a single camelCase
        // JSON formatter and keep a direct reference to it rather than
        // re-querying the formatter collection after adding it.
        var jsonFormatter = new JsonMediaTypeFormatter();
        jsonFormatter.SerializerSettings.ContractResolver = new CamelCasePropertyNamesContractResolver();
#if (DEBUG)
        // Pretty-print JSON in debug builds to ease manual inspection.
        jsonFormatter.SerializerSettings.Formatting = Newtonsoft.Json.Formatting.Indented;
#endif
        config.Formatters.Clear();
        config.Formatters.Add(jsonFormatter);
    }

    /// <summary>
    /// Registers the output-cache provider, honoring the
    /// "DisableServiceOutputCache" appSetting: when it parses as true a
    /// no-op cache is installed, otherwise the shared EndpointCache is used.
    /// </summary>
    private static void RegisterCacheProvider(HttpConfiguration config)
    {
        var disableCacheConfigString = ConfigurationManager.AppSettings["DisableServiceOutputCache"];

        // TryParse leaves 'disable' false when the setting is missing or
        // malformed, so caching stays enabled by default.
        bool disable;
        bool.TryParse(disableCacheConfigString, out disable);

        if (disable)
        {
            config.CacheOutputConfiguration().RegisterCacheOutputProvider(() => new NullOutputCache());
        }
        else
        {
            config.CacheOutputConfiguration().RegisterCacheOutputProvider(() => EndpointCache);
        }
    }

    /// <summary>
    /// Finds every concrete type in this assembly that implements
    /// IDtoMapping, instantiates it, and invokes its CreateMappings method
    /// to register the item-to-DTO mappings.
    /// </summary>
    public static void SetupMappingsItemClassToItemClassDTO()
    {
        var baseAssembly = Assembly.Load("DiSConnected.Sitecore.Web");

        // Skip interfaces and abstract classes: Activator.CreateInstance
        // would throw for those even though their interface list contains
        // IDtoMapping.
        var baseAssemblyDtoTypes = baseAssembly.GetTypes()
            .Where(t => !t.IsInterface
                        && !t.IsAbstract
                        && t.GetInterfaces().Contains(typeof(IDtoMapping)));

        foreach (var dtoMappingType in baseAssemblyDtoTypes)
        {
            var currentType = Activator.CreateInstance(dtoMappingType);
            var createMappingsMethod = dtoMappingType.GetMethod("CreateMappings");
            createMappingsMethod.Invoke(currentType, null);
        }
    }
}
} | d65cf51f4d204235b43d5d1f591092e0db60c0e6 | [
"JavaScript",
"C#",
"Text",
"Markdown"
] | 30 | JavaScript | aleksandartraja/disconnected | 20688bc4e5534b0a120383f7ce96f44b6adb6f8b | 66f510fc2deed0bb466e4518c9ece7b126baf5e4 |
refs/heads/master | <repo_name>1156958090/garbageSortApi<file_sep>/controller/UserController.js
const model = require('../models') // 引入数据库操作模块
const jwt = require('jsonwebtoken') // 用于签发、解析`token`
const crypto = require('crypto') // 加密密码
const nodemailer = require('../nodemail')
const JudgeParams = require('./checkParams')
const USER = model.user // 用户数据
const VFCODE = model.vfcode // 验证码数据库
const APIError = require('../rest').APIError
//注册接口
async function register(ctx, next) {
let email = ctx.request.body.email
let password = ctx.request.body.password
let pwdRepeat = ctx.request.body.pwdRepeat
let code = ctx.request.body.code
let md5 = crypto.createHash('md5')
JudgeParams(ctx.request.body, 4)
//判断两次的密码是否相同,若相同则注册信息正确
if (password == pwdRepeat) {
// 向数据库中查找数据
let result = await USER.findAll({
where: {
email: email
}
})
// 如果查找到了代表注册过了
if (result.length > 0) {
ctx.rest({
code: '10005',
massage: 'email has been registed'
})
} else {
// 如果数据库中没有该用户那么则鉴定验证码
ctx.rest({
code: '1',
massage: 'registe successfully'
})
// 从数据库查询相应的验证码
let result2 = await VFCODE.findOne({
where: {
email: email
}
})
// if(Date.now() > result2.updatedAt + 300000){
// ctx.rest({
// code: '10011',
// massage: 'verification code has expired'
// })
// }
if (result2 && result2.code == code) {
// 判断成功之后加密密码储存进数据库
password = md5.update(password).digest('hex')
let res = await USER.create({
email: email,
password: <PASSWORD>,
gender: '男'
})
console.log(JSON.stringify(res))
}
else {
// 验证码不正确
ctx.rest({
code: '10004',
massage: 'verification code is incorrent'
})
}
}
} else {
// 返回两次密码不相同
ctx.rest({
code: '10010',
massage: 'twice password is different'
})
}
}
//登录接口
async function login(ctx, next) {
let email = ctx.request.body.email
let password = ctx.request.body.password
let md5 = crypto.createHash('md5')
let token
JudgeParams(ctx.request.body, 2)
//加密密码
password = md5.update(password).digest('hex')
let result = await USER.findOne({
where: {
email: email
}
})
// 如果有结果
if (result) {
// 判断密码是否一样
if (password == result.password) {
// 创建token
token = jwt.sign(
{ email: email, }, // 需要加密的payload
'thisIsSecret', // privateKey
{ expiresIn: 60 * 60 * 4 } // 过期时间
)
// 返回token
ctx.rest({
code: '1',
massage: 'login ok',
data: {
token: token,
expire: '4h'
}
})
console.log(token)
} else {
ctx.rest({
code: '10006',
massage: 'password is incorrent'
})
}
} else {
ctx.rest({
code: '10009',
massage: 'email not registered '
})
}
}
//找回密码接口
async function getPwdBack(ctx,next) {
let md5 = crypto.createHash('md5')
let {
email,
code,
password,
pwdRepeat} = ctx.request.body
// 加密密码
password = md5.update(password).digest('hex')
// 通过邮箱验证码的方式找回密码
let result = await VFCODE.findOne({
where: {
email: email
}
})
if(result && result.code == code)
{
let update = await USER.findOne({
where:{
email:email
}
})
update.password = <PASSWORD>
update.save()
// 验证成功,可以修改密码
ctx.rest({
code:'10022',
massage:'password has updated successfully'
})
}
else{
if(result.code !== code)
{
ctx.rest({
code: '10004',
massage: 'verification code is incorrent'
})
}
ctx.rest({
code: '10009',
massage: 'email not registered '
})
}
}
//获取注册验证码接口
async function getVfCode(ctx, next) {
let email = ctx.request.body.email // 获取到用户输入的email
let code = MathRand() // 用于生成随机验证码
JudgeParams(ctx.request.body, 1)
// 查看用户是否已经注册
var result = await USER.findAll({
where: {
email: email
}
});
// 如果注册了就直接返回已经注册
if (result.length > 0) {
ctx.rest({
code: '10005',
massage: 'email has been registed'
})
} else {
// 响应成功
ctx.rest({
code: '1',
massage: 'email has passed checktion and eamil send successfully'
})
let mail = {
// 发件人
from: '<<EMAIL>>',
// 主题
subject: '请您签收您的验证码', //邮箱主题
// 收件人
to: email,// 前台传过来的邮箱
// 邮件内容,HTML格式
text: 'Your verification code is ' + code // 发送验证码
}
// 查询是否第一次请求验证码
let result2 = await VFCODE.findAll({
where: {
email: email
}
})
if (result2.length > 0) {
// 不是第一次就更新验证码
let data = await VFCODE.findOne({
where: {
email: email,
}
})
data.code = code
data.updatedAt = Date.now()
// 将新的验证码储存到数据库中
data.save()
.then(res => {
console.log(res)
})
.catch(err => {
console.log(err)
})
} else {
// 是第一次就将信息与将验证码存入数据库
await VFCODE.create({
email: email,
code: code
})
}
nodemailer(mail)
}
}
// 获取找回密码验证码
// 还没处理没注册的人也能获取验证码
async function getVfCodeToFind(ctx, next) {
let email = ctx.request.body.email // 获取到用户输入的email
let code = MathRand() // 用于生成随机验证码
JudgeParams(ctx.request.body, 1)
var result = await USER.findAll({
where: {
email: email
}
});
// 响应成功
ctx.rest({
code: '1',
massage: 'email has passed checktion and eamil send successfully'
})
let mail = {
// 发件人
from: '<<EMAIL>>',
// 主题
subject: '请您签收您的验证码', //邮箱主题
// 收件人
to: email,// 前台传过来的邮箱
// 邮件内容,HTML格式
text: 'Your verification code is ' + code // 发送验证码
}
// 查询是否第一次请求验证码
let result2 = await VFCODE.findAll({
where: {
email: email
}
})
if (result2.length > 0) {
// 不是第一次就更新验证码
let data = await VFCODE.findOne({
where: {
email: email,
}
})
data.code = code
data.updatedAt = Date.now()
// 将新的验证码储存到数据库中
data.save()
.then(res => {
console.log(res)
})
.catch(err => {
console.log(err)
})
} else {
// 是第一次就将信息与将验证码存入数据库
await VFCODE.create({
email: email,
code: code
})
}
nodemailer(mail)
}
// 随机数生成函数
function MathRand() {
var num = "";
for (var i = 0; i < 6; i++) {
num += Math.floor(Math.random() * 10);
}
return num
}
async function hello(ctx,next){
ctx.body = "hello world !"
}
module.exports = {
'POST /api/user/register': register,
'POST /api/user/login': login,
'POST /api/user/getPwdBack': getPwdBack,
'POST /api/user/getVfCode': getVfCode,
'POST /api/user/getVfCodeToFind':getVfCodeToFind,
'GET /hello':hello
}<file_sep>/models/user.js
const db = require('../db')
module.exports = db.defineModel('users', {
email: db.STRING(50),
password:db.STRING(50),
gender: db.ENUM('男','女'),
}, {
timestamps: false
});
<file_sep>/models/vfcode.js
const db = require('../db')
module.exports = db.defineModel('vfcodes', {
email: db.STRING(50),
code:db.INTEGER
}, {
timestamps: false
});
<file_sep>/app.js
const Koa = require('koa2')
const koaBody = require('koa-body') // body解析模块
const jwtKoa = require('koa-jwt') // 用于路由权限控制
const jwt = require('jsonwebtoken') // 用于签发、解析`token`
const controller = require('./controller') // 路由控制模块
const restify = require('./rest').restify
const path = require('path')
const dir = path.join(__dirname + '/imageTmp')
const app = new Koa()
// 声明不需要token的接口
app.use(
jwtKoa({
secret: 'thisIsSecret'
}).unless({
path: [
/^\/api\/user\/login/,
/^\/api\/user\/register/,
/^\/api\/user\/getVfCode/,
/^\/api\/user\/garbageSort/,
/^\/api\/user\/getVfCodeToFind/,
/^\/api\/user\/getPwdBack/,
/^\/hello/
]
})
);
//检查接口是否是否通过authorization验证
app.use((ctx, next) => {
if (ctx.header && ctx.header.authorization) {
const parts = ctx.header.authorization.split(' ');
if (parts.length === 2) {
//取出token
const scheme = parts[0];
const token = parts[1];
if (/^Bearer$/i.test(scheme)) {
//jwt.verify方法验证token是否有效
jwt.verify(token, 'thisIsSecret', function (err, decoded) {
// 有效的话就把解码出来的值放到request中方便之后的路由使用
ctx.getEmail = () => {
return {
email: decoded.email
}
}
// 过期了之后就重新发送token
if (err && err.name == 'TokenExpiredError') {
// 目前用的方法是直接解码获取email,再次生成token发送,按理来说需要refresh_token
let data = jwt.decode(token)
let tokenAgain = jwt.sign({ email: data.email }, 'thisIsSecret', { expiresIn: 60 * 60 * 4 })
ctx.response.type = 'application/json';
ctx.body = {
code: '11111',
message: 'token expried',
data: {
token: tokenAgain,
exprie: '4h'
}
}
}
});
}
}
}
return next().catch(err => {
if (err.status === 401) {
ctx.status = 401;
ctx.body =
'Protected resource, use Authorization header to get access\n';
} else {
throw err;
}
});
});
app.use(koaBody({
multipart: true,
formidable:
{
uploadDir: dir,
maxFieldsSize: 200 * 24 * 1024
}
}))
app.use(restify())
app.use(controller())
app.listen(3000, () => {
console.log('Please interview http://localhost:3000')
})
<file_sep>/controller/OrderController.js
const jwt = require('jsonwebtoken') // 用于签发、解析`token`
const MODEL = require('../models')
const JudgeParams = require('./checkParams')
const USER = MODEL.user
const ORDER = MODEL.order
const query = require('../db').query
const Op = require('../db').Op
// 创建发布订单
async function pubOrders(ctx, next) {
let { addres,
contact,
telephone,
taketime,
content,
weight,
remarks,
longitude, // 经度
latitude, // 纬度
city,
} = ctx.request.body // 忘记了可以直接用解构的方式来初始化
let email = ctx.getEmail().email
JudgeParams(ctx.request.body, 9)
// 创建一个订单号,订单号由时间戳,用户邮箱前四位,订单编号组成
let orderid = await varifyOrderid()
let newPub = await ORDER.create({
orderid: orderid,
email: email,
emailworker: null,
city: city,
addres: addres,
contact: contact,
telephone: telephone,
taketime: taketime,
content, content,
weight: weight,
remarks: remarks,
longitude: longitude,
latitude: latitude,
overtime: Date.now() + 30 * 60 * 1000,
completed: 0
})
ctx.rest({
code: '1',
message: 'publish OK'
})
// 由于生成订单号是生成随机数,而随机数又有重复的可能,即使是十六位的数字,基数大了之后有重复的几率还是会升高
// 所以还是要验证orderid是否重复
async function varifyOrderid() {
let orderid = Math.random() * Math.pow(10, 16)
console.log(orderid + '这是orderid')
// 先查找数据库是否有该订单号如果有的话就换一个
let result = await ORDER.findOne({
where: {
orderid: orderid
}
})
if (result) {
var result2 = varifyOrderid()
return result2
}
else {
return orderid
}
}
}
// 获取附近的订单
async function getDliOrders(ctx, next) {
const { longitude,
latitude,
limit,
offset
} = ctx.query
const email = ctx.getEmail().email
console.log(email)
// 这里的sql需要用到自定义函数跟sql语句符合,暂时用sequelize不知道怎么实现,所以选择使用算了原生的查询
// 返回的是距离骑手八千米以内的订单
// sequelize的写成如下形式略微复杂,失去了封装的意义。所以我打算直接使用原生sql语句
const sql = `select *
from orders
where _fnGetdistance(${longitude},${latitude},longitude,latitude) < 8000
limit ${limit}
offset ${offset * 10}`
console.log(sql)
let result = await query(sql, { row: false, model: ORDER })
// 如果附近有订单就给每一个订单加一个distance字段,好让前端调用直接渲染
if (result) {
// 给每个订单添加一个到骑手的距离
/**
* 由于处理数据是个异步过程所以用到了promise
* 很麻烦的一点是直接打印result会显示你查询到的内容
* 但是如果打印如下的单个item就会发现他其实很复杂的一个对象,所以直接向result数组插入是没有任何意义的他具体的数据在datavalues中
*/
var time = new Promise((resolve, reject) => {
for (let item of result) {
item.dataValues.distance = Distance(latitude, longitude, item.latitude, item.longitude)
}
resolve(result)
})
// await返回数据
ctx.rest({
data: await time.then(res => {
return res
})
})
} else {
ctx.rest({
code: '11002',
message: 'There are no new orders nearby'
})
}
}
async function takeOrder(ctx, next) {
let {
email,
orderid
} = ctx.query
// 获取到骑手的email,然后将骑手的email加到订单的emailworker中
const emailworker = ctx.getEmail().email
// 这里应该用的是订单id,但是没有生成
let result = ORDER.findAll({
email: email,
orderid: orderid
})
// 如果查询到了结果
if (result) {
result.emailworker = emailworker
// 将骑手的email加入到该订单中
result.save()
.then(res => {
ctx.rest({
code: '1',
message: 'OK',
data: {
res
}
})
})
.catch(err => {
console.log(err)
})
} else {
ctx.rest({
code: '11003',
message: 'email or orderid is incorrent'
})
}
}
// 获取用户所有的订单
/**
* 获取到订单之后分三种
* 1,如果emailworker为空代表下单了还没有人接单
* 2,如果emailworker不为空但是completed为false代表有人接了单但是还没有完成
* 3,如果emailworker不为空且completed为true代表已完成的订单
*/
async function getUserOrder(ctx, next) {
let email = ctx.getEmail()
let result = await ORDER.findAll({
where: {
email: email,
}
})
if (result.length > 0) {
ctx.rest({
code: '1',
message: 'Query OK',
data: {
result
}
})
} else {
ctx.rest({
code: '11101',
message: 'user has not order'
})
}
}
async function updateUserOrder(ctx, next) {
// 这里应该是放在请求的路径中直接把订单号传过来利用update
// 这个是获取附加在路径后面:后面的参数也就是user/:orderid
let orderid = ctx.params.orderid
let paramsKeys = Object.keys(ctx.request.body)
let paramsVlaues = Object.keys(ctx.request.body)
let result = await ORDER.findOne({
where: {
orderid: orderid
}
})
// 修改这次更新的时间
result.updatedAt = Date.now()
// 直接遍历需要修改的属性,省去了全部重新赋值
for (let i in paramsKeys) {
result[i] = ctx.request.body[i]
}
// 保存修改
result.save()
ctx.rest({
code: '1',
message: 'change OK'
})
}
/**
*
* @param {double} lat1 骑手的经度
* @param {double} lng1 骑手的纬度
* @param {double} lat2 订单的经度
* @param {double} lng2 订单的纬度
*/
function Distance(lat1, lng1, lat2, lng2) {
var radLat1 = lat1 * Math.PI / 180.0;
var radLat2 = lat2 * Math.PI / 180.0;
var a = radLat1 - radLat2;
var b = lng1 * Math.PI / 180.0 - lng2 * Math.PI / 180.0;
var s = 2 * Math.asin(Math.sqrt(Math.pow(Math.sin(a / 2), 2) +
Math.cos(radLat1) * Math.cos(radLat2) * Math.pow(Math.sin(b / 2), 2)));
s = s * 6378.137;// EARTH_RADIUS;
s = Math.round(s * 10000) / 10000;
return s;
}
module.exports = {
'POST /api/user/pubOrders': pubOrders,
'GET /api/delivery/getDliOrders': getDliOrders,
'GET /api/delivery/takeOrders': takeOrder,
'GET /api/user/getUserOrder': getUserOrder,
'UPDATE /api/user/updateUserOrder': updateUserOrder
}
/**
* 我认为的订单逻辑:
* 用户通过签到,充值等任务或者活动获得相应的积分,而这些积分可以被用于下单,每一单按多少垃圾等比例的需要多少积分
* 而骑手可以通过垃圾的多少(具体是可回收的有多少)获得相应的积分,骑手端的积分可以转化成相应的收入,但是用户端是不行的
*/<file_sep>/controller/garbageSort.js
var AipImageClassifyClient = require("../src/AipImageClassify");
// 设置APPID/AK/SK
var APP_ID = "17208555";
var API_KEY = "<KEY>";
var SECRET_KEY = "<KEY>";
var Request = require('request')
// 新建一个对象,建议只保存一个对象调用服务接口
var client = new AipImageClassifyClient(APP_ID, API_KEY, SECRET_KEY);
var fs = require('fs');
// // 如果有可选参数
// var options = {};
// options["baike_num"] = "5";
// // 带参数调用通用物体识别
// client.advancedGeneral(image, options).then(function(result) {
// console.log(result.result[0].keyword);
// }).catch(function(err) {
// // 如果发生网络错误
// console.log(err);
// });;
async function garbageSort(ctx, next) {
var file = ctx.request.files.file
var image = fs.readFileSync(file.path).toString("base64");
var a = new Promise((resolve, reject) => {
client.advancedGeneral(image).then(function (result) {
console.log(result)
Request.get({
url:'https://service.xiaoyuan.net.cn/garbage/index/search?kw=' + encodeURI(result.result[0].keyword),
timeout:3000,
},function (err, res, body) {
console.log('error:', err); // Print the error if one occurred
console.log('statusCode:', res && res.statusCode); // Print the response status code if a response was received
console.log('body:', body); // Print the HTML for the Google homepage.
resolve(JSON.stringify(body))
})
}).catch(function (err) {
// 如果发生网络错误
console.log(err);
});
})
var result = await a.then(res => {
return res
})
ctx.rest({
code: '1',
message: 'OK',
data: JSON.stringify(result)
})
}
module.exports = {
'POST /api/user/garbageSort': garbageSort
}<file_sep>/models/order.js
const db = require('../db')
module.exports = db.defineModel('orders', {
orderid:db.BIGINT,
email: db.STRING(50),
emailworker: db.STRING(50),
address: db.STRING(70),
contact: db.STRING(20),
telephone: db.BIGINT,
taketime: db.BIGINT,
overtime: db.BIGINT,
completed: db.BIGINT,
city:db.STRING(50),
content: db.STRING(70),
weight: db.INTEGER,
remarks: db.STRING(70),
longitude:db.BIGINT,
latitude:db.BIGINT,
}, {
timestamps: false
});
<file_sep>/authorization.js
async function authorization(ctx, next) {
if (ctx.header && ctx.header.authorization) {
const parts = ctx.header.authorization.split(' ');
if (parts.length === 2) {
//取出token
const scheme = parts[0];
const token = parts[1];
if (/^Bearer$/i.test(scheme)) {
//jwt.verify方法验证token是否有效
jwt.verify(token, 'thisIsSecret', function (err, decoded) {
// 有效的话就把解码出来的值放到request中方便之后的路由使用
ctx.getEmail = () => {
return {
email: decoded.email
}
}
// 过期了之后就重新发送token
if (err.name == 'TokenExpiredError') {
// 目前用的方法是直接解码获取email,再次生成token发送,按理来说需要refresh_token
let data = jwt.decode(token)
let tokenAgain = jwt.sign({ email: data.email }, '<PASSWORD>IsSecret', { expiresIn: 60 * 60 * 4 })
ctx.response.type = 'application/json';
ctx.body = {
code: '11111',
message: 'send token successfully',
data: {
token: tokenAgain,
exprie: '4h'
}
}
}
});
}
}
}
return next().catch(err => {
if (err.status === 401) {
ctx.status = 401;
ctx.body =
'Protected resource, use Authorization header to get access\n';
} else {
throw err;
}
});
}<file_sep>/controller/checkParams.js
const APIError = require('../rest').APIError
/**
* 检查参数是否匹配
* @param {Object} obj 要查询参数个数的对象
* @param {Number} length 要查询参数的应有个数
*/
function JudgeParams(obj, length) {
if (Object.keys(obj).length != length) {
throw new APIError(null, 'Lack of parameter')
}
}
module.exports = JudgeParams | 84375563ea4d79ae661d234a230b2016ca1eb2f3 | [
"JavaScript"
] | 9 | JavaScript | 1156958090/garbageSortApi | 1234c71752307782581e45db13946e951015112c | 5767fec761e91c976c9f028d8e295795cc41d967 |
refs/heads/master | <repo_name>MoseyM/CLI_Todo_List<file_sep>/todo.php
<?php
//this is the function that actually adds the items to the list. As a string then pushed to the array $items
function listItems($list)
{
$string = '';
foreach($list as $key => $item){
$key++;
$string .= "[{$key}] {$item}".PHP_EOL;
$items[] = $string;
}
return $string;
}
//this checks to see if the response is going to be uppercase or lowercase. Upper for the options and lower for items that are added to the list.
function getInput($upper = false)
{
// Return filtered STDIN input
if($upper) {
return strtoupper(trim(fgets(STDIN)));
}
else{
return trim(fgets(STDIN));
}
}
//this will ask the user for input on what they want to do for the list
function askUser() {
fwrite(STDOUT, '(N)ew item, (R)emove item, (S)ort List, (O)pen file, s(A)ve file, (Q)uit : ');
$input = getInput(true);
return $input;
}
//this will ask the user if they want to order the list and will sort based on options
function sort_menu($list) {
fwrite(STDOUT, "Pick an option: (A)-Z, (Z)-A, (O)rder entered, (R)everse order (C)ancel: ");
$option = getInput(true);
switch ($option) {
case 'A':
sort($list);
break;
case 'Z':
rsort($list);
break;
case 'O':
ksort($list);
break;
case 'R':
krsort($list);
break;
case 'C':
break;
default:
break;
}
return $list;
}
//Additional function that gives the user the option to add the new item to the end or beginning of the list
function addToList($list, $newItem) {
fwrite(STDOUT,"Would you like to add this to the (B)eginning or (E)nding of the List? ");
$answer = getInput(true);
switch($answer) {
case 'B':
array_unshift($list, $newItem);
break;
case 'E':
array_push($list, $newItem);
break;
default:
array_push($list, $newItem);
break;
}
return $list;
}
//added password to allow for superuser status. Only appears when F or L is pressed.
function powerUser() {
fwrite(STDOUT, "Please enter password: ");
$password = '<PASSWORD>';
$passwordInput = trim(fgets(STDIN));
if($password === $passwordInput) {
return 'You have entered PowerUser';
}
else{
echo "Invalid Password, Exiting".PHP_EOL;
exit(0);
}
}
//this will locate the file and return its contents which to be the length of the whole document (filesize($fileLocation))
function openFile() {
//setting variable to the exact location of the file with user input for the actual file name
$fileLocation = 'data/'.trim(fgets(STDIN));
//this just gives the computer a location for the file
$handle = fopen($fileLocation, 'r');
$contents = fread($handle, filesize($fileLocation));
fclose($handle);
return $contents;
}
// Create array to hold list of todo items
$items = [];
do {
//added echo command before switch so list is always shown
echo listItems($items);
//Had to assign function to another variable.
$choice = askUser();
switch ($choice) {
case 'N':
echo 'Enter item: ';
//try to understand this code below.
$newToDo = ucfirst(getInput());
if(empty($items)){
$items[] = $newToDo;
}
else{
$items = addToList($items, $newToDo);
}
break;
case 'R':
echo 'Enter item number to remove: ';
$key = getInput();
$key--;
// Removes the item from array
unset($items[$key]);
break;
case 'S':
$items = sort_menu($items);
break;
case 'F':
echo powerUser().PHP_EOL;
sleep(1);
if(empty($items)){
echo "Array is Empty".PHP_EOL;
}
else{
echo "First Item Being Deleted".PHP_EOL;
array_shift($list);
sleep(2);
echo "Complete...".PHP_EOL;
}
break;
case 'L':
echo powerUser().PHP_EOL;
if(empty($items)){
echo "Array is Empty".PHP_EOL;
}
else{
echo "Removing Last Item In List".PHP_EOL;
array_pop($list);
sleep(2);
echo "Complete...".PHP_EOL;
}
break;
case 'O':
fwrite(STDOUT, "Please enter File Location: ");
$theContents = explode("\n", openFile());
$items = array_merge($theContents, $items);
echo "Information Being Added to the To Do List.".PHP_EOL;
sleep(2);
break;
case 'A':
$savedFile = 'data/list.txt';
if (filesize($savedFile) > 0) {
fwrite(STDOUT, "The file will be OverWritten. Are you sure? (Y or N)");
$confirm = getInput(true);
if($confirm == 'Y'){
echo "File Saved..".PHP_EOL;
$handle = fopen($savedFile, 'w');
foreach($items as $todo){
fwrite($handle, $todo.PHP_EOL);
}
}
else {
echo "Save Canceled..".PHP_EOL;
sleep(1);
}
}
break;
case 'Q':
echo 'GoodBye!'.PHP_EOL;
break 2;
default:
echo "Please choose a valid option!".PHP_EOL;
}
} while ($choice != 'Q');
<file_sep>/README.md
Beginning project of my TODO list.
-----------------
<NAME>. | f4c4d0f5f596b3a502cbcda94740a77b88985b70 | [
"Markdown",
"PHP"
] | 2 | PHP | MoseyM/CLI_Todo_List | fce6f5abf0879a9e5a07a676ef8fe447c1fddc38 | 547342a09ab994862e28862fbff5cc56b16dda77 |
refs/heads/master | <file_sep>import React, { Component } from 'react';
class Taxidermy extends Component {
render(){
return(
<div>
<h1>Todd etc...</h1>
</div>
)
}
}
export default Taxidermy;
<file_sep>import React from 'react';
import { Switch, Route } from 'react-router-dom';
import Home from './Routes/Home';
import TradMedia from './Routes/TradMedia';
import Photography from './Routes/Photography';
import Jewelry from './Routes/Jewelry';
import Costumes from './Routes/Costumes';
import Pets from './Routes/Pets';
import Taxidermy from './Routes/Taxidermy';
import Commissions from './Routes/Commissions';
import About from './Routes/About';
import Contact from './Routes/Contact';
const Main = () => (
<Switch>
<Route exact path='/' component={ Home }></Route>
<Route path='/Traditional Media' component={ TradMedia }></Route>
<Route path='/Photography' component={ Photography }></Route>
<Route path='/Jewelry' component={ Jewelry }></Route>
<Route path='/Costumes' component={ Costumes }></Route>
<Route path='/AromatheraPets' component={ Pets }></Route>
<Route path='/Taxidermy' component={ Taxidermy }></Route>
<Route path='/Commissions' component={ Commissions }></Route>
<Route path='/About the Artist' component={ About }></Route>
<Route path='/Contact' component={ Contact }></Route>
</Switch>
)
export default Main;
| a8cd348a662e6c776b033657f7577e5f37d9f455 | [
"JavaScript"
] | 2 | JavaScript | ekg-91/wickedwolf | f76340af70486dafcdfed8eb02148cd0700a420c | a2767352f0ddfab8e42d494848e39749e133496f |
refs/heads/master | <repo_name>Han9527/Phylogenomics<file_sep>/filter_metal_gene.py
metal_id=[]
with open("new_metal_gene.txt","r") as f1:
for line in f1.readlines():
metal_id.append(line.strip("\n"))
metal_id
with open("barley.ortho.txt", "r") as f2:
for i in f2.readlines():
ortho_group=i.split("\t")[0]
ortho_line=i.split("\t")[1].strip("\n")
#print(ortho_line)
for gene in ortho_line.split(","):
single_gene=gene.strip(" ")
if single_gene in metal_id:
metal_ortho=ortho_group+": "+single_gene
gene_ortho.append(metal_ortho)
with open("gene_ortho.txt", "w") as f3:
for group in gene_ortho:
#print(group)
f3.write(group+"\n")
<file_sep>/orthofinder.sh
date
orthofinder -f /home/sda1/workspace/MetalRNA/18.comparative_genome/01.aa/14.only_barley/00.raw_data/ \
-S diamond \
-M msa \
-T fasttree \
-t 60 \
#sleep 2s
date
| 89b35a227d2b2b502d54a05e28a87f423c470106 | [
"Python",
"Shell"
] | 2 | Python | Han9527/Phylogenomics | 07dc4493f52b9d32753488a73d4ba416e819aa34 | 8ecd098666a038f2dbe130b5cbff9ac44fdf645b |
refs/heads/main | <repo_name>umilton-git/NFT-To-Testnet<file_sep>/README.md
# NFT-To-Testnet
A simple setup to deploy an NFT to a testnet, made for MPA workshop.
Credit for the simple contract: https://www.youtube.com/watch?v=ZH_7nEIJDUY
# Node.js Repositories Needed:
- truffle
- @truffle/hdwallet-provider or truffle-hdwallet-provider
- @openzeppelin/contracts@3.4.0
- web3
Note: installing these repositories individually is only necessary if you're creating your own project. If you're using mine, simply use "npm install" and the package-lock.json will handle everything for you.
Have fun in the Hackathon!
Video with info on deploying and code walkthrough:
https://www.youtube.com/watch?v=WNhW3xZDvOU
<file_sep>/migrations/2_deploy_nft_contract.js
const TestnetNFT = artifacts.require("TestnetNFT")
module.exports = async function(deployer){
await deployer.deploy(TestnetNFT)
} | f15c0166d7d67b5aa4866e89ee0e3131372089af | [
"Markdown",
"JavaScript"
] | 2 | Markdown | umilton-git/NFT-To-Testnet | cbbd147a8a10a8f17fb31df771cd9cea00a42bd7 | f3a88b8e34a86ab6f8cd5143aa27663cbbd7fcc9 |
refs/heads/master | <file_sep>using System.Linq;
namespace Methods
{
public static class SpecialString
{
//Regular Method
//public static bool IsState(string source)
//Extention Method
public static bool IsState(this string source)
{
string[] stateCodes =
{
"AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DE", "DC",
"FL", "GA", "HI", "ID", "IL", "IN", "IA", "KS", "KY",
"LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT",
"NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "OH",
"OK", "OR", "PA", "RI", "SC", "DS", "TN", "TX", "UT",
"VT", "VA", "WA", "WV", "WI", "WY"
};
if (source == null)
return false;
source = source.ToUpper();
//Converted foreach loop into LINQ Exression
return stateCodes.Any(item => source == item);
//foreach (var item in stateCodes)
//{
// if (source == item)
// {
// return true;
// }
//}
}
}
}<file_sep>using System;
//Added the Methods namespace for the extention methods
using Methods;
namespace ExtentionMethod
{
/// <summary>
/// Drives the IsState() extension method with a mix of valid, invalid and
/// mixed-case inputs.
/// </summary>
internal class Program
{
    private static void Main()
    {
        // Same inputs, same order as before - just data-driven.
        var samples = new[] { "co", "WA", "AW", "AL", "NV", "NM", "Mi", "ME", "hi" };
        foreach (var state in samples)
        {
            ShowTest(state);
        }
        Console.ReadLine();
    }

    /// <summary>Prints the input together with its IsState() verdict.</summary>
    public static void ShowTest(string state)
    {
        // IsState() is the extension method from the Methods namespace.
        Console.WriteLine($"You entered: {state}, It is a state: {state.IsState()}");
    }
}
}
<file_sep>using System;
namespace SimpleDelegateExample
{
/// <summary>Demonstrates built-in Func delegates replacing a custom delegate type.</summary>
internal class Program
{
// Func<int, int, int> makes this hand-rolled delegate type unnecessary.
//public delegate int MyDelegate(int a, int b);
/// <summary>Adds two integers; bound to a Func delegate in Main.</summary>
public static int Add(int a, int b)
{
return a + b;
}
/// <summary>Invokes the supplied delegate with fixed arguments and prints the result.</summary>
public static void CallDelegate(Func<int, int, int> myDelegate)
{
Console.WriteLine($"{myDelegate(272, 153)}\n");
}
private static void Main()
{
//Instantiate new class
var michael = new NewClass("<NAME>");
// Bind the Add method to a delegate and call it directly...
Func<int, int, int> myDelegate = Add;
Console.WriteLine($"{myDelegate(271, 152)}");
// ...and indirectly, by passing the delegate to another method.
CallDelegate(myDelegate);
// Lambda that formats the two numbers and their sum into the given template.
Func<string, int, int, string> showMe = (a, b, c) => string.Format(a, b, c, (b + c));
Console.WriteLine($"Result of calling the \'ShowMe\' Function: {showMe("{0} + {1} = {2}", 3, 5)}");
//Print out the myName variable from instantiation of the new class
Console.WriteLine($"\nMy Name is: {michael.myName}");
Console.ReadLine();
}
}
}
<file_sep>using System;
namespace LambdaExamples
{
/// <summary>
/// Side-by-side demos: each classic method next to an equivalent lambda
/// assigned to a Func/Action delegate. Output is identical to the original.
/// </summary>
internal class Program
{
    private static void Main()
    {
        // Subtract: named method vs. two-argument lambda.
        Console.WriteLine($"Subtract Method: {Subtract(5, 2)}");
        Func<int, int, int> subtractFn = (x, y) => x - y;
        Console.WriteLine($"subtract Lambda: {subtractFn(5, 2)}\n");

        // Multiply: single-argument lambda.
        Console.WriteLine($"Multiply Method: {Multiply(5)}");
        Func<int, int> multiplyFn = x => x * 5;
        Console.WriteLine($"multiply Lambda: {multiplyFn(5)}\n");

        // Display: Action<string> performs a side effect instead of returning.
        Display("<NAME>");
        Action<string> displayFn = text => Console.WriteLine($"display Lambda: {text}");
        displayFn("<NAME>\n");

        // DisplayWarning: parameterless Action.
        DisplayWarning();
        Action warnFn = () => Console.WriteLine("displayWarning Lambda: Warning\n");
        warnFn();

        // SimpleMath: three arguments; (x + y) / z is integer division before
        // the implicit conversion to decimal.
        Console.WriteLine($"SimpleMath Method: {SimpleMath(1, 3, 3)}");
        Func<int, int, int, decimal> simpleMathFn = (x, y, z) => (x + y) / z;
        Console.WriteLine($"simpleMath Lambda: {simpleMathFn(1, 2, 3)}\n");

        Console.ReadLine();
    }

    public static int Subtract(int a, int b) => a - b;

    public static int Multiply(int a) => a * 5;

    public static void Display(string value) => Console.WriteLine($"Display Method: {value}");

    public static void DisplayWarning() => Console.WriteLine("DisplayWarning Method: Warning");

    public static decimal SimpleMath(int a, int b, int c) => (a + b) / c;
}
}
<file_sep>using System;
using System.Linq;
using System.Xml.Linq;
namespace XmlDataQuery
{
/// <summary>
/// Loads the Customers sample XML and prints contact names for customers in
/// Mexico D.F.
/// </summary>
class Program
{
    static void Main()
    {
        // Machine-specific location of the sample data.
        // SurfaceBook keeps it on f:, SurfacePro 3 on d:.
        //var customers = XDocument.Load(@"f:\XML_Files\Customers.xml");
        var customers = XDocument.Load(@"d:\XML_Files\Customers.xml");
        // Bug fix: casting XAttribute to string yields null when the
        // attribute is missing; the original .Value dereference threw a
        // NullReferenceException for such elements.
        var xml = from x in customers.Descendants("Customer")
                  where (string)x.Attribute("City") == "Mexico D.F."
                  select x;
        foreach (var name in xml)
            Console.WriteLine($"Customer Name: {(string)name.Attribute("ContactName")}");
    }
}
}
<file_sep>using System;
using System.Linq;
using System.Data.Linq;
using System.Data.Linq.Mapping;
namespace DatabaseQuery
{
// LINQ to SQL mapping for the Employees table; only the columns used by the
// query in Main are mapped.
[Table(Name = "Employees")]
internal class EmployeesTable
{
[Column] public int Id;
[Column] public string Name;
[Column] public string Title;
}
/// <summary>
/// Queries the Employees table, ordered by name, and prints name and title.
/// </summary>
internal class Program
{
    private static void Main()
    {
        // Bug fix: DataContext is IDisposable; the using block guarantees the
        // underlying connection is released (the original never disposed it).
        using (var db = new DataContext(ConnectString()))
        {
            var query = from c in db.GetTable<EmployeesTable>()
                        orderby c.Name
                        select c;
            foreach (var item in query)
                Console.WriteLine($"Name: {item.Name},\tTitle: {item.Title}");
        }
        Console.ReadLine();
    }

    // Picks the connect string for whichever of my machines is running the
    // sample; both point at a local SQLEXPRESS instance named myDatabase.
    private static string ConnectString()
    {
        return Environment.MachineName == "MEM-SURFACEBOOK"
            ? @"server = MEM-SURFACEBOOK\SQLEXPRESS; integrated security = true; database = myDatabase;"
            : @"server = MEM-SURFACEPRO3\SQLEXPRESS; integrated security = true; database = myDatabase;";
    }
}
}<file_sep>namespace SimpleDelegateExample
{
/// <summary>Minimal class used by the delegate demo to show instance state.</summary>
internal class NewClass
{
// Auto-property holding the name passed at construction.
public string myName { get; set; }
public NewClass(string name)
{
myName = name;
}
}
} | 5f4edc201f37221c41e2530ecc5e3ee23b2c7f46 | [
"C#"
] | 7 | C# | memiles47/EssentialLINQ | 57267b35f1ff5e6756a1e6fe536f5548aa6c174a | dd1b1417d9ccc386cb80230146e5c722af4c91e1 |
refs/heads/master | <file_sep>import Elm from './Main.elm'
import './index.html'
import './Main.css'
// Mount point declared in index.html; embed the Elm Main module there.
const mount = document.getElementById('main')
const app = Elm.embed(Elm.Main, mount)
<file_sep>#!/bin/sh
# Produce the production bundle using the build-specific webpack config.
node_modules/.bin/webpack --config webpack.build.js
<file_sep>#!/bin/sh
# Start from a clean slate, then fetch the Elm package dependencies.
rm -rf elm-stuff
elm package install -y
<file_sep># elm-example
#### Bootstrap
```
scripts/bootstrap
```
#### Develop
```
scripts/dev-server
```
#### Build
```
scripts/build
```
<file_sep>#!/bin/sh
# Install Node dependencies first; only bootstrap Elm if that succeeded.
scripts/bootstrap-node &&
scripts/bootstrap-elm
| dbd173656e8202a16487c49fc06985ae4254f7fd | [
"JavaScript",
"Markdown",
"Shell"
] | 5 | JavaScript | spektroskop/elm-example | efa2764cd3a18b4594c00b765d7596678b1eca59 | 19b0b14a889749d908eec2ffe48aa1eaafd9f108 |
refs/heads/master | <repo_name>gde-pass/init<file_sep>/scripts/01
#!/bin/bash
# List user name, UID and home directory for every /etc/passwd entry.
# grep reads the file directly: the original piped it through a useless cat.
grep -v -E '^#' /etc/passwd | cut -d: -f1,3,6
<file_sep>/scripts/02
#!/bin/bash
# Refresh the package index, then upgrade installed packages, logging stdout.
# Bug fix: -y is required because the cron job runs unattended and
# "apt-get upgrade" would otherwise stall on its confirmation prompt.
apt-get update >> /var/log/update_script.log && apt-get upgrade -y >> /var/log/update_script.log
# Schedule: run "crontab -e" and add the line "0 4 * * 0 sh $PATH/SCRIPT"
<file_sep>/README.md
# Init
## Objectifs
Ce premier projet, init vous permettra de découvrir les commandes de base système
et réseau ainsi que les nombreux services utilisés sur une machine serveur, ainsi que
quelques idées de scripts pouvant être utiles au quotidien d’un adminsys.
<file_sep>/scripts/03
#!/bin/bash
# List the contents of the given directory, largest files first.
if [ -d "$1" ]
then
ls -lhS "$1"
else
# Usage errors belong on stderr, with a non-zero exit so callers can detect it.
echo "usage: sh $0 PATH" >&2
exit 1
fi
<file_sep>/scripts/04
#!/bin/bash
# Watch /etc/crontab for changes: keep its md5 in a state file and mail root
# whenever the checksum differs from the stored one.
# Fix: variable expansions are now quoted so paths with spaces cannot break
# the test/redirection syntax.
FILE="/var/tmp/checksum"
FILE_TO_WATCH="/etc/crontab"
MD5VALUE=$(md5sum "$FILE_TO_WATCH")
# First run: record the current checksum and stop.
if [ ! -f "$FILE" ]
then
echo "$MD5VALUE" > "$FILE"
exit 0;
fi;
# Later runs: compare against the stored value and refresh it on change.
if [ "$MD5VALUE" != "$(cat "$FILE")" ];
then
echo "$MD5VALUE" > "$FILE"
echo "$FILE_TO_WATCH has been modified ! '*_*" | mail -s "$FILE_TO_WATCH modified !" root
fi;
# Schedule: run "crontab -e" and add the line "0 0 * * * $PATH"
| 15fd1bbb55573c7e4148aa203927a8f44a176f01 | [
"Markdown",
"Shell"
] | 5 | Shell | gde-pass/init | da8742751bea8c60589c1b5e8bb180c676ba67ed | cef77b2a8dd93e04eab8d24f14ce7c2aa2266b8b |
refs/heads/master | <repo_name>alexfdezsauco/PropertyBagResearch<file_sep>/PropertyBagResearch/Implementations/PropertyBags/SuperTypedPropertyBag.cs
namespace PropertyBagResearch
{
using System;
using System.Collections.Generic;
// Property bag that keeps one strongly-typed dictionary per primitive type and
// routes values into it with __makeref/__refvalue to avoid boxing.
public class SuperTypedPropertyBag : IPropertyBag
{
private readonly IDictionary<string, int> _intValues;
private readonly IDictionary<string, bool> _boolValues;
// NOTE(review): the short and long dictionaries are allocated but never read
// or written below - short/long values fall through to the boxed
// _referenceValues path. Confirm whether that is intentional.
private readonly IDictionary<string, short> _shortValues;
private readonly IDictionary<string, long> _longValues;
private readonly IDictionary<string, object> _referenceValues;
public SuperTypedPropertyBag(IDictionaryFactory dictionaryFactory)
{
_intValues = dictionaryFactory.GenerateDictionary<int>();
_boolValues = dictionaryFactory.GenerateDictionary<bool>();
_shortValues = dictionaryFactory.GenerateDictionary<short>();
_longValues = dictionaryFactory.GenerateDictionary<long>();
_referenceValues = dictionaryFactory.GenerateDictionary<object>();
}
// Stores a value under the given name. bool and int go into their dedicated
// dictionaries without boxing; every other type is stored via the fallback.
public void SetValue<TValue>(string name, TValue value)
{
var targetValue = typeof(TValue);
if (targetValue == typeof(bool))
{
// __makeref/__refvalue reinterpret the generic value in place, avoiding
// a cast through object.
var tr = __makeref(value);
var bagValue = __refvalue(tr, bool);
_boolValues[name] = bagValue;
return;
}
else if (targetValue == typeof(int))
{
var tr = __makeref(value);
var bagValue = __refvalue(tr, int);
_intValues[name] = bagValue;
return;
}
{
// Fallback for all other types: store as object.
var tr = __makeref(value);
var bagValue = __refvalue(tr, object);
_referenceValues[name] = bagValue;
}
}
// Retrieves the value stored under the given name, or default(TValue) when
// absent. Mirrors SetValue: bool/int from the typed dictionaries, everything
// else from the object fallback.
public TValue GetValue<TValue>(string name)
{
var targetValue = typeof(TValue);
if (targetValue == typeof(bool))
{
if (_boolValues.TryGetValue(name, out var bagValue))
{
var tr = __makeref(bagValue);
var value = __refvalue(tr, TValue);
return value;
}
return default;
}
else if (targetValue == typeof(int))
{
if (_intValues.TryGetValue(name, out var bagValue))
{
var tr = __makeref(bagValue);
var value = __refvalue(tr, TValue);
return value;
}
return default;
}
{
if (_referenceValues.TryGetValue(name, out var bagValue))
{
var tr = __makeref(bagValue);
var value = __refvalue(tr, TValue);
return value;
}
return default;
}
}
}
}
<file_sep>/PropertyBagResearch/Implementations/PropertyBags/NonTypedPropertyBag.cs
namespace PropertyBagResearch
{
using System.Collections.Generic;
/// <summary>
/// Baseline property bag: every value is stored boxed in a single
/// string-to-object dictionary and cast back out on retrieval.
/// </summary>
public class NonTypedPropertyBag : IPropertyBag
{
    private readonly IDictionary<string, object> _values;

    public NonTypedPropertyBag(IDictionaryFactory dictionaryFactory) =>
        _values = dictionaryFactory.GenerateDictionary<object>();

    /// <summary>Stores (boxing value types) the value under the given name.</summary>
    public void SetValue<TValue>(string name, TValue value) => _values[name] = value;

    /// <summary>Retrieves and casts the stored value for the given name.</summary>
    public TValue GetValue<TValue>(string name) => (TValue)_values[name];
}
}
<file_sep>/PropertyBagResearch/Implementations/Interfaces/IPropertyBag.cs
namespace PropertyBagResearch
{
/// <summary>Minimal typed key/value store used to compare property-bag implementations.</summary>
public interface IPropertyBag
{
/// <summary>Stores <paramref name="value"/> under <paramref name="name"/>.</summary>
void SetValue<TValue>(string name, TValue value);
/// <summary>Returns the value stored under <paramref name="name"/> as <typeparamref name="TValue"/>.</summary>
TValue GetValue<TValue>(string name);
}
}
<file_sep>/PropertyBagResearch/Implementations/PropertyBags/TypedPropertyBag.cs
namespace PropertyBagResearch
{
using System;
using System.Collections.Generic;
// Property bag that avoids boxing for int/bool/short/long by dispatching
// through per-type setter and getter delegates registered once in the static
// constructor.
public class TypedPropertyBag : IPropertyBag
{
private readonly IDictionary<string, int> _intValues;
private readonly IDictionary<string, bool> _boolValues;
private readonly IDictionary<string, short> _shortValues;
private readonly IDictionary<string, long> _longValues;
private readonly IDictionary<string, object> _referenceValues;
// Type -> strongly-typed Action/Func delegate; stored as object because the
// delegate generic parameters differ per entry.
private static readonly Dictionary<Type, object> _setters = new Dictionary<Type, object>();
private static readonly Dictionary<Type, object> _getters = new Dictionary<Type, object>();
static TypedPropertyBag()
{
_setters[typeof(int)] = (Action<TypedPropertyBag, string, int>)SetIntValue;
_setters[typeof(bool)] = (Action<TypedPropertyBag, string, bool>)SetBoolValue;
_setters[typeof(short)] = (Action<TypedPropertyBag, string, short>)SetShortValue;
_setters[typeof(long)] = (Action<TypedPropertyBag, string, long>)SetLongValue;
_getters[typeof(int)] = (Func<TypedPropertyBag, string, int>)GetIntValue;
_getters[typeof(bool)] = (Func<TypedPropertyBag, string, bool>)GetBoolValue;
_getters[typeof(short)] = (Func<TypedPropertyBag, string, short>)GetShortValue;
_getters[typeof(long)] = (Func<TypedPropertyBag, string, long>)GetLongValue;
}
public TypedPropertyBag(IDictionaryFactory dictionaryFactory)
{
_intValues = dictionaryFactory.GenerateDictionary<int>();
_boolValues = dictionaryFactory.GenerateDictionary<bool>();
_shortValues = dictionaryFactory.GenerateDictionary<short>();
_longValues = dictionaryFactory.GenerateDictionary<long>();
_referenceValues = dictionaryFactory.GenerateDictionary<object>();
}
// Stores a value, routing the four registered primitive types through their
// typed dictionaries; everything else goes through the boxed fallback below.
public void SetValue<TValue>(string name, TValue value)
{
var targetValue = typeof(TValue);
if (_setters.TryGetValue(targetValue, out var setterObj))
{
// The dictionary was keyed by typeof(TValue), so this cast recovers the
// delegate with matching generic parameters.
var setter = (Action<TypedPropertyBag, string, TValue>)setterObj;
if (!(setter is null))
{
setter(this, name, value);
return;
}
}
// Old-fashioned, potentially boxing, method
_referenceValues[name] = value;
}
private static void SetIntValue(TypedPropertyBag instance, string name, int value)
{
instance._intValues[name] = value;
}
private static void SetBoolValue(TypedPropertyBag instance, string name, bool value)
{
instance._boolValues[name] = value;
}
private static void SetShortValue(TypedPropertyBag instance, string name, short value)
{
instance._shortValues[name] = value;
}
private static void SetLongValue(TypedPropertyBag instance, string name, long value)
{
instance._longValues[name] = value;
}
// Retrieves a value of one of the four registered primitive types.
// NOTE(review): asymmetric with SetValue - values stored through the
// _referenceValues fallback can never be read back, because unregistered
// types throw NotSupportedException here. Confirm whether that is intended.
public TValue GetValue<TValue>(string name)
{
var targetValue = typeof(TValue);
if (_getters.TryGetValue(targetValue, out var retrievalFuncObj))
{
var retrievalFunc = (Func<TypedPropertyBag, string, TValue>)retrievalFuncObj;
return retrievalFunc(this, name);
}
throw new NotSupportedException();
}
private static int GetIntValue(TypedPropertyBag instance, string name)
{
return instance._intValues[name];
}
private static bool GetBoolValue(TypedPropertyBag instance, string name)
{
return instance._boolValues[name];
}
private static short GetShortValue(TypedPropertyBag instance, string name)
{
return instance._shortValues[name];
}
private static long GetLongValue(TypedPropertyBag instance, string name)
{
return instance._longValues[name];
}
}
}
| 553f0d1f6dbb33e7b5063880d816250a707aebff | [
"C#"
] | 4 | C# | alexfdezsauco/PropertyBagResearch | 42d6c98de3ff187272424291b36e96f6a989fcaa | ceccde5e6906946bdf232a953afa1801fd880622 |
refs/heads/master | <file_sep>package gulik.urad.where;
/** A WHERE-clause term that refers to a column by its path. */
public class ColumnRef extends Clause {
    /** Path identifying the referenced column. */
    private final String columnPath;

    /**
     * @param columnPath path identifying the column this clause refers to
     */
    public ColumnRef(String columnPath) {
        this.columnPath = columnPath;
    }

    /**
     * @return the path of the referenced column. Fix: the original stored the
     *         path in a mutable field that nothing could ever read.
     */
    public String getColumnPath() {
        return columnPath;
    }
}
<file_sep>package gulik.urad.exceptions;
/**
 * Thrown from code paths that are not implemented yet.
 * Delete me when the full OData 4.01 spec is implemented.
 */
public class NotImplemented extends UradException {
}
<file_sep># A better OData protocol.
This file contains ideas and pain points that I come across when dealing with OData. My end goal is to develop
another protocol that is better.
OData is a very complex protocol. On one hand, it has a huge amount of functionality. On the other, I imagine that
implementers are shying away from it, as it is a huge amount of work to implement.
Remove features:
* Choose one data format and stick with it. XML, JSON, YAML, ASN.1, whatever - pick only one.
* Remove namespace and container names? One container only.
* Disallow getting entities by ID. Make a query for that. (maybe? But then you lose links to resources.)
* Disallow getting individual predicates. Make a select for that.
* Unify $select and $expand? Make all results a flat list using only $select.
* Edm.Binary, Edm.Stream, Edm.SBytes. Replace them with a URL link instead so that we get MIME types, download sizes, etc.
* Reduce all the numeric types to numbers with upper limit, lower limit, precision.
* $count only returns the count; no data.
In general, if something can already be done, don't add another way of doing it.
New features:
* Mandate the order of parameters, columns, etc in a URL for efficient HTTP caching. Make $skip be multiples of 256 (?) and $top to be fixed at 256, again for efficient caching.
* Maybe change the encoding to ASN.1 PER over HTTP?
- Maybe drop OData altogether and autogenerate OpenAPI with all the query parameters.
- Tables and columns can be described by querying tables. Select * from tables;
* Implement encryption using a secret group key. This allows confidential data to be cached on a CDN.
* Add Templates. Before an INSERT, give the client a prepopulated template row for the user to populate. That prepopulated template should also have a new generated primary key ready to go. Effectively it's creating a row but keeping the transaction open so it can be rolled back if the user cancels.
* "Push updates" and subscriptions to tables for real-time multi-user stuff.
* Server-side column value verification. As the user types, ask the server whether the entered value is valid. Provide a descriptive reason to the user.
* Does Odata already support $select=path1/path2/path3 and $filter=(path1/path2/path3 eq 'foo')???
* Add transaction support... somehow. Maybe use something like a $transaction=XXX parameter. Maybe do something funky with
resources (tables) being immutable and getting new URIs if they change.
* Batch updates only? Only support POST and GET.
* Batch updates are always ordered.
* Batch updates can contain queries which set variables. --> Already exists?? Variables include User, current time, generated sequence results.
* As well as a primary key, mark a column in a table (or complex column) as the "display" one to show to the user when only showing a summary of that table (e.g. collapsed complex column, drop-down list).
* Change the column's name to a code intended for consumption by code.
* Add humanName, descriptions (tool tips) to columns. --> Annotations
* Maybe compress URLs. Perhaps do a funky URL encoding using one of the ASN.1 encodings and base64 it. The same
query on the same table version, built and encoded twice, would return the exact same URL. Give tables and columns
special shortened codes.
- Another idea: put the query in the body and use a 303 to a minified URL? Hmm.
* If compressed URLs are used, also provide some way of easily debugging them.
* Replace formulas with a proper programming language. Maybe compile to WebAssembly and run on the browser.
- Formula columns
- Triggers
- Maybe even batch updates?
- Or just ask the server to verify columns.
* Describe server limits
- Max number of retrievable rows.
- Any rate limiting?
- Check the RESTier limits.
* Formalize error messages and exceptions so that they can be coded against and recovered from. RFC 7807.
- "Query too difficult".
- "Row limit exceeded"
- "Progress: n%"
- "Timed out, partial data returned."
etc.
<file_sep>package gulik.dolichos;
import org.apache.olingo.commons.api.data.ContextURL;
import org.apache.olingo.commons.api.data.EntityCollection;
import org.apache.olingo.commons.api.edm.EdmEntitySet;
import org.apache.olingo.commons.api.edm.EdmEntityType;
import org.apache.olingo.commons.api.format.ContentType;
import org.apache.olingo.commons.api.http.HttpHeader;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.*;
import org.apache.olingo.server.api.processor.EntityCollectionProcessor;
import org.apache.olingo.server.api.serializer.EntityCollectionSerializerOptions;
import org.apache.olingo.server.api.serializer.ODataSerializer;
import org.apache.olingo.server.api.serializer.SerializerException;
import org.apache.olingo.server.api.serializer.SerializerResult;
import org.apache.olingo.server.api.uri.UriInfo;
import org.apache.olingo.server.api.uri.UriResource;
import org.apache.olingo.server.api.uri.UriResourceEntitySet;
import gulik.urad.Table;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
/** I accept HTTP requests for collections of entities. */
public class DolichosEntityCollectionProcessor extends EntityReader implements EntityCollectionProcessor {
// TODO: implements EntityCollectionProcessor, EntityProcessor,
// PrimitiveProcessor, PrimitiveValueProcessor, ComplexProcessor
private OData odata;
private ServiceMetadata serviceMetadata;
public DolichosEntityCollectionProcessor(List<Table> entitySets) {
super(entitySets);
}
public void init(OData odata, ServiceMetadata serviceMetadata) {
this.odata = odata;
this.serviceMetadata = serviceMetadata;
}
public void readEntityCollection(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType responseFormat)
throws ODataApplicationException, SerializerException {
// I like how the OData tutorial says to use this code, but there's very
// little of your own business logic in it.
// 1st we have retrieve the requested EntitySet from the uriInfo object (representation of the parsed service URI)
List<UriResource> resourcePaths = uriInfo.getUriResourceParts();
UriResourceEntitySet uriResourceEntitySet = (UriResourceEntitySet) resourcePaths.get(0);
EdmEntitySet edmEntitySet = uriResourceEntitySet.getEntitySet();
// 2nd: fetch the data from backend for this requested EntitySetName
// it has to be delivered as EntitySet object
EntityCollection entitySet = toEntityCollection(toQuery(edmEntitySet, uriInfo).fetch());
// 3rd: create a serializer based on the requested format (json)
ODataSerializer serializer = odata.createSerializer(responseFormat);
// 4th: Now serialize the content: transform from the EntitySet object to InputStream
EdmEntityType edmEntityType = edmEntitySet.getEntityType();
ContextURL contextUrl = ContextURL
.with()
.entitySet(edmEntitySet)
.build();
final String id = request.getRawBaseUri() + "/" + edmEntitySet.getName();
EntityCollectionSerializerOptions opts = EntityCollectionSerializerOptions
.with()
.id(id)
.expand(uriInfo.getExpandOption()) // TODO - what does this do?
.contextURL(contextUrl)
.build();
SerializerResult serializerResult = serializer.entityCollection(serviceMetadata, edmEntityType, entitySet, opts);
InputStream serializedContent = serializerResult.getContent();
try { // TODO
serializedContent.transferTo(System.out);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
response.setContent(serializedContent);
response.setStatusCode(HttpStatusCode.OK.getStatusCode());
response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString());
}
}
<file_sep>package gulik.urad.tableColumn;
import gulik.urad.ResultSet;
/**
 * A table column that is a foreign key: {@code keyColumn} in this table points
 * at {@code foreignColumn} of {@code foreignTable}.
 * NOTE(review): the fields are never read or written here; presumably the
 * wiring happens elsewhere or is still to be implemented - confirm.
 */
public class ForeignKeyColumn extends TableColumn {
// The local column holding the key value.
private TableColumn keyColumn;
// The table the key points into.
private ResultSet foreignTable;
// The referenced column in that table.
private TableColumn foreignColumn;
}
<file_sep>package gulik.urad.queryColumn;
import gulik.urad.tableColumn.TableColumn;
/**
 * One projected column of a query: wraps the underlying table column and
 * carries the title and result position used when rendering.
 */
public class QueryColumn {
    /** Column of the underlying table that this query column projects. */
    private TableColumn origColumn;
    /** Human-readable title for this column. */
    private String title;
    /** Position of this column in the query result (base not documented here - TODO confirm). */
    private int columnIndex;

    /**
     * Builds a query column that projects the given table column, copying
     * its title.
     */
    public static QueryColumn from(TableColumn tc) {
        QueryColumn qc = new QueryColumn();
        qc.setOrigColumn(tc);
        qc.setTitle(tc.getTitle());
        return qc;
    }

    public TableColumn getOrigColumn() {
        return origColumn;
    }

    public void setOrigColumn(TableColumn origColumn) {
        this.origColumn = origColumn;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public int getColumnIndex() {
        return columnIndex;
    }

    public void setColumnIndex(int columnIndex) {
        this.columnIndex = columnIndex;
    }

    /** @return the name of the underlying table column */
    public String getName() {
        return origColumn.getName();
    }

    @Override
    public String toString() {
        return getName();
    }
}
<file_sep>package gulik.urad.exceptions;
public class ColumnDoesNotExist extends UradException {
public ColumnDoesNotExist(String name) {
}
}
<file_sep>package gulik.urad;
import static org.junit.Assert.*;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import gulik.demo.VegetableServlet;
//import gulik.dolichos.*;
import org.junit.Test;
public class TestServlet {
/** Fetch that URL and check that the result contains the given string.
* @throws IOException
* @throws ServletException */
private void checkForResponse(String url, String contains) throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest("GET", url);
request.addParameter("odata-debug", "json");
request.addParameter("format", "json");
MockHttpServletResponse response = new MockHttpServletResponse();
// This doesn't work unfortunately. You still get HTML.
// request.setContentType("application/json");
HttpServlet sv = new VegetableServlet();
sv.init(null);
sv.service(request, response);
response.getWriter().flush();
String result = response.getContentAsString();
System.out.println(result);
assertTrue(result.contains(contains));
}
@Test
public void testServlet() throws Exception {
checkForResponse("/$metadata", "Vegetable");
}
@Test
public void testFruitProperty() throws Exception {
checkForResponse("/Fruit", "numberOfSeeds");
}
@Test
public void testVegetables() throws Exception {
checkForResponse("/Vegetables", "cabbage");
}
@Test
public void testVegetable() throws Exception {
checkForResponse("/Vegetables('brusselsprout')", "green");
}
}<file_sep>package gulik.urad.tableColumn;
public class TimestampColumn extends TableColumn {
}
<file_sep>package gulik.urad.queryColumn;
import java.util.List;
/* When the user expands a column, the TableColumn at that position
is replaced with an instance of me. */
public class ExpandedQueryColumn {
private List<QueryColumn> childs;
}
<file_sep>package gulik.dolichos;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeKind;
import org.apache.olingo.commons.api.edm.FullQualifiedName;
import org.apache.olingo.commons.api.edm.provider.CsdlAbstractEdmProvider;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainer;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainerInfo;
import org.apache.olingo.commons.api.edm.provider.CsdlEntitySet;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityType;
import org.apache.olingo.commons.api.edm.provider.CsdlProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlPropertyRef;
import org.apache.olingo.commons.api.edm.provider.CsdlSchema;
import org.apache.olingo.commons.api.ex.ODataException;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.ODataApplicationException;
import gulik.urad.Table;
import gulik.urad.Type;
import gulik.urad.exceptions.NotImplemented;
public class UradEdmProvider extends CsdlAbstractEdmProvider {
public List<Table> entitySets;
public UradEdmProvider(List<Table> entitySets) {
this.entitySets = entitySets;
}
private String namespace() {
return "namespace";
}
private String container() {
return "container";
}
@Override
public CsdlEntityType getEntityType(final FullQualifiedName entityTypeName) throws ODataException {
Table t = entitySets
.stream()
.filter(each -> each.getName().equals(entityTypeName.getName()))
.findFirst()
.orElseThrow(() -> new ODataException("Unknown entityTypeName: " + entityTypeName));
return createEntityTypeFrom(t);
}
/*
* private CsdlEntityType entityTypeFromMethod(Method me) {
* return createEntityTypeFromTable(columnDefinitionsFromMethod(me),
* entityName(me));
* }
*
* private Table columnDefinitionsFromMethod(Method me) throws RuntimeException
* {
* Query q = Query.queryDefinition();
* try {
* return (Table) me.invoke(endpoint.getConstructor().newInstance(), q);
* } catch (InstantiationException | IllegalAccessException |
* InvocationTargetException | NoSuchMethodException e) {
* throw new RuntimeException("Could not get a Table from " + me.getName() +
* "(q)", e);
* }
* }
*/
private CsdlEntityType createEntityTypeFrom(Table es) {
// TODO: check for nulls on everything the user might provide.
List<CsdlProperty> columns = es.getColumns().stream()
.map(c -> new CsdlProperty()
.setName(c.getName())
.setType(edmTypeOf(c.getType())))
.collect(Collectors.toList());
List<CsdlPropertyRef> primaryKey = es.getColumns().stream()
.filter(each -> each.isPrimaryKey())
.map(each -> new CsdlPropertyRef()
.setName(each.getName()))
.collect(Collectors.toList());
return new CsdlEntityType()
.setName(es.getName())
.setProperties(columns)
.setKey(primaryKey);
}
private FullQualifiedName edmTypeOf(Type uradType) {
switch (uradType) {
case Integer:
return EdmPrimitiveTypeKind.Int32.getFullQualifiedName();
case String:
return EdmPrimitiveTypeKind.String.getFullQualifiedName();
case Float:
return EdmPrimitiveTypeKind.Decimal.getFullQualifiedName(); // TODO: Decimal???
case Date:
return EdmPrimitiveTypeKind.Date.getFullQualifiedName();
case Boolean:
return EdmPrimitiveTypeKind.Boolean.getFullQualifiedName();
default:
throw new NotImplemented();
}
}
@Override
public CsdlEntitySet getEntitySet(FullQualifiedName entityContainer, String entitySetName) throws ODataException {
for (Table each : entitySets) {
String name = each.getName();
if (null==name) {
throw new Fail(each.getClass().getName()+".getName() returned null.");
}
if (name.equals(entitySetName)) {
CsdlEntitySet entitySet = new CsdlEntitySet();
entitySet.setName(name);
entitySet.setTitle("Human readable "+name); // TODO
// I don't understand what the type is meant to be.
entitySet.setType(new FullQualifiedName(this.namespace(), name));
return entitySet;
}
}
throw new Fail("Could not find entity set named " + entitySetName);
}
private class Fail extends ODataApplicationException {
public Fail(String message) {
super(message, HttpStatusCode.INTERNAL_SERVER_ERROR.getStatusCode(), Locale.ENGLISH);
}
}
@Override
public CsdlEntityContainerInfo getEntityContainerInfo(FullQualifiedName entityContainerName) throws ODataException {
// This method is invoked when displaying the Service Document at e.g.
// http://localhost:8080/DemoService/DemoService.svc
if (entityContainerName == null
|| entityContainerName.equals(new FullQualifiedName(this.namespace(), this.container()))) {
CsdlEntityContainerInfo entityContainerInfo = new CsdlEntityContainerInfo();
entityContainerInfo.setContainerName(new FullQualifiedName(this.namespace(), this.container()));
return entityContainerInfo;
}
return null;
}
@Override
public List<CsdlSchema> getSchemas() throws ODataException {
CsdlSchema schema = new CsdlSchema();
schema.setNamespace(this.namespace());
schema.setEntityTypes(
entitySets
.stream()
.map(each -> createEntityTypeFrom(each))
.collect(Collectors.toList()));
schema.setEntityContainer(getEntityContainer());
List<CsdlSchema> schemas = new ArrayList<CsdlSchema>();
schemas.add(schema);
return schemas;
}
@Override
public CsdlEntityContainer getEntityContainer() throws ODataException {
List<CsdlEntitySet> ess = new ArrayList<CsdlEntitySet>();
for (Table each : entitySets) {
CsdlEntitySet c = getEntitySet(
new FullQualifiedName(this.namespace(), this.container()),
each.getName());
ess.add(c);
}
return new CsdlEntityContainer()
.setName(this.container())
.setEntitySets(ess);
}
}
<file_sep>package gulik.dolichos;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Locale;
import org.apache.olingo.commons.api.Constants;
import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.data.EntityCollection;
import org.apache.olingo.commons.api.data.Link;
import org.apache.olingo.commons.api.data.Property;
import org.apache.olingo.commons.api.data.ValueType;
import org.apache.olingo.commons.api.edm.EdmElement;
import org.apache.olingo.commons.api.edm.EdmEntitySet;
import org.apache.olingo.commons.api.edm.EdmEntityType;
import org.apache.olingo.commons.api.edm.EdmNavigationProperty;
import org.apache.olingo.commons.api.edm.EdmNavigationPropertyBinding;
import org.apache.olingo.commons.api.edm.EdmProperty;
import org.apache.olingo.commons.api.ex.ODataRuntimeException;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.ODataApplicationException;
import org.apache.olingo.server.api.uri.UriInfo;
import org.apache.olingo.server.api.uri.UriInfoResource;
import org.apache.olingo.server.api.uri.UriResource;
import org.apache.olingo.server.api.uri.UriResourceNavigation;
import org.apache.olingo.server.api.uri.UriResourcePrimitiveProperty;
import org.apache.olingo.server.api.uri.queryoption.CountOption;
import org.apache.olingo.server.api.uri.queryoption.ExpandItem;
import org.apache.olingo.server.api.uri.queryoption.ExpandOption;
import org.apache.olingo.server.api.uri.queryoption.FilterOption;
import org.apache.olingo.server.api.uri.queryoption.OrderByItem;
import org.apache.olingo.server.api.uri.queryoption.OrderByOption;
import org.apache.olingo.server.api.uri.queryoption.SelectItem;
import org.apache.olingo.server.api.uri.queryoption.SelectOption;
import org.apache.olingo.server.api.uri.queryoption.SkipOption;
import org.apache.olingo.server.api.uri.queryoption.TopOption;
import org.apache.olingo.server.api.uri.queryoption.expression.Expression;
import org.apache.olingo.server.api.uri.queryoption.expression.ExpressionVisitException;
import org.apache.olingo.server.api.uri.queryoption.expression.Member;
import gulik.urad.Query;
import gulik.urad.Row;
import gulik.urad.Table;
import gulik.urad.queryColumn.QueryColumn;
import gulik.urad.ResultSet;
import gulik.urad.tableColumn.TableColumn;
import gulik.urad.value.Value;
/** I convert Olingo URI/query options into urad Queries, and urad query
 * results back into Olingo entities. I am the shared base class for the
 * collection and single-entity processors.
 */
public class EntityReader {
    /** The tables exposed as OData entity sets. */
    List<Table> entitySets;

    public EntityReader(List<Table> entitySets) {
        this.entitySets = entitySets;
    }

    /** Start a new Query on the entity set with the given name.
     * @throws IndexOutOfBoundsException if no entity set has that name.
     */
    protected Query getQueryByName(String name) {
        for (Table each : entitySets) {
            if (each.getName().equals(name)) {
                return new Query(each);
            }
        }
        throw new IndexOutOfBoundsException("Could not find entity set "+name);
    }

    /** Convert the rows of the given result set into an Olingo EntityCollection. */
    protected EntityCollection toEntityCollection(ResultSet table) {
        EntityCollection result = new EntityCollection();
        if (table.hasCount()) {
            result.setCount(table.getCount());
            // If there is a count, we still return rows from the query.
        }
        List<Entity> entities = result.getEntities();
        for (Row each : table) {
            // (A stray System.out.println debug statement was removed here;
            // library code should not write to stdout.)
            entities.add(toEntity(each, table));
        }
        /* When you have expanded entities:
        Link link = new Link();
        link.setTitle(navPropName);
        link.setInlineEntity(expandEntity);
        entity.getNavigationLinks().add(link);
        */
        return result;
    }

    /** Build a Query for the given entity set from the URI's system query options.
     * $count, $select, $orderby, $skip and $top are applied to the query.
     * NOTE(review): $expand and $filter are parsed but their results are not
     * yet applied - see processExpand() and processFilter().
     */
    protected Query toQuery(EdmEntitySet edmEntitySet, UriInfo uriInfo) throws ODataApplicationException {
        Query result = getQueryByName(edmEntitySet.getName());
        // $count=true
        processCount(uriInfo, result);
        // $select
        processSelect(uriInfo, result);
        // $expand
        processExpand(edmEntitySet, uriInfo);
        // $filter
        processFilter(uriInfo);
        // $orderBy
        processOrderBy(uriInfo, result);
        // $skip
        processSkip(uriInfo, result);
        // $top
        processTop(uriInfo, result);
        return result;
    }

    /** Apply $select. Only single-segment property paths are supported;
     * anything fancier is rejected with a 500. */
    private void processSelect(UriInfo uriInfo, Query result) throws ODataApplicationException {
        SelectOption selectOption = uriInfo.getSelectOption();
        if (null != selectOption) {
            for (SelectItem each : selectOption.getSelectItems()) {
                List<UriResource> r = each.getResourcePath().getUriResourceParts();
                if (r.size() > 1) {
                    throw new ODataApplicationException("Your $select is too fancy for my pathetic code.",
                            HttpStatusCode.INTERNAL_SERVER_ERROR.getStatusCode(),
                            Locale.ENGLISH);
                }
                result.select(r.get(0).getSegmentValue());
            }
        }
    }

    /** Apply $count=true by asking the query for a row count. */
    private void processCount(UriInfo uriInfo, Query result) {
        CountOption countOption = uriInfo.getCountOption();
        boolean isCount = null != countOption && countOption.getValue();
        if (isCount) {
            result.selectCount();
        }
    }

    /** Apply $top: the maximum number of rows to return. */
    private void processTop(UriInfo uriInfo, Query result) {
        TopOption topOption = uriInfo.getTopOption();
        if (null != topOption) {
            result.top(topOption.getValue());
        }
    }

    /** Apply $skip: the number of leading rows to drop. */
    private void processSkip(UriInfo uriInfo, Query result) {
        SkipOption skipOption = uriInfo.getSkipOption();
        if (null != skipOption) {
            result.skip(skipOption.getValue());
        }
    }

    /** Parse $filter. NOTE(review): the visitor's return value is discarded,
     * so the filter expression is visited (and failures reported) but never
     * actually applied to the query. */
    private void processFilter(UriInfo uriInfo) throws ODataApplicationException {
        FilterOption filterOption = uriInfo.getFilterOption();
        if (null != filterOption) {
            // This isn't actually that bad. I'm going to steal this pattern.
            Expression filterExpression = filterOption.getExpression();
            FilterExpressionVisitor v = new FilterExpressionVisitor();
            try {
                filterExpression.accept(v);
            } catch (ExpressionVisitException e) {
                throw new ODataApplicationException("Failed to evaluate expression.",
                        HttpStatusCode.INTERNAL_SERVER_ERROR.getStatusCode(),
                        Locale.ENGLISH,
                        e);
            }
        }
    }

    /** $expand handling. Deliberately disabled by the early return: the code
     * below is unadapted copypasta from the Olingo tutorial and does not work
     * yet (the entity/back-end lookups are still null placeholders). */
    private void processExpand(EdmEntitySet edmEntitySet, UriInfo uriInfo) {
        if (true) return; // Copypasta code below doesn't work yet.
        EdmNavigationProperty edmNavigationProperty = null;
        ExpandOption expandOption = uriInfo.getExpandOption();
        if (null != expandOption) {
            ExpandItem expandItem = expandOption.getExpandItems().get(0);
            if (expandItem.isStar()) {
                List<EdmNavigationPropertyBinding> bindings = edmEntitySet.getNavigationPropertyBindings();
                // we know that there are navigation bindings
                // however normally in this case a check if navigation bindings exists is done
                if (!bindings.isEmpty()) {
                    // can in our case only be 'Category' or 'Products', so we can take the first
                    EdmNavigationPropertyBinding binding = bindings.get(0);
                    EdmElement property = edmEntitySet.getEntityType().getProperty(binding.getPath());
                    // we don't need to handle error cases, as it is done in the Olingo library
                    if (property instanceof EdmNavigationProperty) {
                        edmNavigationProperty = (EdmNavigationProperty) property;
                    }
                }
            } else {
                // can be 'Category' or 'Products', no path supported
                UriResource uriResource = expandItem.getResourcePath().getUriResourceParts().get(0);
                // we don't need to handle error cases, as it is done in the Olingo library
                if (uriResource instanceof UriResourceNavigation) {
                    edmNavigationProperty = ((UriResourceNavigation) uriResource).getProperty();
                }
            }
            if (edmNavigationProperty != null) {
                EdmEntityType expandEdmEntityType = edmNavigationProperty.getType();
                String navPropName = edmNavigationProperty.getName();
                // build the inline data
                Link link = new Link();
                link.setTitle(navPropName);
                link.setType(Constants.ENTITY_NAVIGATION_LINK_TYPE);
                link.setRel(Constants.NS_ASSOCIATION_LINK_REL + navPropName);
                if (edmNavigationProperty.isCollection()) { // in case of Categories(1)/$expand=Products
                    // fetch the data for the $expand (to-many navigation) from backend
                    EntityCollection expandEntityCollection = null; // storage.getRelatedEntityCollection(entity, expandEdmEntityType);
                    link.setInlineEntitySet(expandEntityCollection);
                    link.setHref(expandEntityCollection.getId().toASCIIString());
                } else { // in case of Products(1)?$expand=Category
                    // fetch the data for the $expand (to-one navigation) from backend
                    Entity expandEntity = null; // storage.getRelatedEntity(entity, expandEdmEntityType);
                    link.setInlineEntity(expandEntity);
                    link.setHref(expandEntity.getId().toASCIIString());
                }
                // set the link - containing the expanded data - to the current entity
                Entity entity = null; // TODO.
                entity.getNavigationLinks().add(link);
            }
        }
    }

    /** Apply $orderby. Only single-segment primitive properties are supported;
     * other expressions are silently ignored. */
    private void processOrderBy(UriInfo uriInfo, Query result) {
        OrderByOption orderByOption = uriInfo.getOrderByOption();
        if (orderByOption != null) {
            List<OrderByItem> orderItemList = orderByOption.getOrders();
            for (OrderByItem each : orderItemList) {
                Expression expression = each.getExpression();
                if (expression instanceof Member) {
                    UriInfoResource resourcePath = ((Member) expression).getResourcePath();
                    UriResource uriResource = resourcePath.getUriResourceParts().get(0);
                    if (uriResource instanceof UriResourcePrimitiveProperty) {
                        EdmProperty edmProperty = ((UriResourcePrimitiveProperty) uriResource).getProperty();
                        final String sortPropertyName = edmProperty.getName();
                        result.orderBy(sortPropertyName);
                    }
                }
            }
        }
    }

    /** Convert the given row to an Entity. The table is required for column definitions. */
    protected Entity toEntity(Row row, ResultSet table) {
        /* This is a bit wasteful - we're creating objects to throw them away. A future version could make this more directly
           from the network.
        */
        Entity result = new Entity();
        for (QueryColumn eachColumn : table.getColumns()) {
            Value v = row.get(eachColumn.getColumnIndex());
            Property p = new Property(null, eachColumn.getName(), ValueType.PRIMITIVE, v.value());
            result.addProperty(p);
        }
        // TODO: we assume there is only one primary key.
        Object primaryKey = row.get(table.getPrimaryKey().get(0).getColumnIndex());
        result.setId(createId(table.getName(), primaryKey));
        return result;
    }

    /** Build an entity id of the form EntitySet(key), e.g. "Products(1)". */
    private URI createId(String entitySetName, Object id) {
        try {
            return new URI(entitySetName + "(" + String.valueOf(id) + ")");
        } catch (URISyntaxException e) {
            throw new ODataRuntimeException("Unable to create id for entity: " + entitySetName, e);
        }
    }
}
<file_sep>package gulik.urad.tableColumn;
/** I am a table column that holds integer values.
 * No integer-specific behaviour yet; the type tag lives on TableColumn. */
public class IntegerColumn extends TableColumn {
}
<file_sep>package gulik.urad.exceptions;
/** I am the common superclass of all urad-specific runtime exceptions.
 * Subclass me rather than throwing raw RuntimeExceptions from urad code. */
public abstract class UradException extends RuntimeException {
    // RuntimeException is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;
}
<file_sep># What is this?
I am a library that makes it easier to create OData services in Java. I am built on Apache Olingo.
[](https://gitpod.io/#https://github.com/mikevdg/urad)
# Project status
It doesn't compile yet.
# Project structure
This project is work in progress. It contains several future projects in the same build package. These will be split up
when they work well enough to be separately maintained:
* dolichos - annotations and framework stuff for making an OData service.
* urad - a query framework.
* demo - a quick web app hack to test my stuff. This will be discarded.
* (TODO) - Spring integration?? In particular, Spring security?
* (TODO) - OAuth integration??
To run me, point your browser at http://localhost:8080/ after you do:
```shell script
$ mvn jetty:run
```
# Dolichos
This library contains annotations and a Servlet implementation for creating OData services.
Services can be defined by:
``` java
@ODataEndpoint(namespace="people", container="container")
public class PersonController { // I'm not Spring. Don't get confused.
// HTTP GET
@GetEntities("Person")
public Table getPersons(Query q) {
// Insert pre-query business logic here.
return new JPAQueryable(Person.class)
.query(q);
// Insert post-query business logic here.
}
// HTTP POST
@CreateEntities("Person")
public Table createPerson(Table person) {
return new JPAQueryable(Person.class)
.create(person);
}
// HTTP PUT
@UpdateEntities("Person")
public Table updatePerson(Table person) {
return new JPAQueryable(Person.class)
.update(person);
}
// HTTP DELETE
@DeleteEntities("Person")
public void deletePerson(Table person) {
new JPAQueryable(Person.class)
.delete(person);
}
}
```
This allows the implementer to:
* Include business logic before and after a query.
* Inspect the query before it runs, e.g. to disallow dangerously heavy queries.
* Modify the query before it runs, e.g. to trim excessive column navigation.
* To add stuff to the result, e.g. from two or more queries.
* To create his own Queryable and Table classes for very custom behaviour.
Then you define it as a servlet:
``` xml
<servlet>
<servlet-name>odata</servlet-name>
<servlet-class>gulik.dolichos.ODataServlet</servlet-class>
<load-on-startup>1</load-on-startup>
<init-param>
<param-name>namespace</param-name>
<param-value>gulik.demo</param-value>
</init-param>
</servlet>
```
The implementation will search that Java namespace recursively at start-up and register all classes annotated with @ODataEndpoint.
# Urad
Urad is a Java library for managing queries on tabular data. I allow for SQL-esque queries to be performed on
standard Java lists, relational databases, via JPA bindings, or for fancier things such as OData or other
REST services.
I have the following interfaces, which should be reasonably straight-forward:
A Table is an iterable entity that has column metadata. When a client requests $metadata, this will get an empty
query to fetch the metadata. It returns Row objects. Both Table and Row are interfaces that the user can implement
should the provided implementations not be satisfactory.
A Query is a manipulatable object containing the query parameters from the user. It basically contains a logical
SQL SELECT statement.
The Queryable objects convert Queries to Tables. Tables need metadata that describe their columns and structure.
Urad will provide at least a JPAQueryable that uses the JPA annotations to build up the metadata, and maybe
an SQLQueryable that works using JDBC's metadata mechanisms (?). Queryables can also be made by the user.
To use me:
```java
// Say that we have a list of people. Person is probably annotated with JPA.
List<Person> people = new ArrayList<>();
people.add(new Person("Alice", Gender.female, 33));
people.add(new Person("Bob", Gender.male, 18));
// First make a query. This is a bit like jOOQ, but geared towards OData.
Query q = new Query()
.select("name")
.select("age")
.from("Person")
.where(equal("name", "Bob" ))
.orderBy("age")
.top(10) // windowing: get results 10 through 20.
.skip(10);
// Then we apply it to a Queryable. CollectionQueryable works with any Java collection.
Queryable queryMe = new CollectionQueryable(people);
Table result = queryMe.query(q);
// Print out the columns
for (Column eachColumn : result.columns()) {
System.out.print(String.format("|%.15s|", eachColumn.getTitle()));
}
System.out.println();
// Now we can iterate over the results
// This looks a bit funky. It's like this so that if the Queryable is based on SQL
// queries, it can close the ResultSet when done.
result.iterate( (eachRow) -> {
for (Value eachValue : eachRow.values()) {
System.out.print(String.format("|%.15s|", eachValue));
}
System.out.println();
});
```
Note how the result is a Table of Rows. If we choose to "select" individual columns, we cannot return
POJOs.
There are no joins. Instead, we assume the columns on foreign keys are navigable using some metadata such as JPA
bindings, other annotations or some schema mechanism. In particular, this framework assumes a
set of "root" entities and navigable properties.
For example, say that Person was (keeping in mind that it might be data from SQL or REST):
```java
public class Person {
String name,
int age,
Gender gender,
List<Person> friends
}
```
Then we could return a table containing two columns: a name, and all our friends' ages, by:
```java
Query q = new Query()
.select("name")
.select("friends/age")
.orderBy("friends/age");
return new CollectionQueryable(people).query(q);
```
Columns are navigable using OData syntax with slashes between columns, e.g. "friends/name".
TODO: Add HttpServletRequest, HttpServletResponse to method parameters?
TODO: How to wrap a bulk update in a transaction? Or do transactions in general?
TODO: Can I help with data migrations and data refactoring if I have full schema awareness, aka liquibase?
TODO: Is Querydsl worth investigating?
# Project goals
This is intended to be a component of a complete OData stack comprising:
* Chickpea - OData front-end
* Runner - OData test framework
* Urad - Query framework
* Dolichos - OData annotations web service
TODO: A code generator to create a JPA-like metamodel for column names.
<file_sep>package gulik.urad.queryables;
import gulik.urad.Query;
import gulik.urad.ResultSet;
/** I am a "utility" interface that converts a Query into a ResultSet. I perform the given query
 * on whatever data you set me up with.
 *
 * TODO: Am I just a "Table"?
 */
public interface Queryable {
    /** Perform the given query and return its tabular result. */
    ResultSet query(Query q);
}
<file_sep>package gulik.dolichos;
import java.util.ArrayList;
import java.util.List;
import gulik.demo.NotImplementedException;
import gulik.urad.Type;
import gulik.urad.tableColumn.BooleanColumn;
import gulik.urad.tableColumn.FloatColumn;
import gulik.urad.tableColumn.IntegerColumn;
import gulik.urad.tableColumn.StringColumn;
import gulik.urad.tableColumn.TableColumn;
import gulik.urad.tableColumn.TimestampColumn;
/** Fluent API helper to create lists of columns. */
public class ColumnListBuilder {
    private List<TableColumn> columns = new ArrayList<>(20);

    /** Add another column with the given name and type.
     * @throws NotImplementedException for types with no column implementation yet. */
    public ColumnListBuilder add(String name, Type type) {
        TableColumn newColumn;
        switch (type) {
            case Integer:
                newColumn = new IntegerColumn();
                break;
            case Float:
                newColumn = new FloatColumn();
                break;
            case String:
                newColumn = new StringColumn();
                break;
            case Timestamp:
                newColumn = new TimestampColumn();
                break;
            case Date:
                newColumn = new TimestampColumn(); // TODO: DateColumn.
                break;
            case Boolean:
                newColumn = new BooleanColumn();
                break;
            default:
                throw new NotImplementedException();
        }
        newColumn
                .setName(name)
                .setType(type);
        columns.add(newColumn);
        return this;
    }

    /** Mark the named columns as the (possibly composite) primary key.
     * Generalized from the former pk(String) / pk(String, String) overloads;
     * existing call sites compile unchanged against this varargs form.
     * Names that match no column are silently ignored (as before) - callers
     * should double-check their spelling. */
    public ColumnListBuilder pk(String... names) {
        for (String name : names) {
            for (TableColumn each : columns) {
                if (each.getName().equals(name)) {
                    each.setPrimaryKey(true);
                }
            }
        }
        return this;
    }

    /** The accumulated column list. Returns the live internal list, as before. */
    public List<TableColumn> build() {
        return columns;
    }
}
<file_sep>package gulik.demo;
/** Thrown by stubbed-out methods whose functionality is not implemented yet. */
public class NotImplementedException extends RuntimeException {
    // RuntimeException is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;
}
<file_sep>package gulik.urad.queryables.collection;
import java.util.Iterator;
import gulik.urad.Row;
/** I walk the elements of a source collection, converting each one into a Row
 * by delegating to a RowGenerator. */
public class CollectionIterator implements Iterator<Row> {
    private final RowGenerator generator;
    private final Iterator<?> elements;

    public CollectionIterator(RowGenerator rg) {
        generator = rg;
        elements = rg.sourceIterator();
    }

    @Override
    public boolean hasNext() {
        return elements.hasNext();
    }

    @Override
    public Row next() {
        Object rawElement = elements.next();
        return generator.toRow(rawElement);
    }
}
<file_sep>package gulik.urad;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
import org.junit.Test;
import gulik.demo.VegetableTable;
import gulik.urad.exceptions.ColumnDoesNotExist;
/** Tests for querying a collection-backed table (VegetableTable). */
public class TestCollection {
    @Test
    public void testOrderByName() {
        ResultSet result = new VegetableTable().select().orderBy("name").fetch();
        List<Row> v = result.stream().collect(Collectors.toList());
        // NOTE(review): these compare Value objects against plain Strings via
        // equals(); presumably the Value subclasses accept Strings - TODO confirm.
        assertTrue(v.get(0).getByName("name").equals("'alfalfa'"));
        assertTrue(v.get(1).getByName("name").equals("'brusselsprout'"));
        assertTrue(v.get(2).getByName("name").equals("'cabbage'"));
    }

    @Test
    public void testOrderByDate() {
        ResultSet result = new VegetableTable().select().orderBy("planted").fetch();
        List<Row> v = result.stream().collect(Collectors.toList());
        assertTrue(v.get(0).getByName("planted").toString().equals("2000-01-02"));
        assertTrue(v.get(1).getByName("planted").toString().equals("2000-01-03"));
        assertTrue(v.get(2).getByName("planted").toString().equals("2000-01-04"));
    }

    @Test
    public void testOrderByWeight() {
        ResultSet result = new VegetableTable().select().orderBy("weight").fetch();
        List<Row> v = result.stream().collect(Collectors.toList());
        assertTrue(v.get(0).getByName("weight").equals("2"));
        assertTrue(v.get(1).getByName("weight").equals("5"));
        assertTrue(v.get(2).getByName("weight").equals("10"));
    }

    @Test
    public void testSelect() {
        Table t = new VegetableTable();
        ResultSet result = t.select("planted", "weight").fetch();
        // JUnit's assertEquals takes (expected, actual); the originals had them reversed.
        assertEquals(0, result.getColumnNumber("planted"));
        assertEquals(1, result.getColumnNumber("weight"));
        try {
            result.getColumnNumber("name");
            fail();
        } catch (ColumnDoesNotExist e) {
            // Expected: "name" was not selected.
        }
    }

    @Test
    public void testSelectAll() {
        Table t = new VegetableTable();
        ResultSet result = t.select().fetch();
        // We have no guarantees about the ordering of the columns.
        // Reflection on classes cannot tell us this.
        HashSet<Integer> h = new HashSet<Integer>();
        h.add(result.getColumnNumber("name"));
        h.add(result.getColumnNumber("colour"));
        h.add(result.getColumnNumber("childrenLikeIt"));
        h.add(result.getColumnNumber("weight"));
        assertEquals(4, h.size());
    }

    static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");

    /** Parse a yyyy-MM-dd string. Currently unused by the tests above. */
    private static Date date(String d) {
        try {
            return dateFormat.parse(d);
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    }
}
<file_sep>package gulik.urad;
import gulik.urad.queryColumn.QueryColumn;
import gulik.urad.value.Value;
/** I am a row from a Table. I'm a "dumb" object; a Table is a "smart" object. All of
 * the column definitions and intelligence is in the Table implementation.
 *
 * Don't rely on me being an immutable object. It is implementation specific, but I might be re-used
 * in the next iteration.
 */
public class Row {
    // The result set I belong to; used to resolve column names to indices in getByName().
    private ResultSet source;
    // Cell values, indexed by column number. Initialised to Value.NULL.
    private final Value[] values;
    // Primary-key values. NOTE(review): sized numColumns like 'values';
    // presumably only the key columns' slots are ever used - TODO confirm.
    private final Value[] primaryKey;
    /** Constructor used when you do query(). The source ResultSet is my container.
     * All cells (and primary-key slots) start out as Value.NULL. */
    public Row(ResultSet source, int numColumns) {
        this.source = source;
        values = new Value[numColumns];
        for (int i=0; i<numColumns; i++) {
            values[i] = Value.NULL;
        }
        primaryKey = new Value[numColumns];
        for (int i=0; i<numColumns; i++) {
            primaryKey[i] = Value.NULL;
        }
    }
    /** The value at the given column index. */
    public Value get(int columnNum) {
        return values[columnNum];
    }
    /** Overwrite the value at the given column index. */
    public void set(int columnNum, Value v) {
        values[columnNum] = v;
    }
    /** The value in the named column, resolved via my source ResultSet. */
    public Value getByName(String name) {
        QueryColumn column = source.getColumnByName(name);
        return get(column.getColumnIndex());
    }
    /** The primary-key value stored at the given column index. */
    public Value getPrimaryKey(int columnNum) {
        return primaryKey[columnNum];
    }
    /** Store a primary-key value at the given column index. */
    public void setPrimaryKey(int columnNum, Value v) {
        primaryKey[columnNum] = v;
    }
}
<file_sep>package gulik.dolichos;
import gulik.urad.exceptions.NotImplemented;
import gulik.urad.Query;
import gulik.urad.ResultSet;
import gulik.urad.Table;
import gulik.urad.where.Clause;
import org.apache.olingo.commons.api.data.ContextURL;
import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.edm.*;
import org.apache.olingo.commons.api.format.ContentType;
import org.apache.olingo.commons.api.http.HttpHeader;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.*;
import org.apache.olingo.server.api.processor.EntityProcessor;
import org.apache.olingo.server.api.serializer.EntitySerializerOptions;
import org.apache.olingo.server.api.serializer.ODataSerializer;
import org.apache.olingo.server.api.serializer.SerializerResult;
import org.apache.olingo.server.api.uri.UriInfo;
import org.apache.olingo.server.api.uri.UriParameter;
import org.apache.olingo.server.api.uri.UriResource;
import org.apache.olingo.server.api.uri.UriResourceEntitySet;
import java.io.InputStream;
import java.util.List;
/** I accept HTTP requests for single entities (e.g. GET Products(1)).
 * Only read is implemented; create/update/delete throw NotImplemented. */
public class DolichosEntityProcessor extends EntityReader implements EntityProcessor {
    private OData odata;
    private ServiceMetadata serviceMetadata;

    public DolichosEntityProcessor(List<Table> entitySets) {
        super(entitySets);
    }

    /** Read the single entity addressed by the URI's key predicates,
     * serialize it in the requested format and write it to the response. */
    @Override
    public void readEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType responseFormat) throws ODataApplicationException, ODataLibraryException {
        // NOTE(review): this doesn't work yet - a message about the key always
        // being invalid appears before this code even gets executed.
        // 1. retrieve the Entity Type
        List<UriResource> resourcePaths = uriInfo.getUriResourceParts();
        // Note: only in our example we can assume that the first segment is the EntitySet
        UriResourceEntitySet uriResourceEntitySet = (UriResourceEntitySet) resourcePaths.get(0);
        EdmEntitySet edmEntitySet = uriResourceEntitySet.getEntitySet();
        // 2. retrieve the data from backend
        List<UriParameter> keyPredicates = uriResourceEntitySet.getKeyPredicates();
        Entity entity = readEntity(edmEntitySet, keyPredicates);
        // 3. serialize
        EdmEntityType entityType = edmEntitySet.getEntityType();
        ContextURL contextUrl = ContextURL.with().entitySet(edmEntitySet).build();
        // expand and select currently not supported
        EntitySerializerOptions options = EntitySerializerOptions.with().contextURL(contextUrl).build();
        ODataSerializer serializer = odata.createSerializer(responseFormat);
        SerializerResult serializerResult = serializer.entity(serviceMetadata, entityType, entity, options);
        InputStream entityStream = serializerResult.getContent();
        // 4. configure the response object
        response.setContent(entityStream);
        response.setStatusCode(HttpStatusCode.OK.getStatusCode());
        response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString());
    }

    /** Fetch the single entity matching the given key predicates.
     * @throws ODataApplicationException with status 404 if no row matches. */
    private Entity readEntity(EdmEntitySet edmEntitySet, List<UriParameter> keyPredicates) throws ODataApplicationException {
        Query query = getQueryByName(edmEntitySet.getName());
        for (final UriParameter key : keyPredicates) {
            // Key values arrive as text; presumably Clause.equal coerces them
            // to the column type - TODO confirm.
            query.where(Clause.equal(key.getName(), key.getText()));
        }
        ResultSet table = query.fetch();
        // Previously this called Optional.get() unchecked, which threw a raw
        // NoSuchElementException for a missing key. Map that case to a 404.
        return toEntity(
                table.stream().findFirst().orElseThrow(() ->
                        new ODataApplicationException("Entity not found.",
                                HttpStatusCode.NOT_FOUND.getStatusCode(),
                                java.util.Locale.ENGLISH)),
                table);
    }

    @Override
    public void createEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType requestFormat, ContentType responseFormat) throws ODataApplicationException, ODataLibraryException {
        throw new NotImplemented();
    }

    @Override
    public void updateEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType requestFormat, ContentType responseFormat) throws ODataApplicationException, ODataLibraryException {
        throw new NotImplemented();
    }

    @Override
    public void deleteEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo) throws ODataApplicationException, ODataLibraryException {
        throw new NotImplemented();
    }

    @Override
    public void init(OData odata, ServiceMetadata serviceMetadata) {
        this.odata = odata;
        this.serviceMetadata = serviceMetadata;
    }
}
<file_sep>package gulik.urad.where;
/** I am the clause that always matches. Access me through instance(). */
public class True extends Clause {
    private static final True INSTANCE = new True();

    /** The shared singleton instance. */
    public static True instance() {
        return INSTANCE;
    }
}
<file_sep>package gulik.urad.tableColumn;
/** I am a table column that holds String values.
 * No String-specific behaviour yet; the type tag lives on TableColumn. */
public class StringColumn extends TableColumn {
}
<file_sep>package gulik.urad.value;
import java.util.Date;
import java.util.Objects;
/** I wrap a single cell value. Subclasses wrap specific Java types
 * (String, Integer, Date, Boolean); NULL represents an absent value. */
public abstract class Value implements Comparable {
    // Shared null sentinel. Made final: it is a singleton constant and was
    // previously mutable by any caller.
    public static final Value NULL = new NullValue();

    /** Wrap the given Java object in the matching Value subclass.
     * @throws RuntimeException for unsupported types. */
    public static Value of(Object something) {
        if (null == something) return NULL;
        if (something instanceof String) {
            return new StringValue((String) something);
        }
        if (something instanceof Integer) {
            return new IntegerValue((Integer) something);
        }
        if (something instanceof Date) {
            // TODO: ... timestamp?
            return new DateValue((Date) something);
        }
        if (something instanceof Boolean) {
            return new BooleanValue((Boolean) something);
        }
        throw new RuntimeException("Can't make a Value out of " + Objects.toString(something));
    }

    /** The wrapped Java object. */
    public abstract Object value();

    /** Compare by the wrapped values.
     * NOTE(review): returns 1 for a null argument rather than throwing
     * NullPointerException as the Comparable contract specifies; kept as-is
     * for backward compatibility with existing sort code.
     * @throws NotComparable if either wrapped value is not Comparable. */
    public int compareTo(Object x) {
        if (null == x) {
            return 1;
        }
        Value to = (Value) x;
        if (!(value() instanceof Comparable)) {
            throw new NotComparable();
        }
        if (!(to.value() instanceof Comparable)) {
            throw new NotComparable();
        }
        return ((Comparable) value()).compareTo(((Comparable) to.value()));
    }
}
<file_sep>package gulik.dolichos;
import org.apache.olingo.commons.api.edm.EdmEnumType;
import org.apache.olingo.commons.api.edm.EdmType;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.ODataApplicationException;
import org.apache.olingo.server.api.uri.UriInfoResource;
import org.apache.olingo.server.api.uri.UriResource;
import org.apache.olingo.server.api.uri.UriResourcePrimitiveProperty;
import org.apache.olingo.server.api.uri.queryoption.expression.*;
import java.util.List;
import java.util.Locale;
/** I evaluate OData $filter expressions. Currently a skeleton: every visit
 * method throws "not implemented" (HTTP 501). */
public class FilterExpressionVisitor implements ExpressionVisitor<Object> {

    /** The shared failure every unimplemented visit method throws.
     * Extracted to avoid twelve identical copies of this construction. */
    private ODataApplicationException notImplemented() {
        return new ODataApplicationException("TODO: Not implemented",
                HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
    }

    @Override
    public Object visitBinaryOperator(BinaryOperatorKind binaryOperatorKind, Object value, Object t1) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitUnaryOperator(UnaryOperatorKind unaryOperatorKind, Object value) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitMethodCall(MethodKind methodKind, List<Object> list) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitLambdaExpression(String s, String s1, Expression expression) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitLiteral(Literal literal) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitMember(Member member) throws ExpressionVisitException, ODataApplicationException {
        // TODO: support single-segment primitive properties, as in the Olingo
        // tutorial: resolve member.getResourcePath() against the current entity
        // and return the property's value. Complex/navigation paths
        // (e.g. Supplier/Address/City) are out of scope for now.
        throw notImplemented();
    }

    @Override
    public Object visitAlias(String s) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitTypeLiteral(EdmType edmType) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitLambdaReference(String s) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitEnum(EdmEnumType edmEnumType, List<String> list) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }

    @Override
    public Object visitBinaryOperator(BinaryOperatorKind binaryOperatorKind, Object value, List<Object> list) throws ExpressionVisitException, ODataApplicationException {
        throw notImplemented();
    }
}
<file_sep>package gulik.urad.impl;
import java.util.List;
import gulik.demo.NotImplementedException;
import gulik.urad.Query;
import gulik.urad.ResultSet;
import gulik.urad.Table;
import gulik.urad.tableColumn.TableColumn;
/** I provide the Query-building half of the Table interface, so that concrete
 * tables only need to implement naming, column metadata and fetch(). */
public abstract class AbstractTable implements Table {
    public abstract String getName();

    public abstract List<TableColumn> getColumns();

    /** Start a query that selects every column. */
    public Query select() {
        return new Query(this);
    }

    /** Start a query selecting one named column. */
    public Query select(String column1) {
        return new Query(this).select(column1);
    }

    /** Start a query selecting two named columns. */
    public Query select(String column1, String column2) {
        return new Query(this).select(column1).select(column2);
    }

    /** Start a query selecting three named columns. */
    public Query select(String column1, String column2, String column3) {
        return new Query(this).select(column1).select(column2).select(column3);
    }

    /** TODO: not implemented yet. */
    public ResultSet create(ResultSet t) {
        throw new NotImplementedException();
    }

    /** TODO: not implemented yet. */
    public ResultSet update(ResultSet t) {
        throw new NotImplementedException();
    }

    /** TODO: not implemented yet. */
    public ResultSet delete(ResultSet t) {
        throw new NotImplementedException();
    }
}
<file_sep>package gulik.urad.where;
/** I negate the clause I wrap. The second operand slot inherited from O is
 * unused (null), since negation is unary. */
public class Not extends O {
    public Not(O notThis) {
        super(notThis, null);
    }
}
<file_sep>package gulik.urad;
import java.util.List;
import gulik.urad.tableColumn.TableColumn;
/* TODO: Is this just a "Table"?
Do we need to have a differentiation between a table as a ResultSet and a table as a TableDefinition?
If you run a query, what you get back is tabular. Can it be a table? Is there a difference between a table and a query result?
Tables have concrete implementations. Query results are generated. I guess they share the same interface.
*/
public interface Table {
    /** @return the table's name. */
    public String getName();

    /** @return the table's column definitions. */
    public List<TableColumn> getColumns();

    /* Create a new query. */
    public Query select();
    public Query select(String column1);
    public Query select(String column1, String column2);
    public Query select(String column1, String column2, String column3);

    /* To get results from a query, call Query.fetch(). This method is used internally as the implementation of Query.fetch(). */
    public ResultSet fetch(Query q);

    /** Insert rows; NOTE(review): mutation semantics not shown here — see implementations. */
    public ResultSet create(ResultSet t);
    public ResultSet update(ResultSet t);
    public ResultSet delete(ResultSet t);
}<file_sep>package gulik.dolichos;
import org.apache.olingo.commons.api.edmx.EdmxReference;
import org.apache.olingo.server.api.OData;
import org.apache.olingo.server.api.ODataHttpHandler;
import org.apache.olingo.server.api.ODataResponse;
import org.apache.olingo.server.api.ServiceMetadata;
import org.apache.olingo.server.api.debug.DebugInformation;
import org.apache.olingo.server.api.debug.DefaultDebugSupport;
import gulik.urad.Table;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Base servlet that wires an Olingo OData handler to a set of {@link Table}
 * backed entity sets. Subclasses only provide the entity sets via
 * {@link #getEntitySets()}.
 */
public abstract class ODataServlet extends HttpServlet {
    private static final long serialVersionUID = 202204221052L;
    private static final Logger log = Logger.getLogger(ODataServlet.class.getCanonicalName());

    ODataHttpHandler handler;

    /** @return the tables exposed as OData entity sets. */
    protected abstract List<Table> getEntitySets();

    @Override
    public void init(ServletConfig config) throws ServletException {
        // GenericServlet.init(ServletConfig) stores the config; without this
        // call, getServletConfig()/getInitParameter() would return null.
        super.init(config);
        List<Table> entitySets = getEntitySets();
        OData odata = OData.newInstance();
        ServiceMetadata edm = odata.createServiceMetadata(new UradEdmProvider(entitySets), new ArrayList<EdmxReference>());
        this.handler = odata.createHandler(edm);
        handler.register(new PrintStacktraceDebugSupport());
        // For EntitySets
        handler.register(new DolichosEntityCollectionProcessor(entitySets));
        // For individual entities.
        handler.register(new DolichosEntityProcessor(entitySets));
    }

    @Override
    protected void service(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        try {
            // let the handler do the work
            handler.process(req, resp);
        } catch (RuntimeException e) {
            log.log(Level.SEVERE, "Server Error occurred in ExampleServlet", e);
            throw new ServletException(e);
        }
    }

    // TODO: only for development.
    /** Logs the stack trace of any OData error before delegating to the default debug output. */
    private static class PrintStacktraceDebugSupport extends DefaultDebugSupport {
        @Override
        public ODataResponse createDebugResponse(final String debugFormat, final DebugInformation debugInfo) {
            Exception e = debugInfo.getException();
            if (null != e) {
                log.log(Level.WARNING, "OData error:", debugInfo.getException());
            }
            return super.createDebugResponse(debugFormat, debugInfo);
        }
    }
}
<file_sep>package gulik.urad.where;
/** Logical OR clause combining two sub-clauses. */
public class Or extends O {
    public Or(Clause left, Clause right) {
        super(left, right);
    }
}
| d0db3a2651d276825d98c288389f72a16c0accf6 | [
"Markdown",
"Java"
] | 31 | Java | mikevdg/urad | 10cfd87bca7f23011a2b72f229bac075393ca7d4 | 300c0a18bb48ea4e07f5a0516133014be18b956a |
refs/heads/master | <repo_name>EesTiK/Daniil<file_sep>/Pygame_Helilaid.py
# module imports
import pygame

# constants, classes and functions are defined here
FPS = 60
d = 0  # frame counter shown in the window title

# initialisation: create the window, the clock, etc.
pygame.init()
pygame.display.set_mode((600, 400))
clock = pygame.time.Clock()
# update the display once before the loop if objects must be visible early
pygame.display.update()

# main loop
while True:
    # frame-rate cap
    clock.tick(FPS)
    # event-processing loop
    for i in pygame.event.get():
        if i.type == pygame.QUIT:
            exit()
    # the window caption doubles as a frame counter
    pygame.display.set_caption(str(d))
    d += 1
    # --------
    # object updates and other per-frame work go here
    # --------
    # refresh the screen
    pygame.display.update()
#-----------------------------------
import pygame
FPS = 60
WIN_WIDTH = 500
WIN_HEIGHT = 140
x1=0 #координата х
y1=30 #координата y
d=80 #длина квадрата
w=80 #ширина квадрата
WHITE = (255, 255, 255)#цвет белый
ORANGE = (255, 150, 100)#цвет оранжевый
pygame.init()
clock = pygame.time.Clock()
sc=pygame.display.set_mode((WIN_WIDTH,WIN_HEIGHT))#создаем окно игры
k=2 #константа направления движения, тут чтобы движение было вначале направо
while 1:
sc.fill(WHITE)#удаление фигуры
a=pygame.event.get() # присваиваем переменную, чтобы окно закрылось без зависаний
for j in a:#закрытие окна
if j.type==pygame.QUIT:
pygame.quit()
pygame.draw.rect(sc, ORANGE, (x1,y1,d,w))#рисуем квадрат
pygame.display.update()
if x1==WIN_WIDTH-80:# если кооснулось правой стенки то
k=-2 #движение в обратную сторону
x1+=k
elif x1==0: # движение влево после того как координата х = 0
k=2
x1+=k
else: # движение в правую сторону от левой стенки
x1+=k
clock.tick(FPS)<file_sep>/README.md
# Daniil
Helilai_Arlamov.docx-Ошибки и исключения.
Обработка исключений.python
Helilaid_Arlamov_61119.py-Решение задач в Python
SPTVR19 Helilaid,Arlamov.py-Решение задач в Python
string_Arlamov&Hellilaid.py-Решение задач в Python
Социальные сети.docx-Реферат
<file_sep>/SPTVR19 Helilaid,Arlamov.py
#1
n = 1
for i in range(1000,100,1003):
int(input(n))
#2
a = 1
b = 1
for i in range(a,b):
a <= 55
a++b
b = b+2
print("b")
#3
x = 90
while x >=0:
print(x)
x =- 5
#4
a = 1
b = 1
for i in range(a,b):
int(input(a,b))
a<=55
a++b
#5
a = 2
while a < 9999:
print(a)
a = 2*a-1
#6
a = -166
while a < 100:
if -100 < a < -9 or 9 < a <100:
print(a)
a = 2*(9-1)+200
#7
h = int(input('n ='))
i = 1
b = 1
while i<=n:
b*=1
i+=1
print(b)
#8
n = int(input('n = 1'))
b = 10
i = 1
for i in range(1,999):
if b == n%:
print(i)
#9
a = int(input())
i = int(a ** 0.5)
while i >= 1:
if(a % i == 0 and i != 1):
print("Число составное")
break
if(i == 1):
print("Число простое")
i = i - 1
#10
a = -166
i = 0
while a < 100:
if(a > -100 and (a < -9 or a > 9)):
print(a)
a = 2 * a + 200
i = i + 1
#11
neChet = 3
chet = 2
i = 3
while i <=20:
if(i%2 != 0):
neChet= 2 * neChet - 2
print(neChet)
else:
chet = 2 * chet -2
print(chet)
i = i + 1
#12
n = int(input())
sum = 0
while(n != 0):
sum = sum + (n % 10)
n = n //10
print(sum)
<file_sep>/Helilaid_Arlamov_61119.py
#1
a = int(input())
b = int(input())
if a < b:
print(a)
elif b < a:
print(b)
else:
print("Они равны")
#2
x = int(input())
if x > 0:
print('+')
elif x < 0:
print('-')
else:
print(x)
#3
x1 = int(input())
y1 = int(input())
x2 = int(input())
y2 = int(input())
if (x1 + y1 + x2 + y2) % 2 == 0:
print('YES')
else:
print('NO')
#4
year = int(input())
if (year % 4 == 0) and (year % 100 != 0) or (year % 400 == 0):
print('YES')
else:
print('NO')
#5
a = int(input())
b = int(input())
c = int(input())
if b >= a <= c:
print(a)
elif a >= b <= c:
print(b)
else:
print(c)
#6
# Read three integers and print how many of them are pairwise equal:
# 3 if all equal, 2 if exactly one pair matches, 0 otherwise.
# Bug fix: the problem marker was fused with the first statement
# ("#6a = int(input())"), which commented out the assignment and left
# `a` undefined, raising a NameError on the `if` below.
a = int(input())
b = int(input())
c = int(input())
if a == b == c:
    print(3)
elif a == b or b == c or a == c:
    print(2)
else:
    print(0)
#7
x1 = int(input())
y1 = int(input())
x2 = int(input())
y2 = int(input())
if x1 == x2 or y1 == y2:
print('YES')
else:
print('NO')
#8
x1 = int(input())
y1 = int(input())
x2 = int(input())
y2 = int(input())
if abs(x1 - x2) <= 1 and abs(y1 - y2) <= 1:
print('YES')
else:
print('NO')
#9
x1 = int(input())
y1 = int(input())
x2 = int(input())
y2 = int(input())
if abs(x1 - x2) == abs(y1 - y2):
print('YES')
else:
print('NO')
#10
x1 = int(input())
y1 = int(input())
x2 = int(input())
y2 = int(input())
if abs(x1 - x2) == abs(y1 - y2) or x1 == x2 or y1 == y2:
print('YES')
else:
print('NO')
#11
x1 = int(input())
y1 = int(input())
x2 = int(input())
y2 = int(input())
dx = abs(x1 - x2)
dy = abs(y1 - y2)
if dx == 1 and dy == 2 or dx == 2 and dy == 1:
print('YES')
else:
print('NO')
#12
n = int(input())
m = int(input())
k = int(input())
if k < n * m and ((k % n == 0) or (k % m == 0)):
print('YES')
else:
print('NO')
#13
n = int(input())
m = int(input())
x = int(input())
y = int(input())
if n > m:
n, m = m, n
if x >= n / 2:
x = n - x
if y >= m / 2:
y = m - y
if x < y:
print(x)
else:
print(y)
#14
import random
a = random.randint(1,50)
h = int(input('Сколько попыток будет?:'))
g = 0
while h > 0:
N = int(input('Введи число от 1 до 50:'))
if N <= 50:
if N == a:
print('Ты угадал')
a = random.randint(1,50)
g += 1
elif N < a:
print('У тебя ставка ниже')
else:
print('У тебя ставка выше')
else:
print('Вы ввели не корректное чило')
h += 1
print('Осталось',h,'попыток')
print('Загаданно было',a)
print('Угадал', g,'раз')
<file_sep>/Helilaid.py
class Student():
def __init__(self ,name):
self.__name = name
self.__secname = ' '
def get_name(self):
print(self.__name)
def set_name(self, name):
self.__name = name
def get_secname(self):
print(self.__secname)
def set_secname(self, name):
self.__secname = name
a = Student('lil')
a.set_secname('lol')
a.get_name()
a.get_secname()
a.set_name('lil')
a.set_secname('lol')
a.get_name()
a.get_secname()
b = Student('Danja')
b.get_name()
<file_sep>/Helilaid2.py
class myClass():
def method1(self):
print("Daniil")
def method2(self,someString):
print("Helilaid")
def main():
# exercise the class methods
c = myClass ()
c.method1()
c.method2(" Testing is fun")
if __name__== "__main__":
main()<file_sep>/Helilaid1.py
class Vehicle(object):
"""docstring"""
def __init__(self, color, doors, tires):
"""Constructor"""
self.color = color
self.doors = doors
self.tires = tires
def brake(self):
"""
Машина остановилась
"""
return "Тормозит"
def drive(self):
"""
Машина едет
"""
return " Я еду!"<file_sep>/Vikhrova_Helilaid.py
#1 zadanie.
r, c = input().split()
arr = list()
for _ in range(int(r)):
arr.append(input().split())
m = max(e for r in arr for e in r)
for i, r in enumerate(arr):
if m in r:
print(i, r.index(m))
break
#2 zadanie
n = int(input())
a = [['.'] * n for i in range(n)]
for i in range(n):
a[i][i] = '*'
a[n // 2][i] = '*'
a[i][n // 2] = '*'
a[i][n - i - 1] = '*'
for row in a:
print(' '.join(row))
#3 zadanie
n = int(input('n = '))
m = int(input('m = '))
r = [['.*'[(j + i) % 2] for j in range(m)] for i in range(n)]
print(r)
#4
n = int(input())
a = [[abs(i - j) for j in range(n)] for i in range(n)]
for row in a:
print(' '.join([str(i) for i in row]))
#5
n = int(input())
a = [[0] * n for i in range(n)]
for i in range(n):
a[i][n - i - 1] = 1
for i in range(n):
for j in range(n - i, n):
a[i][j] = 2
for row in a:
for elem in row:
print(elem, end=' ')
print()
#6
def swap_columns(a, i, j):
for k in range(len(a)):
a[k][i], a[k][j] = a[k][j], a[k][i]
n, m = [int(i) for i in input().split()]
a = [[int(j) for j in input().split()] for i in range(n)]
i, j = [int(i) for i in input().split()]
swap_columns(a, i, j)
print('\n'.join([' '.join([str(i) for i in row]) for row in a]))
#7
n,m= map(int,input().split())
nev = []
res =[]
for i in range(1,n*m+1):
nev.append(str(i))
if len (nev) == m:
res.append(nev)
nev = []
for i,j in enumerate(res):
if i%2 !=0:
j = reversed(j)
print(*j)
<file_sep>/helilaid.py
class Dog ():
age = 5
name = "Charly"
weight = 15
toyterier = Dog()
toyterier.age = 5
toyterier.name = "Charly"
toyterier.weight = 15
#----------------------------------------------------
class Person():
name = "Jegor"
cellphone = "2281337"
email = "<EMAIL>"
#-----------------------------------------------------
class bird():
color = " "
name = " "
breed = " "
mybird = bird ()
mybird.color = "black"
mybird.name = "Sunny"
mybird.breed = "Sun Conure"
#-------------------------------------------------------------
class Hero:
power = " "
name = " "
agility = " "
speed = " "
#------------------------------------------------------------------
class Peson:
name = ""
money = 0
bob = Person ()
bob.name = "bob"
bob.money = 0
print (bob.name, "has", "dollars.")
#---------------------------------------------------------------
x = 0
y = 0
mainhero = Hero()
mainhero.x = 3939
mainhero.y = 3030
mainhero.name = "Blaka"
mainhero.power = 39
mainhero.agility = 90
mainhero.speed = 20
#--------------------------------------------
class Person:
name = " "
money = 0
bob = Person ()
name = "Bla"
money = 1080
#--------------------------------------------
class cow ():
age = 17
name = "Bob"
weight = 61
#------------------------------------------
import time
class Cat():
def __init__ (self, name, color, weight):
self.name = name
self.color = color
self.weight = weight
def info(self):
time.sleep(0.5)
print( "Имя: " + self.name )
time.sleep(0.7)
print( "Цвет: " + self.color )
time.sleep(0.8)
print( "Вес: " + str( self.weight ) )
def meow(self):
print( self.name + ", лег(-ла) спать..." )
time.sleep(0.6)
print( self.name + " мурлычит..." )
cot = Cat( "Котёнок - Царь","Брюнет",300)
cot.info()
cot.meow()
| 391cbe10ffcb87f8b5cd1c281ceff429b0de5eaf | [
"Markdown",
"Python"
] | 9 | Python | EesTiK/Daniil | fff8fdbde19d039fe71a270ee7088f3f881f97d5 | be10b025c61b52620570103b8cf5d6f845fea8af |
refs/heads/master | <repo_name>fugazi/ember.js<file_sep>/packages/ember-routing/tests/render_test.js
// QUnit tests for Ember.Route#render: which controller outlet receives the
// rendered template, and which view class backs it.
module("Rendering in the router");

// With no options, render() targets the applicationController's outlet.
test("By default, `render` renders into the application's outlet", function() {
  expect(1);

  var router = Ember.Router.extend({
    applicationController: Ember.Controller.extend({
      // Fires when render() assigns the view; asserts the template used.
      viewDidChange: Ember.observer(function() {
        equal(this.get('view.templateName'), 'posts');
      }, 'view')
    }).create(),

    namespace: {},

    root: Ember.Route.extend({
      template: 'application',

      posts: Ember.Route.extend({
        template: 'posts'
      })
    })
  }).create();

  var postsRoute = router.get('states.root.states.posts');

  Ember.run(function() {
    postsRoute.render(router);
  });
});

// The template name 'post' should resolve to namespace.PostView when present.
test("If a view class for a given template exists, use it and update it with the relevant templateName", function() {
  expect(2);

  var PostView = Ember.Object.extend();

  var router = Ember.Router.extend({
    applicationController: Ember.Controller.extend({
      viewDidChange: Ember.observer(function() {
        ok(this.get('view') instanceof PostView, "The view is an instance of PostView");
        equal(this.get('view.templateName'), 'post');
      }, 'view')
    }).create(),

    namespace: {
      PostView: PostView
    },

    root: Ember.Route.extend({
      template: 'application',

      posts: Ember.Route.extend({
        template: 'post'
      })
    })
  }).create();

  var postsRoute = router.get('states.root.states.posts');

  Ember.run(function() {
    postsRoute.render(router);
  });
});

// Even when the root route declares no template, rendering lands in 'application'.
test("The default template to render into is `application`", function() {
  expect(1);

  var router = Ember.Router.extend({
    applicationController: Ember.Controller.extend({
      viewDidChange: Ember.observer(function() {
        equal(this.get('view.templateName'), 'posts');
      }, 'view')
    }).create(),

    namespace: {},

    root: Ember.Route.extend({
      posts: Ember.Route.extend({
        template: 'posts'
      })
    })
  }).create();

  var postsRoute = router.get('states.root.states.posts');

  Ember.run(function() {
    postsRoute.render(router);
  });
});

// Both the source template ('other') and the destination outlet ('app',
// i.e. appController) can be overridden via render() options.
test("You can override the template to render and the template to render into", function() {
  expect(1);

  var router = Ember.Router.extend({
    appController: Ember.Controller.extend({
      viewDidChange: Ember.observer(function() {
        equal(this.get('view.templateName'), 'other');
      }, 'view')
    }).create(),

    namespace: {},

    root: Ember.Route.extend({
      posts: Ember.Route.extend({
        template: 'posts'
      })
    })
  }).create();

  var postsRoute = router.get('states.root.states.posts');

  Ember.run(function() {
    postsRoute.render(router, { into: 'app', template: 'other' });
  });
});
| 7f154c4515723b89cd9f75a9ede23db930722a3a | [
"JavaScript"
] | 1 | JavaScript | fugazi/ember.js | abc7f61b2b064bbabd622e7bbf63668883286b58 | db030575b177869d0175cffd00d4ca5cfb4e6023 |
refs/heads/master | <file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import random
import socket

from PIL import Image, ImageDraw, ImageFont
from flask import Flask, send_file, request, jsonify
from flask_restful import reqparse, abort, Api, Resource, url_for
app = Flask(__name__)
api = Api(app)
class Movimiento(Resource):
    # GET /movimiento — choose the robot's next move based on the ultrasonic
    # sensor distances supplied as query-string parameters.
    def get(self):
        # distances reported by the ultrasonic sensor
        frente = request.args.get('frente')
        inicio = request.args.get('inicio')
        derecha = request.args.get('derecha')
        izquierda = request.args.get('izquierda')
        atras = request.args.get('atras')  # NOTE(review): read but never used below — confirm intent
        # NOTE(review): "izquerda" and "avazar" look like typos for
        # "izquierda"/"avanzar", but they are part of the wire format the
        # client parses, so they must not be changed unilaterally.
        foo = ["izquerda", "derecha", "frente"]
        # The move direction is currently chosen at random, ignoring the sensors.
        return {"avazar": {random.choice(foo): "50"},
                "inicio": inicio,
                "imagen": "https://pruebamoodle.ga/imagen?derecha="+derecha+"&izquierda="+izquierda+"&frente="+frente
                }
api.add_resource(Movimiento, '/movimiento')
@app.route("/imagen")
def serveImage():
    """Render a 300x300 crosshair image annotated with the sensor distances.

    Query parameters: ``frente``, ``derecha`` and ``izquierda`` are drawn on
    the image; ``atras`` is accepted but currently unused. Returns the JPEG,
    or a JSON error when a required argument is missing.
    """
    frente = request.args.get('frente')
    derecha = request.args.get('derecha')
    izquierda = request.args.get('izquierda')
    atras = request.args.get('atras')
    try:
        image = Image.new('RGB', (300, 300))
        draw = ImageDraw.Draw(image)
        # crosshair
        draw.line((150, 70, 150, 150), fill=None)
        draw.line((70, 150, 230, 150), fill=None)
        # distance ahead
        draw.text((140, 50), frente, font=None, fill=None)
        # distance to the right
        draw.text((235, 145), derecha, font=None, fill=None)
        # distance to the left
        draw.text((50, 145), izquierda, font=None, fill=None)
        # make sure the scratch directory exists before writing into it
        os.makedirs("./tmp", exist_ok=True)
        image.save("./tmp/img.jpg")
        return send_file("./tmp/img.jpg")
    except Exception:
        # Missing args make draw.text() fail (text=None). The original bare
        # ``except:`` also swallowed SystemExit/KeyboardInterrupt.
        return jsonify({"Error": "Argumentos insuficientes para generar imagen"})
if __name__ == '__main__':
app.run(debug=True)
<file_sep> FLASK_APP=main.py flask run | 84d453a491c3c0278443d28596ee69728244bfb1 | [
"Python",
"Shell"
] | 2 | Python | irving19o/moviles | 135a058610415f6647113934cd2ab7489fee704f | 5febecf97771134b8d0204a4d3bdac440365e9b7 |
refs/heads/master | <repo_name>Suryopambudi/Favorite-Quotes-Apps<file_sep>/src/pages/signin/signin.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams, ToastController, LoadingController } from 'ionic-angular';
import { authService } from '../../services/auth';
/**
* Generated class for the SigninPage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@IonicPage()
@Component({
  selector: 'page-signin',
  templateUrl: 'signin.html',
})
export class SigninPage {

  constructor(public navCtrl: NavController, public navParams: NavParams,
              private authservice: authService,
              private toastCtrl: ToastController,
              private loader: LoadingController) {
  }

  ionViewDidLoad() {
    console.log('ionViewDidLoad SigninPage');
  }

  /**
   * Sign the user in with the email/password from the form and show a toast
   * on failure. Navigation on success is driven by the auth-state listener
   * in app.component.ts.
   *
   * Bug fix: the spinner is now dismissed when the async sign-in settles;
   * the original called loading.dismiss() synchronously, before the request
   * had completed.
   */
  signin(form) {
    let loading = this.loader.create({
      content: 'Please wait...'
    });
    loading.present();
    this.authservice.signin(form.value['email'], form.value['password'])
      .then(() => loading.dismiss())
      .catch(err => {
        loading.dismiss();
        const toast = this.toastCtrl.create({
          message: err.message,
          duration: 3000,
          position: 'bottom'
        });
        toast.present();
      });
  }
}
<file_sep>/src/pages/favorites/favorites.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams, ModalController, ViewController, AlertController,ToastController,ActionSheetController,PopoverController } from 'ionic-angular';
import {QuotePage} from '../quote/quote';
import {Quote} from '../../data/quotes.interface'; //import the quote variable structure
import { QuotesService } from '../../services/quotes';
import { SettingsService } from '../../services/settings';
import { PopoverPage } from '../popover/popover';
import { authService } from '../../services/auth';
/**
* Generated class for the FavoritesPage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@Component({
  selector: 'page-favorites',
  templateUrl: 'favorites.html',
})
export class FavoritesPage {
  // Snapshot of the favorites list, refreshed each time the page is entered.
  favoriteQuotes : Quote[];

  // NOTE(review): "tostCtrl" looks like a typo for "toastCtrl", but it is a
  // public property (potentially referenced from the template) — renaming
  // would be an interface change.
  constructor(public tostCtrl: ToastController,
              public navCtrl: NavController,
              public navParams: NavParams,
              public modalCtrl: ModalController,
              private quotesService: QuotesService,
              private settingsSvc: SettingsService,
              public alertCtrl: AlertController,
              public actionSheetCtrl: ActionSheetController,
              public popoverCtrl: PopoverController,
              private authservice : authService) {
  }

  /** Open the options popover anchored at the triggering event. */
  presentPopover(myEvent){
    let popover = this.popoverCtrl.create(PopoverPage);
    popover.present({
      ev: myEvent
    });
  }

  // Re-read the favorites every time the page becomes active, so changes
  // made on other pages are reflected here.
  ionViewWillEnter() {
    this.favoriteQuotes = this.quotesService.getAllFavoriteQuotes();
  }

  /**
   * Show the quote details modal; if it is dismissed with `true`
   * ("unfavorite"), remove the quote from the favorites list.
   */
  getQuoteDetails(quote: Quote) {
    let modal = this.modalCtrl.create(QuotePage, quote);
    modal.present();
    modal.onDidDismiss((isUnfavorite: boolean) => {
      if(isUnfavorite) this.quotesService.removeQuoteFromFavorites(quote);
    });
  }

  /** Remove a single quote from the favorites list. */
  removeQuoteFromFavorite(quote : Quote){
    this.quotesService.removeQuoteFromFavorites(quote);
  }

  /** CSS class for the quote background, driven by the settings toggle. */
  setBgColor(){
    return this.settingsSvc.isAltBackground()?'altQuoteBackground': 'quoteBackground';
  }

  /** Per-quote action sheet: delete, show details, or cancel. */
  presentActionSheet(quote){
    const actionSheet = this.actionSheetCtrl.create({
      title: 'Quote Options',
      buttons:[
        {
          text: 'Delete',
          role: 'destructive',
          handler: () => {
            this.removeQuoteFromFavorite(quote);
          }
        },
        {
          text: 'ShowQuoteDetail',
          handler: () => {
            this.getQuoteDetails(quote);
          }
        },
        {
          text: 'Cancel',
          role: 'cancel',
          handler: () =>{
            console.log('kensel');
          }
        }
      ]
    });
    actionSheet.present();
  }

  /**
   * Prompt for a new custom quote (name + text) and add it on OK.
   * NOTE(review): PopoverPage has a near-identical copy of this dialog.
   */
  addQuote(q){
    const alert = this.alertCtrl.create({
      title : 'Add New Quote',
      inputs:[
        {
          name: 'person',
          placeholder: 'Name'
        },
        {
          name: 'text',
          placeholder: 'Quotes'
        }
      ],
      buttons:[
        {
          text:'OK',
          handler:data =>{
            this.presentToast(data);
          }
        },
        {
          text:'Cancel',
          role:'cancel',
          handler:() =>{
            console.log('I Change my mind.');
          }
        }
      ]
    });
    alert.present();
  }

  /** Store the entered quote and confirm with a toast. */
  presentToast(data){
    this.quotesService.addQuoteToFavorites(data);
    const toast = this.tostCtrl.create({
      message: 'New Quotes Added',
      duration: 3000,
      position: 'bottom'
    });
    toast.present();
  }

  ionViewDidLoad() {
    console.log('ionViewDidLoad FavoritesPage');
  }
}
<file_sep>/src/pages/quote/quote.ts
import { Component } from '@angular/core';
import { NavController, NavParams, ViewController } from 'ionic-angular';
import {Quote} from '../../data/quotes.interface'; //import the quote variable structure
import { QuotesService } from '../../services/quotes';
/**
* Generated class for the QuotePage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@Component({
  selector: 'page-quote',
  templateUrl: 'quote.html',
})
export class QuotePage {
  // The quote being displayed, passed in via NavParams (modal data).
  quote : Quote;

  constructor(public navCtrl: NavController, public navParams: NavParams, private quotesService : QuotesService, private viewCtrl: ViewController) {
  }

  ngOnInit(){
    this.quote = this.navParams.data;
    console.log(this.quote);
  }

  // NOTE(review): currently a no-op — favoriting is handled by the caller.
  favoriteButton_Click(quote) {
    //this.quotesService.addQuoteToFavorites(quote);
  }

  // Dismiss with `true`: the opener (FavoritesPage) interprets this as
  // "remove this quote from favorites".
  unfavoriteButton_Click() {
    //this.quotesService.removeQuoteFromFavorites(quote);
    this.viewCtrl.dismiss(true);
  }

  // Dismiss with `false`: no change to the favorites list.
  cancelButton_Click(){
    this.viewCtrl.dismiss(false);
  }
}
<file_sep>/src/pages/quotes/quotes.ts
import { Component } from '@angular/core';
import { NavController, NavParams, AlertController} from 'ionic-angular';
import { QuotesService } from '../../services/quotes';
/**
* Generated class for the QuotesPage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@Component({
  selector: 'page-quotes',
  templateUrl: 'quotes.html',
})
export class QuotesPage {
  // Quote group passed in via NavParams (category plus its quotes).
  quote: any;

  constructor(public navCtrl: NavController, public navParams: NavParams ,public quoteService: QuotesService, public alertCtrl: AlertController) {
  }

  /** Confirm, then add the quote to the favorites list. */
  onAddQuote(quote){
    this.confirmAndRun(
      'Add Quote',
      'Are you sure you want to add the quote to favorites?',
      () => this.quoteService.addQuoteToFavorites(quote)
    );
  }

  /** Confirm, then remove the quote from the favorites list. */
  onRemoveQuote(quote){
    this.confirmAndRun(
      'Remove Quote',
      'Are you sure you want to remove the quote to favorites?',
      () => this.quoteService.removeQuoteFromFavorites(quote)
    );
  }

  /**
   * Shared OK/Cancel confirmation dialog; runs `action` when OK is pressed.
   * Extracted because onAddQuote/onRemoveQuote previously duplicated the
   * whole alert construction verbatim.
   */
  private confirmAndRun(title: string, message: string, action: () => void){
    const alert = this.alertCtrl.create({
      title : title,
      message: message,
      buttons:[
        {text:'OK',
          handler:() =>{
            action();
            console.log(this.quoteService)
          }
        },
        {
          text:'Cancel',
          role:'cancel',
          handler:() =>{
            console.log('I Change my mind.');
          }
        }
      ]
    });
    alert.present();
  }

  ionViewDidLoad() {
    console.log('ionViewDidLoad QuotesPage');
    this.quote = this.navParams.data;
    console.log(this.quote)
  }
}
<file_sep>/src/services/quotes.ts
import { Quote } from '../data/quotes.interface';
import { authService } from '../services/auth';
import { Injectable } from "@angular/core";
import { Http,Response } from '@angular/http';
import'rxjs';
@Injectable()
export class QuotesService {
private favoriteQuotes: Quote[] = [];
constructor(public http: Http, public authservice : authService) {
}
addQuoteToFavorites(quote: Quote){
this.favoriteQuotes.push(quote)
}
removeQuoteFromFavorites(quote:Quote){
let counter = this.favoriteQuotes.indexOf(quote)
this.favoriteQuotes.splice(counter,1)
console.log(this.favoriteQuotes)
}
removeAllQuote(){
this.favoriteQuotes.splice(1);
}
isFavorite(quote: Quote){
if(this.favoriteQuotes.indexOf(quote) > -1){
return true
}else{
return false
}
}
getAllFavoriteQuotes(){
return this.favoriteQuotes;
}
storeList(token : string){
const uid = this.authservice.currentUser().uid;
return this.http
.put('https://favoritequotesapp-9c44b.firebaseio.com/' +uid+ '/favquotes.json?auth=' +token, this.favoriteQuotes)
.map((response: Response) => {
return response.json();
});
}
getdata(token){
console.log("fetching data");
const uid = this.authservice.currentUser().uid;
return this.http
.get('https://favoritequotesapp-9c44b.firebaseio.com/' + uid + '/favquotes.json?auth=' + token)
.map((response : Response) => {
this.favoriteQuotes = response.json();
console.log(this.favoriteQuotes);
return this.favoriteQuotes;
});
}
}<file_sep>/src/pages/settings/settings.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams, Toggle, ToastController, Config } from 'ionic-angular';
import { SettingsService } from '../../services/settings';
import { NgForm } from "@angular/forms/src/forms";
/**
* Generated class for the SettingsPage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@IonicPage()
@Component({
  selector: 'page-settings',
  templateUrl: 'settings.html',
})
export class SettingsPage {
  constructor(public navCtrl: NavController, public navParams: NavParams, public settingsSvc : SettingsService, private toastCtrl: ToastController, private config:Config) {
  }

  /** Persist the alternate-background preference from the toggle. */
  onToggle (toggle: Toggle){
    this.settingsSvc.setBackground(toggle.checked);
  }

  /** Current state of the alternate-background preference (for the toggle). */
  isChecked(){
    return this.settingsSvc.isAltBackground();
  }

  /**
   * Apply the tabs-placement and page-transition choices from the form to
   * both the android and ios runtime configs, then confirm with a toast.
   */
  applyConfig(form: NgForm){
    let tabs = form.value.tabs;
    let page = form.value.page;
    this.config.set('android','tabsPlacement',tabs);
    this.config.set('android','pageTransition',page);
    this.config.set('ios','tabsPlacement',tabs);
    this.config.set('ios','pageTransition',page);
    const toast = this.toastCtrl.create({
      message: 'Config has been applied.',
      duration: 3000,
      position: 'bottom'
    });
    toast.present();
  }
}
<file_sep>/src/pages/signup/signup.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams, ToastController } from 'ionic-angular';
import { authService } from '../../services/auth';
/**
* Generated class for the SignupPage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@IonicPage()
@Component({
  selector: 'page-signup',
  templateUrl: 'signup.html',
})
export class SignupPage {
  constructor(public navCtrl: NavController, public navParams: NavParams,
              private authservice : authService,
              private toastCtrl : ToastController) {
  }

  ionViewDidLoad() {
    console.log('ionViewDidLoad SignupPage');
  }

  /**
   * Create an account with the form's email/password; on failure, surface
   * the error in a toast. Navigation on success is handled by the
   * auth-state listener in app.component.ts.
   */
  signup(form){
    this.authservice.signup(form.value['email'],form.value['password']).catch(err => {
      const toast = this.toastCtrl.create({
        message: err.message,
        duration: 3000,
        position: 'bottom'
      });
      toast.present();
    })
  }
}
<file_sep>/src/app/app.component.ts
import { Component,ViewChild } from '@angular/core';
import { Platform } from 'ionic-angular';
import { StatusBar } from '@ionic-native/status-bar';
import { SplashScreen } from '@ionic-native/splash-screen';
import { TabsPage } from '../pages/tabs/tabs';
import { SettingsPage } from '../pages/settings/settings';
import { IonicPage, NavController, NavParams, MenuController, LoadingController} from 'ionic-angular';
import { SigninPage } from '../pages/signin/signin';
import { SignupPage } from '../pages/signup/signup';
import { authService } from '../services/auth';
import { QuotesService } from '../services/quotes';
import firebase from 'firebase';
@Component({
  templateUrl: 'app.html'
})
export class MyApp {
  // Initial page; replaced by the auth-state listener below.
  rootPage:any = TabsPage;
  // Page references exposed to the side-menu template.
  tabsPage = TabsPage;
  settingsPage = SettingsPage;
  signinPage = SigninPage;
  signupPage = SignupPage;
  // Whether a user is currently signed in (drives menu item visibility).
  signin = false;

  @ViewChild('sideMenuContent') nav:NavController

  constructor(platform: Platform, statusBar: StatusBar, splashScreen: SplashScreen,
              private menuCtrl : MenuController, private authservice : authService,
              private quotes: QuotesService, private loader : LoadingController) {
    platform.ready().then(() => {
      // NOTE(review): the Firebase config (API key) is embedded in source;
      // consider moving it to an environment/config file.
      firebase.initializeApp({
        apiKey : "<KEY>",
        authDomain : "favoritequotesapp-9c44b.firebaseapp.com"
      });
      // On sign-in: load the user's saved favorites, then show the tabs.
      // On sign-out: route back to the sign-in page.
      firebase.auth().onAuthStateChanged(user => {
        if(user){
          this.signin = true;
          let loading = this.loader.create({
            content: 'Please wait...'
          });
          loading.present();
          this.authservice.currentUser().getIdToken().then(
            (token: string) => {
              this.quotes.getdata(token).subscribe(
                () => {
                  console.log('berhasil');
                },
                error => {
                  console.log('gagal');
                }
              )
            }
          );
          // NOTE(review): the spinner is dismissed here, before the async
          // getdata() call above completes — confirm whether this is intended.
          loading.dismiss();
          this.nav.setRoot(TabsPage);
        }
        else{
          this.signin = false;
          this.nav.setRoot(SigninPage);
          this.menuCtrl.close();
        }
      })
      // Okay, so the platform is ready and our plugins are available.
      // Here you can do any higher level native things you might need.
      statusBar.styleDefault();
      splashScreen.hide();
    });
  }

  /** Side-menu navigation: switch the root page and close the menu. */
  onLoad(page : any) {
    this.nav.setRoot(page);
    this.menuCtrl.close();
  }

  /** Sign the current user out; routing is handled by onAuthStateChanged. */
  logout(){
    this.authservice.logout();
  }
}
<file_sep>/src/pages/popover/popover.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams,ViewController,AlertController, ToastController, LoadingController } from 'ionic-angular';
import { QuotesService } from '../../services/quotes';
import { authService } from '../../services/auth';
/**
* Generated class for the PopoverPage page.
*
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
@IonicPage()
@Component({
  selector: 'page-popover',
  templateUrl: 'popover.html',
})
export class PopoverPage {

  constructor(public navCtrl: NavController, public navParams: NavParams, public viewCtrl: ViewController, public alertCtrl: AlertController, public toastCtrl: ToastController, private quotesService: QuotesService, private authservice : authService, private loader : LoadingController) {
  }

  /** Dismiss the popover. */
  close(){
    this.viewCtrl.dismiss();
  }

  /** Store a user-entered quote and confirm with a toast. */
  presentToast(data){
    this.quotesService.addQuoteToFavorites(data);
    const toast = this.toastCtrl.create({
      message: 'New Quotes Added',
      duration: 3000,
      position: 'bottom'
    });
    toast.present();
  }

  /** Prompt for a new custom quote (name + text) and add it on OK. */
  addQuote(q){
    const alert = this.alertCtrl.create({
      title : 'Add New Quote',
      inputs:[
        {
          name: 'person',
          placeholder: 'Name'
        },
        {
          name: 'text',
          placeholder: 'Quotes'
        }
      ],
      buttons:[
        {
          text:'OK',
          handler:data =>{
            this.presentToast(data);
          }
        },
        {
          text:'Cancel',
          role:'cancel',
          handler:() =>{
            console.log('I Change my mind.');
          }
        }
      ]
    });
    alert.present();
  }

  /**
   * Persist the current favorites list to Firebase, then close the popover.
   * Fixes: uses getIdToken() (non-deprecated, consistent with
   * app.component.ts) instead of getToken(), and dismisses the spinner only
   * once the request completes — the original dismissed it immediately,
   * before the async save had finished.
   */
  saveFirebase(){
    const loading = this.loader.create({
      content: 'Please wait...'
    });
    loading.present();
    this.authservice.currentUser().getIdToken()
      .then(
        (token:string) => {
          this.quotesService.storeList(token)
            .subscribe(
              () => {
                loading.dismiss();
                this.showMessage('Your Favorite Quotes has been saved');
              },
              error => {
                loading.dismiss();
                this.showMessage(error);
              }
            )
        }
      );
    this.close();
  }

  /**
   * Empty the favorites list and persist the empty list to Firebase.
   * Same getIdToken()/spinner fixes as saveFirebase().
   */
  clearAll(){
    const loading = this.loader.create({
      content: 'Please wait...'
    });
    loading.present();
    this.quotesService.removeAllQuote();
    this.authservice.currentUser().getIdToken()
      .then(
        (token:string) => {
          this.quotesService.storeList(token)
            .subscribe(
              () => {
                loading.dismiss();
                this.showMessage('Your Favorite Quotes has been cleared out');
              },
              error => {
                loading.dismiss();
                this.showMessage(error);
              }
            )
        }
      );
  }

  /** Show a short bottom toast (extracted from four duplicated blocks). */
  private showMessage(message){
    const toast = this.toastCtrl.create({
      message : message,
      duration : 3000,
      position : 'bottom'
    });
    toast.present();
  }

  ionViewDidLoad() {
    console.log('ionViewDidLoad PopoverPage');
  }
}
| 83e83aff4f5eb5432d21f6e0ad20c2c2008535ec | [
"TypeScript"
] | 9 | TypeScript | Suryopambudi/Favorite-Quotes-Apps | 1efac8117a7a7e84f9e9006c46fa6f4a24cb06c5 | 5a0c57e02f8708917ed577cf56f75fe6543198f7 |
refs/heads/main | <file_sep>import React, { useState, useEffect } from "react";
import './HeaderHome.css';
import { TwitchEmbed, TwitchEmbedLayout } from "twitch-player";
function HeaderHome(props) {
return (
<div className="header-home">
{props.live ? <Live data={props} /> : <Counter data={props} />}
</div>
)
}
function Counter(props) {
return (
<>
<div className="container-header-vid">
<video autoPlay loop>
<source src={process.env.PUBLIC_URL + '/vid1.mp4'} type="video/mp4"/>
</video>
</div>
<div className={props.data.about ? "counter-container open-about" : "counter-container"}>
<div className="counter-item days">
<span className="counter-days">{props.data.date.days}</span>
<span>Days</span>
</div>
<div className="separator">:</div>
<div className="counter-item hours">
<span className="counter-hours">{props.data.date.hours}</span>
<span>hours</span>
</div>
<div className="separator">:</div>
<div className="counter-item minutes">
<span className="counter-minutes">{props.data.date.minutes}</span>
<span>minutes</span>
</div>
</div>
<div className={props.data.about ? "before-live open-about" : "before-live"}>Before live</div>
</>
)
}
function Live(props) {
const [hasStream, setHasStream] = useState(false);
useEffect(() => {
if(!hasStream){
new TwitchEmbed('twitch-embed', {
width: 1280,
height: 720,
channel: 'm_bloodymary',
layout: TwitchEmbedLayout.VIDEO
});
setHasStream(true);
}
}, [hasStream]);
return (
<>
<div className="texture-bg-live bg-grain" style={{ backgroundImage: `url(${process.env.PUBLIC_URL + '/grain-texture-compress.png'})` }}></div>
<div className="texture-bg-live bg-capture" style={{ backgroundImage: `url(${process.env.PUBLIC_URL + '/Capture4.JPG'})` }}></div>
<div className="container-header-live" >
<div className="live-btn">Live</div>
<div id="twitch-embed"></div>
{false &&
<video controls loop>
<source src={process.env.PUBLIC_URL + '/vid1.mp4'} type="video/mp4"/>
</video>
}
</div>
</>
)
}
export default HeaderHome;
<file_sep>import React from 'react';
import './Navbar.css';
function Navbar(props) {
return (
<div className={props.about ? "container-navbar open-about" : "container-navbar"}>
<div className="container-logo">
<img src={process.env.PUBLIC_URL + '/wave_logo.png'}/>
</div>
<div className="container-right-nav">
<div className="container-donate" >
<a href="#" target="_blank">Donate</a>
</div>
<div className="container-about" onClick={() => props.clickAbout()} >
<h2>About</h2>
</div>
</div>
</div>
)
}
export default Navbar;
<file_sep>import React from 'react';
import './App.css';
import Navbar from './components/Navbar';
import HeaderHome from './components/HeaderHome';
// import HeaderHome from './components/HeaderHome';
// import { BrowserRouter as Router, Switch, Route } from 'react-router-dom';
function ContentAbout(){
return (
<div className="content-about-container">
<div className="content-about">
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. In libero orci, dignissim at risus ac, <b>ornare vehicula tortor.</b> Vestibulum pharetra dictum dictum. Nunc eu ante id nibh lobortis blandit. Donec at urna ut odio dignissim maximus id vel tellus. Fusce sit amet erat lorem. Pellentesque vulputate ut dui ut mattis. <b>Integer a commodo tortor</b>, id accumsan sapien. Sed dictum lacinia sodales. Fusce congue nulla urna, in pretium ligula volutpat eu.</p>
<br/><br/>
<p>Donec odio quam, eleifend et ornare quis, facilisis vel mauris. Sed blandit felis a dignissim vulputate. Fusce faucibus, quam commodo elementum dapibus, diam nisl facilisis velit, at fermentum tellus ligula in tortor. Aenean ac metus ultricies, efficitur nunc quis, pellentesque ligula. In massa urna, fermentum a orci quis, luctus elementum enim. </p>
</div>
</div>
)
}
export default class App extends React.Component {
constructor(){
super();
this.state = {
about: false,
timeLeft: {},
live : false
}
}
clickAbout = () => {
this.setState({ about : !this.state.about });
}
addZero = (result) => {
if(result < 10){
result = "0" + result;
}
return result;
}
calculateTimeLeft = () => {
let year = new Date().getFullYear();
const difference = +new Date(`06/10/${year}`) - +new Date();
let timeLeft;
if (difference > 0) {
timeLeft = {
days: this.addZero(Math.floor(difference / (1000 * 60 * 60 * 24))),
hours: this.addZero(Math.floor((difference / (1000 * 60 * 60)) % 24)),
minutes: this.addZero(Math.floor((difference / 1000 / 60) % 60))
//seconds: Math.floor((difference / 1000) % 60)
};
}else{
timeLeft = {
days: 0,
hours: 0,
minutes: 0
//seconds: 0
}
this.setState({
live : true
});
}
return timeLeft;
}
componentDidMount() {
this.setState({
timeLeft : this.calculateTimeLeft()
});
}
componentDidUpdate() {
if(!this.state.live){
const timer = setTimeout(() => {
this.setState({
timeLeft : this.calculateTimeLeft()
});
}, 1000);
return () => clearTimeout(timer);
}
}
render(){
return (
<div className={this.state.about ? "display-about" : "display-content"}>
<Navbar about={this.state.about} clickAbout={this.clickAbout}/>
<ContentAbout />
<HeaderHome about={this.state.about} date={this.state.timeLeft} live={this.state.live}/>
</div>
);
}
}
| 5d580ded62202aa9fa2f49d78e099434eeaaf85d | [
"JavaScript"
] | 3 | JavaScript | timothejoubert/macao | 83f3774a45a23653986b3ce8e7a6bedd6ae20192 | f47583345f0efabbd1f624e06b44ad5452fbc7b8 |
refs/heads/master | <repo_name>BlockLatticeOrg/explorer-api<file_sep>/tests/routers/conftest.py
import pytest
from fastapi.testclient import TestClient
from explorer_api.main import app
@pytest.fixture
def test_client():
return TestClient(app)
<file_sep>/explorer_api/main.py
from fastapi import FastAPI
from . import __version__
from .routers import accounts, blocks
app = FastAPI(docs_url=False, redoc_url=False)
v1 = FastAPI(
title="BlockLattice.org Explorer API",
version=__version__,
)
v1.include_router(accounts.router, prefix="/accounts", tags=["accounts"])
v1.include_router(blocks.router, prefix="/blocks", tags=["blocks"])
app.mount("/v1", v1)
<file_sep>/tests/routers/test_accounts.py
import pytest
from explorer_api.utils import Caller
example_address = "nano_3x4ui45q1cw8hydmfdn4ec5ijfdqi4ryp14g4ayh71jcdkwmddrq7ca9xzn9"
example_hash = "CD0A56F7729EBBF62A81235AF34D1D69362F1FCD2542734BD8FEBD9D2EB6C130"
account_history_expected_response = {
"account": example_address,
"history": [],
"previous": "previous",
}
account_info_expected_response = {
"account_version": 2,
"balance": 100000,
"block_count": 1000,
"confirmation_height": 999,
"confirmation_height_frontier": example_hash,
"frontier": example_hash,
"modified_timestamp": 10000,
"open_block": example_hash,
"pending": 0,
"representative": example_address,
"representative_block": example_hash,
"weight": 100000,
}
delegators_expected_response = {"delegators": {example_address: 10000}}
pending_expected_response = {
"blocks": {example_hash: {"amount": 1000, "source": example_address}}
}
@pytest.mark.parametrize(
"endpoint,expected_response",
[
("history", account_history_expected_response),
("info", account_info_expected_response),
("delegators", delegators_expected_response),
("pending", pending_expected_response),
],
)
def test_accounts_endpoints(monkeypatch, test_client, endpoint, expected_response):
monkeypatch.setattr(Caller, "call", lambda _: expected_response)
response = test_client.get(f"/v1/accounts/{example_address}/{endpoint}")
assert response.status_code == 200
assert response.json() == expected_response
<file_sep>/Makefile
linter:
poetry run pre-commit install && poetry run pre-commit run -a -v
tester:
poetry run pytest tests<file_sep>/Dockerfile
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.8-slim
EXPOSE 8000
WORKDIR /app
COPY ./ .
RUN pip3 install poetry==1.0.10
RUN poetry config virtualenvs.create false
RUN poetry install
# Copy startup script
COPY ./assets/entrypoint.sh /usr/sbin/entrypoint.sh
RUN chmod +x /usr/sbin/entrypoint.sh
ENTRYPOINT "/usr/sbin/entrypoint.sh"<file_sep>/explorer_api/rpc.py
from .utils import rpc
@rpc
def account_history(account: str, page: int = 0, count: int = 100) -> dict:
return {
"action": "account_history",
"account": account,
"count": count,
"offset": count * page,
}
@rpc
def account_info(
account: str,
representative: bool = True,
weight: bool = True,
pending: bool = True,
) -> dict:
return {
"action": "account_info",
"account": account,
"representative": representative,
"weight": weight,
"pending": pending,
}
@rpc
def block_info(_hash: str) -> dict:
return {
"action": "block_info",
"json_block": True,
"hash": _hash,
}
@rpc
def delegators(account: str) -> dict:
return {
"action": "delegators",
"account": account,
}
@rpc
def pending(account: str, count: int = 100, source: bool = True) -> dict:
return {
"action": "pending",
"account": account,
"count": count,
"source": source,
}
<file_sep>/tests/test_utils.py
import mock
import pytest
from fastapi import HTTPException
from explorer_api.utils import Caller, rpc
class TestCaller:
@mock.patch("explorer_api.utils.Caller._post")
def test_call_fail(self, mock_post):
mock_post.return_value.json.return_value = {"error": "Error message"}
with pytest.raises(HTTPException):
Caller.call(dict())
@mock.patch("explorer_api.utils.Caller._post")
def test_call(self, mock_post):
expected_response = {"example": "value"}
mock_post.return_value.json.return_value = expected_response
assert Caller.call(dict()) == expected_response
@mock.patch("httpx.post")
def test_post(self, mock_httpx_post):
Caller._post(data := {"key": "value"})
mock_httpx_post.assert_called_once_with(
Caller.uri, data=data, headers=Caller.headers
)
class TestRPCDecorator:
@mock.patch("explorer_api.utils.Caller.call")
def test_rpc(self, mock_call):
@rpc
def f(arg):
return arg
f(data := {"key": "value"})
mock_call.assert_called_once_with(data)
<file_sep>/explorer_api/models.py
from typing import Dict, List, Optional
from pydantic import BaseModel, validator
class HistoryModel(BaseModel):
type: str
account: str
amount: int
local_timestamp: int
height: int
hash: str
class AccountHistoryOutput(BaseModel):
account: str
history: Optional[List[HistoryModel]]
previous: Optional[str]
@validator("history", pre=True, always=True)
def validate_history(cls, value):
return [] if not value else value
class AccountInfoOutput(BaseModel):
account_version: int
balance: int
block_count: int
confirmation_height: int
confirmation_height_frontier: str
frontier: str
modified_timestamp: int
open_block: str
pending: int
representative: str
representative_block: str
weight: int
class BlockInfoContentModel(BaseModel):
balance: str
destination: Optional[str]
previous: Optional[str]
signature: str
class BlockInfoOutput(BaseModel):
amount: int
balance: int
block_account: str
confirmed: bool
contents: BlockInfoContentModel
height: int
local_timestamp: int
class DelegatorsOutput(BaseModel):
delegators: Dict[str, int]
class BlockPendingModel(BaseModel):
amount: int
source: Optional[str]
class PendingOutput(BaseModel):
blocks: Dict[str, BlockPendingModel]
<file_sep>/explorer_api/utils.py
import json
import httpx
from fastapi import HTTPException
from . import settings
def rpc(function):
def wrapper(*args, **kwargs):
return Caller.call(function(*args, **kwargs))
return wrapper
class Caller:
uri = settings.NANO_NODE_URL
headers = {"Content-type": "application/json", "Accept": "application/json"}
@classmethod
def call(cls, data: dict) -> dict:
response = cls._post(json.dumps(data)).json()
if error := response.get("error"):
raise HTTPException(status_code=400, detail=error)
return response
@classmethod
def _post(cls, data: str) -> httpx.Response:
return httpx.post(cls.uri, data=data, headers=cls.headers)
<file_sep>/README.md
[](https://circleci.com/gh/BlockLatticeOrg/explorer-api)
# Explorer API
The Explorer API is one of the parts of the [BlockLattice.org](https://blocklattice.org/) ecosystem. It's an API to extract basic information from [Nano](https://nano.org/) blocks and accounts.
If you want to contribute to this project, this document will show you how to setup this application locally so you can test on your machine and improve upon it.
# Installation
## Docker-composer builder
Automatically build via Shell (Unix / Linux)
Build and deploy:
```bash
# ./build-docker.sh
```
Destroy:
```bash
# ./destroy-docker.sh
```
## Run Manually
In order to run this API, you must have [Python 3.8+](https://www.python.org/downloads/) and [Poetry](https://python-poetry.org/) installed.
### Install dependencies
Enter project's Poetry shell:
```bash
$ poetry shell
```
Install project dependencies:
```bash
$ poetry install
```
### Run local tests
Copy `local.env` to `.env`:
```bash
$ cp local.env .env
```
Run tests with Pytest:
```bash
$ pytest .
```
All tests should pass.
## Running
### Running the API locally
Run the app with `uvicorn`
```bash
$ uvicorn explorer_api.main:app --reload
```
Now the project is running on [http://127.0.0.1:8000](http://127.0.0.1:8000), and the docs can be accessed at [http://127.0.0.1:8000/v1/docs](http://127.0.0.1:8000/v1/docs).
### Using docs
Within the docs, you can test the API with real data as long as you setup a node url. In order to do that, you must configure an IP on your `.env` file:
```bash
NANO_NODE_IP=<real-node-ip>
```
If you don't have a remote or local node, you can learn how to setup your own Nano node [here](https://docs.nano.org/running-a-node/overview/).
_Note: instead of running with IPv6 `[::1]`, you should use IPv4 `0.0.0.0` when running locally._
After setting up a node, you now can test directly from the browser at [http://127.0.0.1:8000/v1/docs](http://127.0.0.1:8000/v1/docs), where you can use the endpoints and request real data from the network. It's important to note that if your node is still not fully synced, you may not be able to extract updated data.
### Contributing
When adding a new feature or refactoring some code, you must create a new branch to upload it.
```bash
$ git checkout -b <new-feature>
```
Before every commit, run the automatic tester and linter:
```bash
$ make tester
$ make linter
```
If you want to check if the commit will pass:
```bash
$ pre-commit
```
<file_sep>/destroy-docker.sh
#!/bin/bash
# VERSION
version='1.0'
# OUTPUT VARS
TERM=xterm
red=`tput setaf 1`
green=`tput setaf 2`
yellow=`tput setaf 3`
bold=`tput bold`
reset=`tput sgr0`
# Check if running as root
if [ "$EUID" -ne 0 ]
then echo "${red}Please run as root: ${reset}${bold}sudo ./destroy-docker.sh${reset}"
exit
fi
echo ""
echo "When proceeding, the explorer-api docker container and image ${red}will be deleted.${reset}"
echo ""
read -r -p "Continue? [y/N] " response
response=${response,,} # tolower
if [[ $response =~ ^(yes|y) ]]; then
echo "Stopping Container..."
docker container stop explorer-api
echo "Removing Container..."
docker container rm explorer-api
echo "Removing image..."
docker image rm blocklattice.org/explorer-api
# Check errors
if [ $? -ne 0 ]; then
echo "${red}It seems errors were encountered. ${reset}"
exit 2
else
echo "Done, destroyed."
fi
else
echo "Cancelled."
fi
<file_sep>/tests/routers/test_blocks.py
from explorer_api.utils import Caller
example_address = "nano_3x4ui45q1cw8hydmfdn4ec5ijfdqi4ryp14g4ayh71jcdkwmddrq7ca9xzn9"
example_hash = "CD0A56F7729EBBF62A81235AF34D1D69362F1FCD2542734BD8FEBD9D2EB6C130"
expected_response = {
"amount": 10000,
"balance": 20000,
"block_account": example_address,
"confirmed": True,
"contents": {
"balance": "1000000",
"destination": example_address,
"previous": example_hash,
"signature": example_hash * 2,
},
"height": 1000,
"local_timestamp": 40000000,
}
def test_blocks_endpoint(monkeypatch, test_client):
monkeypatch.setattr(Caller, "call", lambda _: expected_response)
response = test_client.get(f"/v1/blocks/{example_hash}")
assert response.status_code == 200
assert response.json() == expected_response
<file_sep>/docker-compose.yml
version: '3.7'
services:
explorer-api:
build:
context: .
container_name: explorer-api
image: blocklattice.org/explorer-api
restart: unless-stopped
network_mode: host<file_sep>/tests/test_rpc.py
from explorer_api import rpc, utils
class TestRPC:
example_address = (
"nano_3x4ui45q1cw8hydmfdn4ec5ijfdqi4ryp14g4ayh71jcdkwmddrq7ca9xzn9"
)
example_hash = "CD0A56F7729EBBF62A81235AF34D1D69362F1FCD2542734BD8FEBD9D2EB6C130"
def test_account_history(self, monkeypatch):
monkeypatch.setattr(utils.Caller, "call", lambda x: x)
response = rpc.account_history(self.example_address, 5, 2)
expected_response = {
"action": "account_history",
"account": self.example_address,
"count": 2,
"offset": 10,
}
assert response == expected_response
def test_account_info(self, monkeypatch):
monkeypatch.setattr(utils.Caller, "call", lambda x: x)
response = rpc.account_info(self.example_address, True, False, True)
expected_response = {
"action": "account_info",
"account": self.example_address,
"representative": True,
"weight": False,
"pending": True,
}
assert response == expected_response
def test_block_info(self, monkeypatch):
monkeypatch.setattr(utils.Caller, "call", lambda x: x)
response = rpc.block_info(self.example_hash)
expected_response = {
"action": "block_info",
"json_block": True,
"hash": self.example_hash,
}
assert response == expected_response
def test_delegators(self, monkeypatch):
monkeypatch.setattr(utils.Caller, "call", lambda x: x)
response = rpc.delegators(self.example_address)
expected_response = {
"action": "delegators",
"account": self.example_address,
}
assert response == expected_response
def test_pending(self, monkeypatch):
monkeypatch.setattr(utils.Caller, "call", lambda x: x)
response = rpc.pending(self.example_address, 5, False)
expected_response = {
"action": "pending",
"account": self.example_address,
"count": 5,
"source": False,
}
assert response == expected_response
<file_sep>/build-docker.sh
#!/bin/bash
# VERSION
version='1.0'
# OUTPUT VARS
TERM=xterm
red=`tput setaf 1`
green=`tput setaf 2`
yellow=`tput setaf 3`
bold=`tput bold`
reset=`tput sgr0`
# FLAGS & ARGUMENTS
quiet='false'
verbose='true'
# Check if running as root
if [ "$EUID" -ne 0 ]
then echo "${red}Please run as root: ${reset}${bold}sudo ./build-docker.sh${reset}"
exit
fi
# PRINT INSTALLER DETAILS
[[ $quiet = 'false' ]] && echo "${green} -----------------------${reset}"
[[ $quiet = 'false' ]] && echo "${green}${bold} Blocklattice.org - Explorer API ${version}${reset}"
[[ $quiet = 'false' ]] && echo "${green} -----------------------${reset}"
[[ $quiet = 'false' ]] && echo ""
# VERIFY TOOLS INSTALLATIONS
docker -v &> /dev/null
if [ $? -ne 0 ]; then
echo "${red}Docker is not installed. Please follow the install instructions for your system at https://docs.docker.com/install/.${reset}";
exit 2
fi
docker-compose --version &> /dev/null
if [ $? -ne 0 ]; then
echo "${red}Docker Compose is not installed. Please follow the install instructions for your system at https://docs.docker.com/compose/install/.${reset}"
exit 2
fi
if [[ $quiet = 'false' ]]; then
if [[ $verbose = 'false' ]]; then
docker-compose up -d
else
docker-compose --verbose up -d
fi
else
docker-compose up -d &> /dev/null
fi
# Check errors
if [ $? -ne 0 ]; then
echo "${red}It seems errors were encountered while spinning up the containers. ${reset}"
exit 2
fi
# CHECK API INITIALIZATION
[[ $quiet = 'false' ]] && echo ""
[[ $quiet = 'false' ]] && printf "=> ${yellow}Waiting for Explorer API to fully initialize... "
while ! curl -sL localhost:8000 &> /dev/null; do sleep 1; done
[[ $quiet = 'false' ]] && printf "${green}Done! Open in your browser: http://localhost:8000${reset}\n\n"
<file_sep>/explorer_api/routers/accounts.py
from typing import Optional
from fastapi import APIRouter
from .. import rpc
from ..models import (
AccountHistoryOutput,
AccountInfoOutput,
DelegatorsOutput,
PendingOutput,
)
router = APIRouter()
@router.get("/{account}/history", response_model=AccountHistoryOutput)
async def account_history(
account: str, page: Optional[int] = 0, count: Optional[int] = 100
):
return rpc.account_history(account, page=page, count=count)
@router.get("/{account}/info", response_model=AccountInfoOutput)
async def account_info(
account: str,
representative: Optional[bool] = True,
weight: Optional[bool] = True,
pending: Optional[bool] = True,
):
return rpc.account_info(
account, representative=representative, weight=weight, pending=pending
)
@router.get("/{account}/delegators", response_model=DelegatorsOutput)
async def delegators(account: str):
return rpc.delegators(account)
@router.get("/{account}/pending", response_model=PendingOutput)
async def pending(
account: str, count: Optional[int] = 100, source: Optional[bool] = True
):
return rpc.pending(account, count=count, source=source)
<file_sep>/explorer_api/settings.py
from prettyconf import config
NANO_NODE_URL = "http://" + config("NANO_NODE_IP") + ":7076"
<file_sep>/assets/entrypoint.sh
uvicorn explorer_api.main:app --reload --host 0.0.0.0 --port 8000
tail -f /dev/null<file_sep>/explorer_api/routers/blocks.py
from fastapi import APIRouter
from .. import rpc
from ..models import BlockInfoOutput
router = APIRouter()
@router.get("/{block_hash}", response_model=BlockInfoOutput)
async def block_info(block_hash: str):
return rpc.block_info(block_hash)
<file_sep>/pyproject.toml
[tool.poetry]
name = "explorer-api"
version = "1.0.0"
description = ""
authors = ["<NAME> <<EMAIL>>"]
[tool.poetry.dependencies]
python = "^3.8"
fastapi = "^0.61.1"
httpx = "^0.14.3"
uvicorn = "^0.11.8"
prettyconf = "^2.1.0"
[tool.poetry.dev-dependencies]
pytest = "^5.2"
pytest-cov = "^2.10.1"
black = "^20.8b1"
isort = "^5.5.2"
mock = "^4.0.2"
flake8 = "^3.8.3"
requests = "^2.24.0"
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
| d5767df7ffc811a2072e26f89a54fef48d057b30 | [
"YAML",
"Markdown",
"TOML",
"Makefile",
"Python",
"Dockerfile",
"Shell"
] | 20 | Python | BlockLatticeOrg/explorer-api | cb356bdaaff51b71bfde9e8f90a6f534c44f0625 | b302ac9844c797a738ff99a7ef2223d779d896d9 |
refs/heads/master | <repo_name>xingchenhua/ssh2<file_sep>/MyBatis03/src/com/neusoft/util/MyBatisutil.java
package com.neusoft.util;
import java.io.IOException;
import java.io.InputStream;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import com.neusoft.bean.User;
public class MyBatisutil {
public static SqlSession getSession() {
SqlSession session=null;
try {
String res = "SqlMapConfig.xml";
//读配置文件得到一个输入流
InputStream is = Resources.getResourceAsStream(res);
//生成SqlSessionFactory对象
SqlSessionFactory sf = new SqlSessionFactoryBuilder().build(is);
//得到一个Session
session = sf.openSession();
} catch (IOException e) {
e.printStackTrace();
}
return session;
}
}
<file_sep>/MyBatis03/src/com/neusoft/mapper/UserMapper.java
package com.neusoft.mapper;
import com.neusoft.bean.QueryEntity;
import com.neusoft.bean.User;
public interface UserMapper {
public User findUserByName(User user);
public User findUserByQueryEntity(QueryEntity entity);
}
| cbbaf7b3123d62a63f8334136d1d9ada9465881e | [
"Java"
] | 2 | Java | xingchenhua/ssh2 | fe175a7b7467b196e7c0eb595de7a413d308b090 | 4617d5f9cce2c016076a6a8466a4db2a88ba2b16 |
refs/heads/master | <file_sep>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "multi-lookup.h"
#include "util.c"
#include <pthread.h>
#include <sys/time.h>
#include "Queue.h"
#include <unistd.h>
#define MAXARGS 15
#define BUFFSIZE 1025
#define INPUTFS "%1024s"
#define NUM_RQTHREADS 5
#define NUM_RTHREADS 10
#define MAXFILES 10
#define MAX_IP_LENGTH INET6_ADDRSTRLEN
void* RequestFnc(void* argument)
{
//INIT LOCAL VARIABLES TO MAKE LIFE EASIER
struct Request *requestor = argument;
char website[BUFFSIZE];
char* payload;
FILE* inFile;
FILE* Log;
queue* q = requestor->Globop->queue;
int pushIsGood = 0;
int threadNum;
int touchedFiles = 0;
int actualhelp = 0;
int HelpFile = 0;
//int KLTFBNF = 0;
int currFile = requestor->currentFile;
int ENDFILE = requestor->Globop->lastFileindex;
//printf("ENDFILE = %d\n",ENDFILE);
pthread_mutex_t* queueBlock;
pthread_mutex_t* serviceBlock;
//SET THE VARIABLES PERTAINNING TO THE STRUCT PASSED IN
threadNum = requestor->ID;
touchedFiles = requestor->touched;
queueBlock = requestor->Globop->queueBlock;
serviceBlock = requestor->Globop->readBlock;
Log = requestor->Log;
/*
pthread_mutex_lock(queueBlock);
printf("I have the lock bitch! ID # %d\n\n",threadNum);
printf("THREAD ID: %d\n", threadNum);
printf("Current file number: %d\n",currFile);
printf("ENDFILE #: %d\n", ENDFILE);
printf("LOG POINTER: %p\n\n",Log);
printf("I am going to unlock the lock bitch! ID # %d\n\n",threadNum);
pthread_mutex_unlock(queueBlock);
*/
//BEGIN CRITICAL DANGER ZONE
while(currFile < ENDFILE)
{
//printf("Thread %d is switching to file %d from %d. \n",threadNum,currFile-1,currFile);
//printf("Thread %d is switching to a file pointer of %p from %p. \n",threadNum,requestor->Globop->listOfiles[currFile-1],requestor->Globop->listOfiles[currFile]);
if(requestor->Globop->listOfiles[currFile] != NULL)
{
//touchedFiles++;
actualhelp = 0;
//KLTFBNF++;
inFile = requestor->Globop->listOfiles[currFile];
while(fscanf(inFile,INPUTFS,website) > 0) //READING FROM THE FILE...
{
while(pushIsGood == 0)
{
pthread_mutex_lock(queueBlock);//Lock the queue!
//printf("Request thread %d has locked the queue\n",threadNum);
if(!queue_is_full(q))
{
payload = malloc(BUFFSIZE);
strncpy(payload,website,BUFFSIZE);
//printf("Adding %s to the Queue...\n",website);
queue_push(q,payload);
pushIsGood = 1;
actualhelp = 1;
//printf("Request thread %d has unlocked the queue\n\n",threadNum);
pthread_mutex_unlock(queueBlock);//unlock the queue!
}
else
{
//printf("Queue's full bitch!\n\n");
pthread_mutex_unlock(queueBlock);//unlock the queue!
pushIsGood = 0;
}
//printf("Request thread %d has unlocked the queue\n",threadNum);
}
pushIsGood = 0;
}
}
if(actualhelp == 1)
{
HelpFile++;
}
if(touchedFiles < ENDFILE - 1)
{
//printf("Loppin! \n");
touchedFiles++;
if(currFile == ENDFILE - 1)
{
currFile = 0;
}
else
{
currFile ++;
}
}
else
{
currFile++;
}
//if(requestor->Globop->fileRefrence[])
}
pthread_mutex_lock(serviceBlock);
if(Log != NULL)
{
if(HelpFile <= ENDFILE)
{
fprintf(Log,"Requester %d has serviced %d files. \n",threadNum,HelpFile);
}
else
{
fprintf(Log,"Requester %d has serviced %d files. MORE THAN FILES? \n",threadNum, HelpFile);
}
}
pthread_mutex_unlock(serviceBlock);
return NULL;
}
//BEGIN RESOVLER FUNCT
void* ResolveFnc(void* argument)
{
//LOCAL VARIABLE DECLARATION
struct Resolve *resolver = argument;
FILE* inFile;
queue* q;
char IPstr[INET6_ADDRSTRLEN];
char* payloadHst;
char website[BUFFSIZE];
int threadNum;
int ReqWpulse;
pthread_mutex_t* queueBlock;
pthread_mutex_t* resolveWrite;
//QUALITY OF LIFE ASSIGNMENTS
q = resolver->Globop->queue;
inFile = resolver->output;
threadNum = resolver->ID;
queueBlock = resolver->Globop->queueBlock;
resolveWrite = resolver->Globop->writeBlock;
ReqWpulse = *resolver->Globop->allRequestfinish;
//printf("Status before root change. %d\n", *resolver->Globop->allRequestfinish);printf("Is there a requestor that has a pulse? %d\n", allReqFinish);
//usleep(100000);
//printf("Status of requestors after a long wait.%d\n", *resolver->Globop->allRequestfinish);
while((!queue_is_empty(q)) || (ReqWpulse == 1))
{
//printf("Is there a requestor that has a pulse? %d\n", ReqWpulse);
ReqWpulse = *resolver->Globop->allRequestfinish;
//printf("Is there a requestor that has a pulse? %d\n", ReqWpulse);
if(!queue_is_empty(q))
{
//MUTEX FOR QUEUE IS HERE
pthread_mutex_lock(queueBlock);//Lock the queue!-----------
//printf("Resolve thread %d has locked the queue\n",threadNum);
payloadHst = queue_pop(q);
strncpy(website,payloadHst,BUFFSIZE);
if(payloadHst == NULL)
{
pthread_mutex_unlock(queueBlock);
}
else
{
//strncpy(website,payloadHst,BUFFSIZE);
//printf("Resolve thread %d has unlocked the queue\n",threadNum);
pthread_mutex_unlock(queueBlock);
//pthread_mutex_lock(resolveWrite);
//printf("Resolve thread %d has locked the file\n\n",threadNum);
if(dnslookup(website,IPstr,INET6_ADDRSTRLEN)==UTIL_FAILURE)
{
//printf("Resolve thread %d has the file #2\n",threadNum);
strncpy(IPstr,"",INET6_ADDRSTRLEN);
fprintf(stderr,"You dun goofed...\n");
}
pthread_mutex_lock(resolveWrite);
//printf("Resolve thread %d has locked the file\n\n",threadNum);
//printf("Resolve thread %d has printed %s:%s \n",threadNum,website,IPstr);
fprintf(inFile, "%s:%s By resolver # %d\n",website,IPstr,threadNum);
//printf("Resolve thread %d has wrote %s:%s \n\n",threadNum,website,IPstr);
pthread_mutex_unlock(resolveWrite);//UNLOCK THE FILE ---------
//printf("Resolve thread %d has unlocked the file\n\n",threadNum);
free(payloadHst);
}
}
}
//free(payloadHst);
//printf("Resolve thread %d has FINISHED.\n",threadNum);
return NULL;
}
int main(int argc, char* argv[])
{
struct timeval begin,end;
gettimeofday(&begin,NULL);
//ERROR CHECK FOR TOO MANY FILES
if(argc < 6)
{
fprintf(stderr, "Not enough arguments passed in, only 6 are needed. You put in: %d\n", (argc));
//printf("i'm in here!\n");
}
//OPEN THE TWO LOG FILES & DECLARE LOCAL VARIABLES
FILE* Beet = fopen(argv[3],"w"); //serviced.txt
if(Beet)
{
//printf("Requester thread service results will be written to: %s\n", argv[3]);
}
//printf("ARGV[3] = %s\n", argv[3]);
//printf("ARGV[4] = %s\n", argv[4]);
FILE* Potato = fopen(argv[4],"w");//results.txt
if(Potato)
{
//printf("Resolver thread results will be written to: %s\n", argv[4]);
}
FILE* inFiles[MAXFILES];
int totalFile = argc - 5;
int lastFile = argc -5;
int helpIndex = 0;
int newFileindex = 5;
int remainingFiles = totalFile;
int totalRequest = atoi(argv[1]);
int totalResolve = atoi(argv[2]);
int REQCheckPulse = 1;
int ErrorChk;
//printf("Total number of files %d\n", totalFile);
//printf("Number of LAST file %d\n", lastFile);
//printf("Total number of requestors asked for %d\n", totalRequest);
printf("Total number of resolvers asked for %d\n", totalResolve);
//printf("I'm initalizing the file array!\n");
for(int k = 0;k < totalFile; k++)
{
inFiles[k] = fopen(argv[k+5],"r");//k = The beginning of the file arguements from the terminal.
//printf("%p\n",inFiles[k]); //prints the memory location of each file index in the array.
}
//UNIVERSAL STRUCT INIT AND MUTEX INIT
struct Globo theClown;
pthread_mutex_t webQueue;
pthread_mutex_t resultsLock;
pthread_mutex_t servicedLock;
//pthread_mutex_t muteXx;
pthread_mutex_init(&webQueue,NULL);
pthread_mutex_init(&resultsLock,NULL);
pthread_mutex_init(&servicedLock,NULL);
//printf("QUEUE BLOCK INIT %d\n",pthread_mutex_init(&webQueue,NULL)); //Queue mutex init
//printf("WRITING BLOCK INIT %d\n",pthread_mutex_init(&resultsLock,NULL)); //Resolver log mutex
//printf("SERVICED BLOCK INIT %d\n",pthread_mutex_init(&servicedLock,NULL)); //Requester log mutex
theClown.inCompleteF = remainingFiles; //Set to completed files to 0
theClown.totalInfiles = totalFile; //Set to total input files given.
theClown.newFilepoint = newFileindex; //Set the index of what will be the new files after request assign.
theClown.helpPoint = helpIndex;//Sets an index to the first open file of the file array...
theClown.lastFileindex = lastFile;//index to the end of the file array.
theClown.allRequestfinish = &REQCheckPulse;
//theClown.mutex = &muteXx;//sets the pointer of the struct mutex pointer to here...
theClown.writeBlock = &resultsLock;//sets the pointer of the struct mutex pointer to here...
theClown.readBlock = &servicedLock;//sets the pointer of the struct mutex pointer to here...
theClown.queueBlock = &webQueue;//sets the pointer of the struct mutex pointer to here...
//QUEUE INIT & ERROR CHECK
//printf("Queue assignment\n");
queue Queue;
ErrorChk = queue_init(&Queue, BUFFSIZE);
if(ErrorChk== -1)
{
printf("Queue is borked!%d\n",ErrorChk);
}
//printf("Slaps top of queue:\n This baby can hold so many god dam nodes... %d\n",ErrorChk); //will print the size of the queue (in nodes) you just created.
//ASSIGN THE QUEUE TO THE STRUCT.
theClown.queue = &Queue;
//OPEN ALL OF THE FILES THE USER GAVE
//printf("I'm initalizing the file array PART TWO!\n");
for(int k = 0;k < totalFile; k++)
{
inFiles[k] = fopen(argv[k+5],"r");//k = The beginning of the file arguements from the terminal.
//printf("FILE NAME: %s\n",argv[k+5]);
//printf("Opening the files! Memory location: %p\n",inFiles[k]); //prints the memory location of each file index in the array.
}
for(int o = 0; o < totalFile; o++)
{
theClown.listOfiles[o] = inFiles[o];
//printf("%p\n",theClown.listOfiles[o]); //prints the memory location of each file index in the array.
}
//CREATE THE REQUEST THREAD POOL
struct Request requestors[totalRequest];
pthread_t requesterThreads[totalRequest];
for(int i = 0; i < totalRequest; i++)
{
requestors[i].Globop = &theClown;
if(i > totalFile)
{
requestors[i].currentFile = 0;
}
else
{
requestors[i].currentFile = i;
}
requestors[i].ID = i;
requestors[i].touched = 0;
requestors[i].Log = Beet;
ErrorChk = pthread_create(&(requesterThreads[i]),NULL,RequestFnc,&(requestors[i]));
if(ErrorChk != 0)
{
fprintf(stderr, "Something went wrong while creating the Request Thread,returned: %d\n",ErrorChk);
}
else
{
//printf("Yee haw! %d\n",i);
}
}
//END REQUEST THREAD POOL
//BEGIN CREATING THE RESOLVER POOL
//usleep(398123);
struct Resolve resolvers[NUM_RTHREADS];
pthread_t resolverThreads[NUM_RTHREADS];
for(int i = 0; i <totalResolve; i++)
{
resolvers[i].Globop = &theClown;
resolvers[i].output = Potato;
resolvers[i].ID = i;
ErrorChk = pthread_create(&(resolverThreads[i]),NULL,ResolveFnc,&(resolvers[i]));
if(ErrorChk)
{
fprintf(stderr, "Something went wrong while creating the Resolve Threads,returned: %d\n",ErrorChk);
}
else
{
//printf("RESOLVER Yee haw! %d\n",i);
}
}
//END RESOLVER POOL CREATION
//WAIT FOR ALL OF THE REQUEST THREADS TO FINISH
int t;
for(t=0; t< totalRequest; t++)
{
//printf("Waiting on the requestors...\n");
//printf("Request Thread %d has FINISHED %d.\n" ,t,pthread_join(requesterThreads[t],NULL));
pthread_join(requesterThreads[t],NULL);
}
//printf("Is there a requestor that has a pulse? BEFORE %d\n", REQCheckPulse);
//printf("Is there a requestor that has a pulse? BEFORE w/ GLOBAL INT %d\n", *theClown.allRequestfinish);
REQCheckPulse = 0;//All your base belong to us.
//printf("Is there a requestor that has a pulse? AFTER %d\n", REQCheckPulse);
//printf("Is there a requestor that has a pulse? AFTER w/ GLOBAL INT %d\n", *theClown.allRequestfinish);
//WAIT FOR ALL OF THE RESOLVER THREADS TO FINISH
int u;
for(u=0; u < totalResolve;u++)
{
//printf("Waiting on the resolvers...\n");
//printf("Resolve Thread %d has FINISHED %d.\n" ,u,pthread_join(resolverThreads[u],NULL));
pthread_join(resolverThreads[u],NULL);
}
//for(int g; )
//usleep(2891010);
pthread_mutex_destroy(&webQueue);
pthread_mutex_destroy(&resultsLock);
pthread_mutex_destroy(&servicedLock);
//pthread_mutex_destroy(&muteXx);
/*for(int t = 0; t< totalFile; t++)
{
fclose(inFiles[t]);
}*/
fclose(Beet);
fclose(Potato);
queue_cleanup(&Queue);
for(int g =0; g < totalFile;g++)
{
if(inFiles[g] != NULL)
{
fclose(inFiles[g]);
}
else
continue;
}
//free(inFiles);
gettimeofday(&end,NULL);
long seconds = (end.tv_sec -begin.tv_sec);
long micro = ((seconds * 1000000) + end.tv_usec) - (begin.tv_usec);
printf("This process took %ld seconds and %ld microseconds to finish\n",seconds,micro);
return 0;
}
<file_sep>Read this to compile and run this program!
Step one: Change your directory to this file (PA3_ZACH_SCHWARZ)
Step two: Enter 'make clean' into your terminal.
Step three: Enter 'make' into your terminal.
Step four: Enter ./multi-lookup <number of requester threads> <number of resolver threads> serviced.txt results.txt input/names1.txt [... up to input/names5.txt] (the program reads the two thread counts first, then the requester log file, the resolver results file, and one or more input name files).
FILES REQUIRED TO RUN:
Queue.c / Queue.h: Both provided by the amazing Chris and Co
multi-lookup.c / multi-lookup.h: Program that I have made, is the heart of this assignment, creates the many threads and assures that no memory was harmed in the making of this assignment.
util.c / util.h: Is the DNS lookup function of this program, required to return the IP addresses of the host website.
INPUT FILES: names1.txt names2.txt names3.txt names4.txt names5.txt: Without these, I don't really want to put you through the process of making these 5 files... Just use them... <file_sep>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "Queue.h"
#include "util.h"
#include <pthread.h>
#include <semaphore.h>
/* Shared state for the whole run; one instance lives on main's stack and
 * every requester/resolver thread receives a pointer to it. */
struct Globo
{
queue* queue; /* bounded hostname queue shared by requesters (producers) and resolvers (consumers) */
int totalInfiles; /* number of input files given on the command line */
int inCompleteF; /* files remaining to be processed (initialised to totalInfiles in main) */
int newFilepoint; /* argv/listOfiles index of the next file to hand out after initial assignment */
int helpPoint; /* index of the first still-open file in listOfiles */
int lastFileindex; /* index one past the last input file */
int ID; /* NOTE(review): never initialised in main — confirm whether it is used */
int* allRequestfinish; /* -> REQCheckPulse in main; set to 0 once every requester has joined */
FILE* listOfiles [10]; /* open handles for the input files (MAXFILES capped at 10 here) */
//pthread_mutex_t* mutex;
pthread_mutex_t* writeBlock; /* guards the resolver results file (resultsLock) */
pthread_mutex_t* readBlock; /* guards the requester "serviced" log (servicedLock) */
pthread_mutex_t* queueBlock; /* guards the shared queue (webQueue) */
};
/* Per-thread argument block for one requester thread (filled in by main). */
struct Request
{
int touched; /* initialised to 0 by main — presumably a work counter; confirm in RequestFnc */
int ID; /* thread index in the requester pool (0..totalRequest-1) */
int currentFile; /* input-file index this thread starts on (0 when i > totalFile) */
FILE* inputF; /* NOTE(review): not set by main — presumably assigned inside RequestFnc */
FILE* Log; /* shared "serviced" log file (Beet, argv[3]) */
struct Globo* Globop; /* pointer to the shared Globo state */
};
/* Per-thread argument block for one resolver thread (filled in by main). */
struct Resolve
{
FILE* output; /* shared results file (Potato, argv[4]) */
struct Globo* Globop; /* pointer to the shared Globo state */
int ID; /* thread index in the resolver pool (0..totalResolve-1) */
}; | 2a10c3d71752479f30ab82d5de88c7acb4437f24 | [
"C",
"Text"
] | 3 | C | AtomiCode007/Operating_Systems | da632eb1ea4a2ecc5e3b5bfc6a026fe0ce737149 | ed74af5969953bfcc5f7051f5cae90cf449fd287 |
refs/heads/master | <file_sep># jakema1.github.io<file_sep>let myimage = document.querySelector('img')
// Toggle the displayed photo between the two bundled images on each click.
myimage.onclick = function () {
    const current = myimage.getAttribute('src');
    const next = current === 'images/248922-106.jpg'
        ? 'images/331961-106.jpg'
        : 'images/248922-106.jpg';
    myimage.setAttribute('src', next);
}
// Write a personalised greeting into the page's first <h1>.
function displayname(name) {
    const heading = document.querySelector('h1');
    heading.textContent = 'hello ' + name;
}
// Ask the visitor for their name, persist it, and greet them immediately.
function setusername() {
    const username = prompt('please input your name');
    localStorage.setItem('name', username);
    displayname(username);
}
// On load: greet a returning visitor, otherwise ask a new one for their name.
const storedName = localStorage.getItem('name');
if (storedName) {
    displayname(storedName);
} else {
    setusername();
}
// The page button lets the visitor change the stored name at any time.
const changeNameButton = document.querySelector('button');
changeNameButton.onclick = setusername;
| 1026650b1b7f541198d4d5e5f29054e39cdb7a6b | [
"Markdown",
"JavaScript"
] | 2 | Markdown | jakema1/jakema1.github.io | 326e755985cda3ccfc9ab2bdb008bfc20e7ef4b2 | 0870bb3ebca4e3ff24a82054c705ac2cb7f7f125 |
refs/heads/master | <repo_name>mathmakgakpak/cursors.io-webpack-client<file_sep>/src/ts/Settings.ts
import { noCursorLock, disableDrawings, disablePlayers } from './elements';
/**
 * Thin wrapper around the three settings checkboxes in the DOM.
 * The checkboxes themselves are the source of truth; localStorage is only
 * used to persist their state across sessions (saved on page unload,
 * restored on construction).
 */
export class Settings {
    constructor() {
        this.load();
        // BUG FIX: `this.save` was registered unbound, so inside the handler
        // `this` was the event target (window), not this instance, and the
        // wrong values were written to localStorage. The arrow wrapper keeps
        // `this` bound to the Settings instance.
        window.addEventListener("beforeunload", () => this.save());
    }
    /** Persists the current checkbox states (stored as "true"/"false"). */
    save() {
        window.localStorage.noCursorLock = this.noCursorLock;
        window.localStorage.disableDrawings = this.disableDrawings;
        window.localStorage.disablePlayers = this.disablePlayers;
    }
    /** Restores checkbox states from localStorage (missing keys → unchecked). */
    load() {
        noCursorLock.checked = window.localStorage.noCursorLock === "true";
        disableDrawings.checked = window.localStorage.disableDrawings === "true";
        disablePlayers.checked = window.localStorage.disablePlayers === "true";
    }
    get noCursorLock() {
        return noCursorLock.checked;
    }
    set noCursorLock(v) {
        noCursorLock.checked = v;
    }
    get disableDrawings() {
        return disableDrawings.checked;
    }
    set disableDrawings(v) {
        disableDrawings.checked = v;
    }
    get disablePlayers() {
        return disablePlayers.checked;
    }
    set disablePlayers(v) {
        disablePlayers.checked = v;
    }
}<file_sep>/src/ts/canvasRenderer.ts
// asd@ts-nocheck
// import { eventSys, PublicAPI } from './global';
import { mapSize, rendererSettings } from './gameSettings';
import { settings, } from './main';
import { MousePositionInterface } from './types';
import Click from "./classes/Click";
import Line from "./classes/Line";
import { LevelObject, ObjectTypes } from './classes/LevelObjects';
import { canvas } from './elements';
// https://stackoverflow.com/questions/43638454/webpack-typescript-image-import?rq=1
import cursor_Image from "../img/cursor.png";
import { Players } from './classes/Player';
// import alphabet from './alphabet';
//export { renderState, renderLevelObjects, renderDrawings, renderClicks, renderHUD, renderPlayers, renderMainPlayer }
// TODO optimize
const PI2 = Math.PI * 2; // full circle, precomputed for ctx.arc calls
// Logical map size in game units, and the on-screen canvas size (units * scale).
const { width, height, canvasWidth, canvasHeight } = mapSize;
// Fade/prune tunables for drawings and clicks, plus per-frame render caps.
const { scale, lineDecayAfter, lineRenderDuration, clickMaxRadius, clickRenderduration, maxRenderedClicks, maxRenderedLines, maxRenderedPlayers } = rendererSettings;
export const ctx = <CanvasRenderingContext2D>canvas.getContext("2d");
const cursorImage = new Image;
cursorImage.src = cursor_Image; // bundled cursor sprite (webpack asset import)
// CSS size is fixed; the backing-store resolution is handled separately below.
canvas.style.width = String(canvasWidth) + "px";
canvas.style.height = String(canvasHeight) + "px";
// Re-rasterises the canvas backing store at the current device pixel ratio so
// rendering stays sharp on HiDPI/retina screens and after zoom changes.
// Note: the backing-store size (canvas.width) is independent of the CSS size.
function setLevelOfDetail() { // https://developer.mozilla.org/en-US/docs/Web/API/Window/devicePixelRatio#correcting_resolution_in_a_canvas
    const ratio = window.devicePixelRatio;
    canvas.width = canvasWidth * ratio;
    canvas.height = canvasHeight * ratio;
    ctx.scale(ratio, ratio);
}
// Re-apply DPI scaling whenever the window (and thus devicePixelRatio) changes.
window.addEventListener("resize", setLevelOfDetail);
setLevelOfDetail(); // initial rasterisation
/** Wipes the whole drawing surface before a new frame is composed. */
function clearCanvas() {
    ctx.clearRect(0, 0, canvasWidth, canvasHeight);
}
/**
 * Paints the connection-status banner centred on an otherwise empty canvas.
 * Any state other than CONNECTING/CLOSING/CLOSED (including no socket at all)
 * is treated as "not started yet".
 */
function renderState(wsState: number | undefined) {
    let text: string;
    if (wsState === WebSocket.CONNECTING) {
        text = "Connecting";
    } else if (wsState === WebSocket.CLOSING || wsState === WebSocket.CLOSED) {
        text = "Lost connection to server";
    } else {
        text = "Click to begin";
    }
    ctx.fillStyle = "#000";
    ctx.font = "60px NovaSquare";
    ctx.fillText(text, (canvasWidth - ctx.measureText(text).width) / 2, canvasHeight / 2 + 15);
}
/**
 * Draws every static level object: text, walls, teleporters, cursor counters
 * and buttons. Typed as any[] because server objects carry per-type fields.
 */
function renderLevelObjects(levelObjects: /*LevelObject*/any[]) { // obj.width causes error screw it
    levelObjects.forEach(obj => {
        let x = obj.x * scale;
        let y = obj.y * scale;
        ctx.globalAlpha = 1;
        if (obj.type === ObjectTypes.TEXT) {
            ctx.font = obj.size + "px NovaSquare";
            // BUG FIX (x2): measure the string that is actually drawn
            // (`obj.content` — `obj.text` does not exist), and only after the
            // font is set, otherwise the offset used the previous font.
            if (obj.isCentered) x -= ctx.measureText(obj.content).width;
            ctx.fillStyle = "#000";
            ctx.fillText(obj.content, x, y);
        } else if (obj.type === ObjectTypes.WALL) {
            ctx.fillStyle = obj.color;
            ctx.fillRect(x, y, obj.width * scale, obj.height * scale);
        } else if (obj.type === ObjectTypes.TELEPORT) {
            // Teleporters are translucent areas: red when "bad", green otherwise.
            ctx.fillStyle = obj.isBad ? "#F00" : "#0F0";
            ctx.globalAlpha = 0.2;
            ctx.fillRect(x, y, obj.width * scale, obj.height * scale);
            ctx.globalAlpha = 1;
        } else if (obj.type === ObjectTypes.CURSOR_COUNTER) {
            // Tinted area with the remaining cursor count overlaid.
            ctx.fillStyle = obj.color;
            ctx.globalAlpha = 0.2;
            ctx.fillRect(x, y, obj.width * scale, obj.height * scale);
            ctx.globalAlpha = 0.5;
            ctx.fillStyle = "#000";
            // The count label is roughly centred: small areas get a 30px font,
            // larger ones 60px (offsets tuned by the original author).
            let text = obj.count;
            if (obj.width < 40 && obj.height < 40) {
                ctx.font = "30px NovaSquare";
                x += obj.width - ctx.measureText(text).width / 2;
                y += obj.height + 10;
            } else {
                ctx.font = "60px NovaSquare";
                x += obj.width - ctx.measureText(text).width / 2;
                y += obj.height + 20;
            }
            ctx.fillText(text, x, y);
            ctx.globalAlpha = 1;
        } else if (obj.type === ObjectTypes.BUTTON) { // TODO: fix it later
            // Solid area with its press count overlaid (same labelling as above).
            ctx.fillStyle = obj.color;
            ctx.fillRect(x, y, obj.width * scale, obj.height * scale);
            ctx.fillStyle = "#000";
            let text = obj.count;
            if (obj.width < 40 && obj.height < 40) {
                ctx.font = "30px NovaSquare";
                x += obj.width - ctx.measureText(text).width / 2;
                y += obj.height + 10;
            } else {
                ctx.font = "60px NovaSquare";
                x += obj.width - ctx.measureText(text).width / 2;
                y += obj.height + 20;
            }
            ctx.fillText(text, x, y);
        }
    });
}
/**
 * Draws player drawings and prunes expired ones in place.
 * A line stays at full opacity until its last `lineRenderDuration` ms, then
 * fades, and is dropped once `lineDecayAfter` ms have passed beyond `removeAt`.
 */
function renderLines(lines: Line[], now = Date.now()) {
    ctx.strokeStyle = "#000";
    ctx.lineWidth = 1;
    // BUG FIX: the original spliced inside a for..of loop, which makes the
    // iterator skip the element that shifts into the removed slot. A classic
    // index loop with `i--` after splice visits every element exactly once.
    for (let i = 0; i < lines.length; i++) {
        const { x1, y1, x2, y2, removeAt } = lines[i];
        const timeLeft = lineDecayAfter - (now - removeAt);
        if (timeLeft < 0) {
            lines.splice(i, 1);
            i--; // stay on the element that just moved into slot i
            continue;
        }
        if (i > maxRenderedLines) {
            continue; // render cap: keep iterating only to prune expired lines
        }
        let degreeOfDecay = timeLeft / lineRenderDuration;
        if (degreeOfDecay > 1) degreeOfDecay = 1; // clamp: full alpha until the fade window
        ctx.globalAlpha = 0.3 * degreeOfDecay;
        ctx.beginPath();
        // -0.5 aligns 1px strokes to the pixel grid for crisp lines.
        ctx.moveTo(x1 * scale - 0.5, y1 * scale - 0.5);
        ctx.lineTo(x2 * scale - 0.5, y2 * scale - 0.5);
        ctx.stroke();
    }
}
/**
 * Draws click ripples (expanding, fading circles) and prunes expired ones.
 * `timeLeft` runs from 1 (just clicked) down to 0 (fully expanded/faded).
 */
function renderClicks(clicks: Click[], now = Date.now()) {
    // BUG FIX: same splice-inside-for..of defect as renderLines — use an
    // index loop so removal never skips the following element.
    for (let i = 0; i < clicks.length; i++) {
        const { x, y, removeAt } = clicks[i];
        const timeLeft = (removeAt - now) / clickRenderduration;
        if (timeLeft < 0) {
            clicks.splice(i, 1);
            i--; // stay on the element that just moved into slot i
            continue;
        }
        if (i > maxRenderedClicks) continue; // render cap; keep pruning
        const degreeOfDecay = timeLeft * 0.3;          // alpha fades out…
        const radius = (1 - timeLeft) * clickMaxRadius; // …while the ring grows
        ctx.globalAlpha = degreeOfDecay;
        ctx.beginPath();
        ctx.arc(x * scale, y * scale, radius, 0, PI2);
        ctx.stroke();
    }
}
/** Draws HUD text: a half-transparent outline first, then an opaque fill. */
function drawText(text: string, x: number, y: number) {
    ctx.globalAlpha = 0.5;
    ctx.strokeText(text, x, y); // outline underneath for contrast
    ctx.globalAlpha = 1;
    ctx.fillText(text, x, y);
}
// Rolling hue for the rainbow credit line: successive calls yield 1..360,
// then 0, and the cycle repeats.
let _hue = 0;
const hue = () => {
    if (_hue++ < 360) return _hue;
    _hue = 0;
    return _hue;
};
// Draws the overlay text: credit line, FPS, contextual hint (bottom-left) and
// the online/level counters (bottom-right, stacked upwards).
// NOTE: x and y are mutated sequentially — each right-aligned line re-measures
// its own width and moves one 12*1.5 px row up from the previous one.
function renderHUD(onlinePlayers: number, playersOnLevel: number, actualLevel: number, FPS: number) {
    ctx.font = "12px NovaSquare";
    //ctx.strokeStyle = "#000";
    ctx.lineWidth = 2.5;
    // Credit line cycles through the hue wheel every 360 frames.
    ctx.fillStyle = `hsl(${hue()}, 100%, 50%)`
    drawText("Client made by felpcereti#9857", 10, 12 * 1.3); // code gets optimized by webpack
    ctx.fillStyle = "#fff";
    drawText(FPS + " FPS", 10, 12 * 2 * 1.3);
    // Hint text reflects the server-side crowding rules for the level.
    let text = "Use shift+click to draw";
    if (playersOnLevel > 100) text = "Area too full, not all cursors are shown";
    else if (playersOnLevel > 30) text = "Area too full, drawing is disabled";
    let y = canvasHeight - 10;
    drawText(text, 10, y);
    // Bottom-right counters, right-aligned with a 10px margin.
    text = onlinePlayers + " players online";
    let measure = ctx.measureText(text).width;
    let x = canvasWidth - measure - 10;
    drawText(text, x, y);
    text = playersOnLevel + " players on level";
    measure = ctx.measureText(text).width;
    x = canvasWidth - measure - 10;
    y -= 12 * 1.5; // one text row up
    drawText(text, x, y);
    text = "Actual level: " + actualLevel;
    measure = ctx.measureText(text).width;
    x = canvasWidth - measure - 10;
    y -= 12 * 1.5; // one more row up
    drawText(text, x, y);
}
/**
 * Draws every remote cursor with its id label underneath, capped at
 * maxRenderedPlayers to keep crowded levels renderable.
 */
function renderPlayers(players: Players) {
    ctx.font = "12px NovaSquare";
    ctx.fillStyle = "#000";
    let drawn = 0;
    for (const id in players) {
        if (drawn > maxRenderedPlayers) break;
        const { x, y } = players[id];
        const cx = x * scale;
        const cy = y * scale;
        ctx.drawImage(cursorImage, cx - 4, cy - 4); // -4 compensates the sprite's shadow
        ctx.fillText(id, cx + 16, cy + 24);
        drawn++;
    }
}
// Draws the local player's cursor. First argument is the server-acknowledged
// position, second is the raw mouse position; when they diverge (cursor
// blocked by a wall) and the pointer is not locked, a red "ghost" cursor is
// drawn at the real mouse location.
function renderMainPlayer({ canvasX: px, canvasY: py }: MousePositionInterface, { canvasX: mx, canvasY: my }: MousePositionInterface) { // trunc is as fast as | 0
    if (document.pointerLockElement !== canvas && (px !== mx || py !== my)) { // TODO: change that
        // Ghost cursor: red halo + half-transparent sprite at the mouse position.
        ctx.fillStyle = "#F00";
        ctx.globalAlpha = 0.2;
        ctx.beginPath();
        ctx.arc(mx + 2, my + 8, 20, 0, PI2);
        ctx.fill();
        ctx.globalAlpha = 0.5;
        ctx.drawImage(cursorImage, mx - 4, my - 4);
    }
    // draws The Halo (yellow glow under the player's own cursor)
    ctx.globalAlpha = 0.2;
    ctx.fillStyle = "#FF0";
    ctx.beginPath();
    ctx.arc(px + 2, py + 8, 20, 0, PI2);
    ctx.fill();
    ctx.globalAlpha = 1;
    ctx.drawImage(cursorImage, px - 4, py - 4); // -4 compensates the sprite's shadow
}
/**
 * Anti-embedding notice: paints a warning centred on the canvas and then
 * throws to halt the client when the site is framed elsewhere.
 */
export function renderDoNotEmbedSite() {
    const warning = "Please do not embed our website, thank you.";
    const hint = "Play the game on http://cursors.io/";
    ctx.fillStyle = "#000";
    ctx.font = "35px NovaSquare";
    ctx.fillText(warning, 400 - ctx.measureText(warning).width / 2, 300);
    ctx.font = "16px NovaSquare";
    ctx.fillText(hint, 400 - ctx.measureText(hint).width / 2, 330);
    // @ts-ignore
    //window.top.location = "http://cursors.io";
    throw "Please do not embed our website, thank you.";
}
/*
0. clear
1?: render state
1. level objects
3. drawings
4. clicks
5. HUD
6. players
7. main cursor
*/
/**
 * Composes one frame, back to front:
 * status text (when disconnected) → level objects → drawings → clicks →
 * HUD → other players → own cursor.
 */
export default function RenderFrame(
    wsState: number | undefined,
    levelObjects: LevelObject[],
    drawings: Line[],
    clicks: Click[],
    onlinePlayers: number,
    playersOnLevel: number,
    actualLevel: number,
    FPS: number,
    players: any,
    playerPos: MousePositionInterface,
    mousePos: MousePositionInterface) {
    clearCanvas();
    // While not connected only the status banner is shown.
    if (wsState !== WebSocket.OPEN) return renderState(wsState);
    renderLevelObjects(levelObjects);
    if (!settings.disableDrawings) renderLines(drawings);
    renderClicks(clicks);
    renderHUD(onlinePlayers, playersOnLevel, actualLevel, FPS);
    // FIX: honour the "disable players" checkbox — the setting existed in the
    // Settings panel but was never consulted here (mirrors the drawings gate).
    if (!settings.disablePlayers) renderPlayers(players);
    renderMainPlayer(playerPos, mousePos);
}<file_sep>/src/ts/global.ts
import EventEmitter from 'events';
import browserRequire from './browserRequire';
// Expose the client's public scripting API on window.C / window.Cursors.
declare global {
    var C: any;
    var Cursors: any;
}
export const PublicAPI = window.C = window.Cursors = <any>{};
// Shared event bus; its subscribe/emit methods are re-exported on PublicAPI
// (bound so they can be called without a receiver).
export const eventSys = new EventEmitter();
PublicAPI.emit = eventSys.emit.bind(eventSys);
PublicAPI.on = eventSys.on.bind(eventSys);
PublicAPI.once = eventSys.once.bind(eventSys);
PublicAPI.removeListener = PublicAPI.off = eventSys.removeListener.bind(eventSys);
PublicAPI.require = browserRequire;
// Build metadata injected at bundle time (webpack DefinePlugin / env).
PublicAPI.buildNumber = process.env.BUILD_NUMBER;
PublicAPI.version = process.env.VERSION;
PublicAPI.productionBuild = process.env.PRODUCTION_BUILD;<file_sep>/src/ts/gameSettings.ts
// Render tunables: per-frame caps, fade windows (ms) and the game-unit →
// canvas-pixel scale factor.
export const rendererSettings = {
    maxRenderedPlayers: 100,
    maxRenderedLines: 4000,
    lineRenderDuration: 1000, // ms over which a line fades out
    lineDecayAfter: 10_000,   // ms past removeAt before a line is pruned
    clickRenderduration: 500, // ms a click ripple lives
    clickMaxRadius: 25,
    maxRenderedClicks: 30,
    scale: 2
}
// Logical map size in game units; canvas size is derived from it below.
export const mapSize = {
    width: 400,
    height: 300,
    canvasWidth: 0,
    canvasHeight: 0,
}
mapSize.canvasWidth = mapSize.width * rendererSettings.scale;
mapSize.canvasHeight = mapSize.height * rendererSettings.scale;
// Frozen after derivation so nothing mutates shared config at runtime.
Object.freeze(rendererSettings);
Object.freeze(mapSize);
export const defaultURL = "wss://cursors.uvias.com";<file_sep>/src/ts/main.ts
import { eventSys, PublicAPI } from './global';
import { Settings } from './Settings';
import mouseEvents, { mousePosition } from './mouse';
import { canvas } from './elements';
import { Client } from './Networking/Client';
import { getCursorsServer, unStuck } from './utils';
import log from './sexylogs';
import RenderFrame, { renderDoNotEmbedSite } from './canvasRenderer';
import { MousePositionInterface } from './types';
import "../style.css";
// https://github.com/qiao/PathFinding.js you can use it for making cheats but you will need to rewrite some things in client
// document.exitPointerLock = document.exitPointerLock || document.mozExitPointerLock || document.webkitExitPointerLock;
// Global singletons, also exposed on window.C / window.Cursors via PublicAPI.
export const settings = PublicAPI.settings = new Settings();
log.info("Version: " + PublicAPI.version);
// FIX: global.ts publishes the build as `PublicAPI.buildNumber`;
// `PublicAPI.build` was never assigned, so this always logged "undefined".
log.info("Build: " + PublicAPI.buildNumber);
export const client = PublicAPI.client = new Client({
    autoMakeSocket: false // the socket is opened lazily on the first click
});
// True while the server-address lookup is in flight; prevents opening
// several sockets from rapid clicks.
let gettingIp = false;
/**
 * Resolves the websocket endpoint once, then opens the client socket.
 * FIX: the busy flag is cleared in `finally` — previously a rejected
 * getCursorsServer() left `gettingIp` stuck at true, so the game could
 * never connect without a page reload.
 */
async function connect() {
    if(gettingIp) return;
    gettingIp = true;
    try {
        // search websocket proxy on github
        client.options.ws = /*`ws://localhost:8080/?target=${*/await getCursorsServer()/*}&origin=http://cursors.io`*/;
    } finally {
        gettingIp = false;
    }
    client.makeSocket();
}
// First click bootstraps the connection; later clicks interact with the level.
mouseEvents.on("mousedown", (mousePos: MousePositionInterface, event: MouseEvent) => {
    if(gettingIp) return; // endpoint lookup in flight — swallow input
    if(!client.ws) return connect(); // no socket yet: this click just connects
    if(client.ws.readyState !== 1) return; // 1 === WebSocket.OPEN
    // Grab pointer lock on interaction unless the user opted out.
    if(!settings.noCursorLock && document.pointerLockElement !== canvas) canvas.requestPointerLock();
    if((event.ctrlKey || event.shiftKey) && !settings.disableDrawings) {
        // Draw a line from the current position to the clicked point,
        // clipped by unStuck so it cannot pass through solid walls.
        let unstucked = unStuck(client.position, mousePos, client.solidMap);
        client.draw(client.position.x, client.position.y, unstucked.x, unstucked.y);
    } else if(client.position.x === mousePos.x && client.position.y === mousePos.y) {
        // Only send a click when the acknowledged position matches the mouse,
        // i.e. the cursor is not currently blocked by a wall.
        client.click();
    }
    return;
});
// Forward mouse movement to the server, sliding along walls via unStuck.
mouseEvents.on("mousemove", (mousePos: MousePositionInterface) => {
    if(client.ws?.readyState !== WebSocket.OPEN) return;
    //console.log(client.position, mousePos);
    // unStuck clamps the destination so the cursor cannot cross solid cells.
    let {x, y, collides} = unStuck(client.position, mousePos, client.solidMap);
    // console.log(x, y, collides)
    client.move(x, y);
});
// Frames rendered within the current one-second window.
let _FPS = 0;
// Last completed window's frame count — the value the HUD displays.
let FPS = 30;
window.setInterval(() => {
    FPS = _FPS;
    _FPS = 0;
}, 1000);
// Main draw loop: pulls the current client state and hands it to the renderer.
function render() {
    try {
        RenderFrame(client.ws?.readyState, client.levelObjects, client.lines, client.clicks, client.usersOnline, client.playersOnLevel, client.level, FPS, client.players, client.position, mousePosition);
        _FPS++;
    } catch(e) {
        // Keep the loop alive even when a single frame fails to draw.
        log.error("Rendering error: ", e);
    }
    window.requestAnimationFrame(render);
}
window.requestAnimationFrame(render); // kick off the loop
<file_sep>/src/ts/classes/Line.ts
import { rendererSettings } from "../gameSettings";
const lineRenderDuration = rendererSettings.lineRenderDuration;
// A single drawn line segment in game units, stamped with its creation time.
export default class Line {
    public removeAt: number; // ms timestamp when the fade-out window starts
    constructor(public x1: number,
        public y1: number,
        public x2: number,
        public y2: number,
        public drewAt: number
    ) {
        // The renderer keeps the line for lineDecayAfter ms beyond removeAt.
        this.removeAt = drewAt + lineRenderDuration;
    }
    // isRemoved(now: number = Date.now()) { // it's not how it works
    //     return this.removeAt < now;
    // }
}<file_sep>/src/ts/SolidMap.ts
/*
x = 3
y = 4
'*' - y * width
'%' - x
'&' - our point we want
this.solid = new Uint8Array(10 * 7) = this thing underneath
0 1 2 3 4 5 6 7 8 9
0 * * * * * * * * * * Width = 10
1 * * * * * * * * * *
2 * * * * * * * * * *
3 * * * * * * * * * *
4 % % % & # # # # # #
5 # # # # # # # # # #
6 # # # # # # # # # #
Height = 7
*/
import { LevelObject, ObjectTypes, WallObject } from "./classes/LevelObjects";
/**
 * Row-major occupancy grid of the level: solid[y * width + x] is 1 where a
 * wall blocks the cursor and 0 elsewhere.
 */
export default class SolidMap {
    private resetUint8Array: Uint8Array; // all-zero buffer used to clear `solid` cheaply
    public solid: Uint8Array;
    public length: number;
    constructor(public width: number, public height: number) {
        this.length = width * height;
        this.solid = new Uint8Array(this.length);
        this.resetUint8Array = new Uint8Array(this.length);
    }
    /** Clears the whole grid (e.g. when a new level arrives). */
    public resetMap() {
        this.solid.set(this.resetUint8Array);
    }
    /** Marks the half-open rectangle [x, x2) × [y, y2) solid or passable. */
    public setSolidArea(x: number, y: number, x2: number, y2: number, isSolid: boolean) {
        // px - point x
        // py - point y
        for(let py = y; py < y2; py++){
            for(let px = x; px < x2; px++){
                this.solid[px + py * this.width] = Number(isSolid);
            }
        }
    }
    /** Marks one wall object's rectangle solid or passable. */
    setWallObject({x, y, width, height}: WallObject, isSolid: boolean) {
        this.setSolidArea(x, y, x + width, y + height, isSolid);
    }
    /** Seeds the grid from a freshly parsed level (walls only). */
    public setLevelObjects(levelObjects: LevelObject[]) {
        levelObjects.forEach((obj: LevelObject) => {
            if(obj.type === ObjectTypes.WALL) {
                this.setWallObject(obj, true);
            }
        })
    }
    public isOutsideMap(x: number, y: number) {
        // BUG FIX: the conditions were joined with && (a point cannot be both
        // < 0 and >= width), so this always returned false and out-of-bounds
        // points were treated as passable. Any one violated bound suffices.
        return x < 0 || x >= this.width ||
            y < 0 || y >= this.height;
    }
    /** True when (x, y) is a wall cell or lies outside the map entirely. */
    public isPointSolid(x: number, y: number) {
        return !!this.solid[y * this.width + x] || this.isOutsideMap(x, y);
    }
}<file_sep>/tsconfig.json
{
"compilerOptions": {
"noImplicitReturns": true,
"esModuleInterop": true,
"target": "ES6",
"module": "ES2022",
"strict": true,
"removeComments": true,
"downlevelIteration": true
},
"include": [
"src/ts/*",
"index.d.ts"
]
}<file_sep>/src/ts/elements.ts
// Cached references to the static DOM nodes the client needs.
export const canvas = <HTMLCanvasElement>document.getElementById("canvas");
// Suppress the browser context menu so right-click can be used in-game.
canvas.addEventListener('contextmenu', event => event.preventDefault()); // bob
// Settings-panel checkboxes (persisted by the Settings class).
export const noCursorLock = <HTMLInputElement>document.getElementById("noCursorLock");
export const disableDrawings = <HTMLInputElement>document.getElementById("noDrawings");
export const disablePlayers = <HTMLInputElement>document.getElementById("disablePlayers");
<file_sep>/src/ts/Networking/Client.ts
import EventEmitter from 'events';
import { MousePositionInterface } from '../types';
import Click from "../classes/Click";
import { mapSize, rendererSettings, defaultURL } from '../gameSettings';
// import { changeStateOfWall, levelObjectsToGrid } from './utils';
import log from '../sexylogs';
import {
LevelObject,
ObjectTypes
} from "../classes/LevelObjects"
import SolidMap from '../SolidMap';
import Opcodes from "./Opcodes";
import { parsePlayers, parseObjects } from './PacketParser';
import { Players } from '../classes/Player';
import Line from "../classes/Line";
// Re-exported for consumers importing from this module directly.
export {
    compareLevel,
    Client,
    Options
}
// Logical playfield dimensions (game units) used to size the solid map.
const { width, height } = mapSize;
// function updateClicksOrLines(clicksOrDrawings: Click[]): Click[];
// function updateClicksOrLines(clicksOrDrawings: Line[]): Line[] {
//     const now = Date.now();
//     clicksOrDrawings.forEach(({removeAt}, i) => {
//         if(removeAt < now) clicksOrDrawings.splice(i, 1);
//     });
// }
// Computes a stable index for the current level: serialises its distinctive
// objects to JSON and looks the string up in `prevLevels`, appending it when
// the layout has never been seen before. Used for the "Actual level" HUD line.
// NOTE(review): these comparison keys read `o.w`/`o.h`, while the renderer
// reads `obj.width`/`obj.height` from the same objects — if the LevelObject
// class only defines width/height, w and h are undefined here and distinct
// levels could collide. Confirm against classes/LevelObjects (not visible here).
function compareLevel(prevLevels: any, level: LevelObject[]) {
    let compare: any = []; // TODO: o.type = ObjectTypes
    level.forEach((o: any) => {
        if (o.type === ObjectTypes.TEXT) { // text
            compare.push({
                x: o.x,
                y: o.y,
                size: o.size,
                content: o.content
            });
        } else if (o.type === ObjectTypes.WALL) { // walls
            // Only black walls are part of the fingerprint:
            if (o.color === '#000000') compare.push({ // because other colored walls can be gone often
                x: o.x,
                y: o.y,
                w: o.w,
                h: o.h
            });
        } else if (o.type === ObjectTypes.TELEPORT) {
            compare.push({
                x: o.x,
                y: o.y,
                w: o.w,
                h: o.h,
                isBad: o.isBad
            });
        } else {
            compare.push({ // button / player counter
                x: o.x,
                y: o.y,
                w: o.w,
                h: o.h,
                color: o.color
            });
        }
    });
    compare = JSON.stringify(compare);
    let levelIndex = prevLevels.indexOf(compare);
    if (levelIndex === -1) {
        // First time this layout is seen — remember it and assign a new index.
        levelIndex = prevLevels.length;
        prevLevels.push(compare);
    }
    return levelIndex;
}
// Construction-time knobs for Client; all optional, defaulted in the constructor.
interface Options {
    reconnectTimeout?: number; // ms before reopening a closed socket (default 5000)
    autoMakeSocket?: boolean; // open the websocket in the constructor (default true)
    log?: boolean; // route diagnostics through the logger (default true)
    ws?: string; // websocket URL (default: defaultURL from gameSettings)
    reconnect?: boolean // reopen the socket automatically after close (off unless set)
}
class Client extends EventEmitter {
public prevLevels: LevelObject[][] = [];
public levelObjects: LevelObject[] = [];
public options: Options = {};
public players: Players = {};
public solidMap: SolidMap = new SolidMap(width, height);
// public gridSpace: number = 100;
public playersOnLevel: number = 0;
public usersOnline: number = 0;
private lastAck: number = 0; // it has something to do with tcp FIN packet... It just verifies if everything you got is good
/*
#clicksAndDrawingsUpdateInterval: number = window.setInterval(() => {
this.clicks = updateClicksOrLines(this.clicks);
this.lines = updateClicksOrLines(this.lines);
}, 1); // TODO: change to INTERP_TIME
*/
// #jobs: number = 0; // implementation for making bot system (drawText)
public ws: WebSocket | undefined;
public id: number = -1;
public level: number = -1;
public position: MousePositionInterface = { // should be unchangable
x: 0,
y: 0,
canvasX: 0,
canvasY: 0
}
public clicks: Click[] = [];
public lines: Line[] = [];
constructor(options: Options = {}) {
super();
if (!options.ws) options.ws = defaultURL;
if (typeof options.reconnectTimeout !== "number") options.reconnectTimeout = 5000;
if (typeof options.autoMakeSocket === "undefined") options.autoMakeSocket = true;
if (typeof options.log === "undefined") options.log = true;
this.options = options;
if (options.autoMakeSocket) {
this.makeSocket();
} else {
this.log("warn", "Disabled option autoMakeSocket! If you want start bot, do it in your script!");
}
}
private log(type: string, ...args: any[]) {
// @ts-ignore: fuck that error
if (this.options.log) log[type](...args);
}
private resetVariables() {
// this.players = {};
// this.drawings = [];
// this.clicks = [];
// this.prevLevels = [];
// this.levelObjects = [];
// this.grid = new Uint8Array(0);
// this.position = {
// x: 0,
// y: 0,
// canvasX: 0,
// canvasY: 0
// }
// this.ticks = 0;
// //this.jobs = 0;
// this.level = -1;
// this.id = -1;
// this.gridSpace = 100;
// this.usersOnline = 0;
// this.playersOnLevel = 0;
}
private setPosition(x: number, y: number) {
this.position.x = x;
this.position.y = y;
this.position.canvasX = x * 2; // TODO: Scale
this.position.canvasY = y * 2;
}
makeSocket() {
this.resetVariables();
this.ws = new WebSocket(<string>this.options.ws);
this.ws.binaryType = "arraybuffer";
this.ws.onopen = (event: any) => {this.emit("open", event)};
this.ws.onclose = (event: any) => {
this.emit("close", event);
if (this.options.reconnect) setTimeout(this.makeSocket, this.options.reconnectTimeout);
}
this.ws.onerror = (event: any) => this.emit("error", event);
this.ws.onmessage = event => {
const arrayBuffer: ArrayBuffer = event.data;
const len = arrayBuffer.byteLength;
const dv = new DataView(arrayBuffer);
const now = Date.now();
let offset = 0;
const opcode = dv.getUint8(offset++);
this.emit("message", dv, opcode, arrayBuffer);
switch (opcode) { // idk why there is no 2 and 3
case Opcodes.GET_ID: { // got id
this.id = dv.getUint32(offset, true);
this.emit("gotId");
break;
}
case Opcodes.UPDATE: { // cursors updates lines and map changes
{ // players
const {
parsedPlayers,
updatedPlayers,
movedPlayers,
newPlayers,
removedPlayers,
count,
offset: off
} = parsePlayers(dv, offset, this.players, this.id, now);
offset = off;
// !!! it emits almost always.
// There is no way to prevent it without checking length by using Object.keys
// or adding a lot of counters
if(count) {
this.emit("parsedPlayers", parsedPlayers);
this.emit("updatedPlayers", updatedPlayers);
this.emit("movedPlayers", movedPlayers);
this.emit("newPlayers", newPlayers);
}
this.emit("removedPlayers", removedPlayers);
}
// clicks
let count = dv.getUint16(offset, true);
offset += 2;
let clicks: Click[] = [];
for (let i = 0; i < count; i++) {
const x = dv.getUint16(offset, true);
const y = dv.getUint16(offset + 2, true);
const click = new Click(x, y, now);
clicks.push(click);
this.clicks.push(click);
offset += 2 + 2;
}
if(clicks.length) this.emit("newClicks", clicks);
// removed objects
count = dv.getUint16(offset, true);
offset += 2;
let removedObjects: LevelObject[] = [];
for (let i = 0; i < count; i++) {
const idOfObjectToRemove = dv.getUint32(offset, true);
const index = this.levelObjects.findIndex(x => x.id === idOfObjectToRemove);
const obj = this.levelObjects.splice(index, 1)[0];
console.log(idOfObjectToRemove, index, obj)
removedObjects.push(obj);
if(obj.type === ObjectTypes.WALL) this.solidMap.setWallObject(obj, false);
offset += 4;
}
this.emit("removedObjects", removedObjects);
// added or updated objects objects TODO: it doesn't add objects, it updates and adds them
{
const { levelObjects: addedObjects, offset: off} = parseObjects(dv, offset);
offset = off;
this.emit("addedObjects", addedObjects);
addedObjects.forEach(obj => {
this.levelObjects.push(obj);
if(obj.type === 1) this.solidMap.setWallObject(obj, true);
});
}
// Lines
count = dv.getUint16(offset, true);
let newLines: Line[] = [];
offset += 2;
for (let i = 0; i < count; i++) {
const x1 = dv.getUint16(offset, true);
const y1 = dv.getUint16(offset + 2, true);
const x2 = dv.getUint16(offset + 2 + 2, true);
const y2 = dv.getUint16(offset + 2 + 2 + 2, true);
const line = new Line(x1, y1, x2, y2, now);
newLines.push(line);
this.lines.push(line);
offset += 2 + 2 + 2 + 2;
}
this.emit("newDrawings", newLines);
// if (len >= offset + 4) {
// this.lastAck = Math.max(this.lastAck, dv.getUint32(offset, true));
// offset += 4;
// } else if (len >= offset + 2) {
// this.lastAck = Math.max(this.lastAck, dv.getUint16(offset, true));
// offset += 2;
// }
this.usersOnline = dv.getUint32(offset, true);
break;
}
case Opcodes.NEW_LEVEL: {
this.levelObjects.length = 0; // removes all objects
this.solidMap.resetMap();
this.setPosition(dv.getUint16(offset, true), dv.getUint16(offset + 2, true));
offset += 4;
const { levelObjects, offset: off } = parseObjects(dv, offset)
offset = off;
this.levelObjects.push(...levelObjects);
this.solidMap.setLevelObjects(levelObjects);
this.level = compareLevel(this.prevLevels, this.levelObjects);
if (len >= offset + 4) {
this.lastAck = Math.max(this.lastAck, dv.getUint32(offset, true));
} else if (len >= offset + 2) {
this.lastAck = Math.max(this.lastAck, dv.getUint16(offset, true));
}
break;
}
case Opcodes.PREDICTION_ERROR: {
this.setPosition(dv.getUint16(offset, true), dv.getUint16(offset + 2, true));
offset += 4;
if (len >= offset + 4) {
this.lastAck = Math.max(this.lastAck, dv.getUint32(offset, true));
} else if (len >= offset + 2) {
this.lastAck = Math.max(this.lastAck, dv.getUint16(offset, true));
}
break;
}
default: {
console.debug("Unexpected packet: ", dv.getUint8(0));
}
}
}
}
private isConnected(): boolean {
return this.ws?.readyState === WebSocket.OPEN;
}
move(x: number = this.position.x, y: number = this.position.y): boolean {
if (!this.isConnected()) return false;
const array = new ArrayBuffer(9);
const dv = new DataView(array);
dv.setUint8(0, 1);
dv.setUint16(1, x, true);
dv.setUint16(3, y, true);
dv.setUint32(5, this.lastAck, true);
this.ws!.send(array);
this.setPosition(x, y);
return true;
}
click(x: number = this.position.x, y: number = this.position.y): boolean {
if (!this.isConnected()) return false;
const array = new ArrayBuffer(9);
const dv = new DataView(array);
dv.setUint8(0, 2);
dv.setUint16(1, x, true);
dv.setUint16(3, y, true);
dv.setUint32(5, this.lastAck, true);
this.ws!.send(array);
this.setPosition(x, y);
return true;
}
draw(x1: number, y1: number, x2: number, y2: number): boolean {
if (!this.isConnected()) return false;
const array = new ArrayBuffer(9);
const dv = new DataView(array);
dv.setUint8(0, 3);
dv.setUint16(1, x1, true);
dv.setUint16(3, y1, true);
dv.setUint16(5, x2, true);
dv.setUint16(7, y2, true);
this.ws!.send(array);
this.setPosition(x2, y2);
return true;
}
}
<file_sep>/README.md
# cursors.io webpack client
This is my first webpack project so don't kill me plz
## Status
### canvas renderer
finished
### client
i think that finished
### elements
finished
### game settings
uhh idk
### global
finished
### main
not finished
### mouse
finished
### requires
finished
### settings
finished
### types
finished?
### utils
finished
<file_sep>/src/ts/classes/Player.ts
export default class Player {
public joinedAt: number = -1;
constructor(
public id: number,
public x: number = 0,
public y: number = 0
) {}
setPosition(x: number, y: number) {
this.x = x;
this.y = y;
}
}
export interface Players { // this is used for players object not array
[id: string]: Player; // id must be a string because that's a typescript number but it is a string in javascript
}<file_sep>/src/ts/sexylogs.ts
const log = {
msg: console.log.bind(window.console, '%c MSG ', "color: #212121; font-weight:bold; background-color:#b0bec5; padding: 3px 6px; border-radius: 2px;"),
error: console.log.bind(window.console, '%c ERROR ', "color: #ffebee; font-weight:bold; background-color:#c62828; padding: 3px 6px; border-radius: 2px;"),
warn: console.log.bind(window.console, '%c WARN ', "color: #fff3e0; font-weight:bold; background-color:#f4511e; padding: 3px 6px; border-radius: 2px;"),
info: console.log.bind(window.console, '%c INFO ', "color: #ede7f6; font-weight:bold; background-color:#651fff; padding: 3px 6px; border-radius: 2px;"),
success: console.log.bind(window.console, '%c SUCCESS ', "color: #e8f5e9; font-weight:bold; background-color:#2e7d32; padding: 3px 6px; border-radius: 2px;"),
dir: console.dir.bind(window.console),
start: console.groupCollapsed.bind(window.console),
end: console.groupEnd.bind(window.console),
};
//if (process.env.PRODUCTION_BUILD) log.msg = log.dir = log.error = log.warn = log.info = log.success = log.start = log.end = () => {};
export default log;<file_sep>/src/ts/Networking/PacketParser.ts
import {
LevelObject,
TextObject,
DebugObject,
WallObject,
TeleportObject,
CursorCounterObject,
ButtonObject,
ObjectTypes
} from "../classes/LevelObjects";
import Player, { Players } from "../classes/Player";
//
// *
export function parsePlayers(dv: DataView, offset: number, players: Players, ignoreId: number,now: number) {
const count = dv.getUint16(offset, true);
offset += 2;
const parsedPlayers: Players = {}; // players created from the packet
const updatedPlayers: Players = {}; // checks if the player existed before in "players" variable
const movedPlayers: Players = {}; // if player did exist and their position has changed they go here
const newPlayers: Players = {}; // if they didn't exist they go here
const removedPlayers: Players = {}; // if the player was in the players variable but he doesn't exist in the updatedPlayers variable he is put here
for (let i = 0; i < count; i++) {
const id = dv.getUint32(offset, true);
const x = dv.getUint16(offset + 4, true);
const y = dv.getUint16(offset + 6, true);
offset += 8;
if(id === ignoreId) continue;
const parsedPlayer = parsedPlayers[id] = new Player(id, x, y);
const player = players[id];
if (player) { // player got updated
updatedPlayers[id] = player;
if(player.x !== parsedPlayer.x || player.y !== parsedPlayer.y) {
movedPlayers[id] = player;
}
player.setPosition(parsedPlayer.x, parsedPlayer.y);
} else {
parsedPlayer.joinedAt = now;
newPlayers[id] = players[id] = parsedPlayer;
}
}
for(const id in players) {
if(!parsedPlayers[id]) {
removedPlayers[id] = players[id];
delete players[id];
}
}
return {
parsedPlayers,
updatedPlayers,
movedPlayers,
newPlayers,
removedPlayers,
count,
offset
}
}
function parseColor(colorHex: number) {
let color = colorHex.toString(16);
while (color.length < 6) color = "0" + color;
return '#' + color;
}
// : {levelObjects: LevelObject[], offset:number}
export function parseObjects(dv: DataView, offset: number) {
let count = dv.getUint16(offset, true);
let levelObjects: LevelObject[] = [];
offset += 2;
for (let i = 0; i < count; ++i) {
const id = dv.getUint32(offset, true);
offset += 4;
const type = dv.getUint8(offset);
offset++
let obj: LevelObject;
switch (type) {
case ObjectTypes.TEXT: {
obj = new TextObject;
obj.x = dv.getUint16(offset, true);
obj.y = dv.getUint16(offset + 2, true);
obj.fontSize = dv.getUint8(offset + 4);
obj.isCentered = !!dv.getUint8(offset + 5);
offset += 6; // there should be 5
obj.content = "";
let char: number = 0;
while ((char = dv.getUint8(offset++)) !== 0) {
obj.content += String.fromCharCode(char);
}
// offset++;
break;
}
case ObjectTypes.WALL: {
obj = new WallObject;
obj.x = dv.getUint16(offset, true);
obj.y = dv.getUint16(offset + 2, true);
obj.width = dv.getUint16(offset + 4, true);
obj.height = dv.getUint16(offset + 6, true);
obj.color = parseColor(dv.getUint32(offset + 8, true));
offset += 2 + 2 + 2 + 2 + 4;
break;
}
case ObjectTypes.TELEPORT: {
obj = new TeleportObject;
obj.x = dv.getUint16(offset, true);
obj.y = dv.getUint16(offset + 2, true);
obj.width = dv.getUint16(offset + 4, true);
obj.height = dv.getUint16(offset + 6, true);
obj.isBad = !!dv.getUint8(offset + 8);
offset += 2 + 2 + 2 + 2 + 1;
break;
}
case ObjectTypes.CURSOR_COUNTER: {
obj = new CursorCounterObject;
obj.x = dv.getUint16(offset, true);
obj.y = dv.getUint16(offset + 2, true);
obj.width = dv.getUint16(offset + 4, true);
obj.height = dv.getUint16(offset + 6, true);
obj.count = dv.getUint16(offset + 8, true);
obj.color = parseColor(dv.getUint32(offset + 10, true));
offset += 2 + 2 + 2 + 2 + 2 + 4;
break;
}
case ObjectTypes.BUTTON: {
obj = new ButtonObject;
obj.x = dv.getUint16(offset, true);
obj.y = dv.getUint16(offset + 2, true);
obj.width = dv.getUint16(offset + 4, true);
obj.height = dv.getUint16(offset + 6, true);
obj.count = dv.getUint16(offset + 8, true);
obj.color = parseColor(dv.getUint32(offset + 10, true));
//obj.lastClickAt = 0;
offset += 2 + 2 + 2 + 2 + 2 + 4;
break;
}
case ObjectTypes.DEBUG_OBJECT: {
console.warn("Encountered a debug object. This shouldn't happen...");
debugger;
break;
}
default: throw new Error("Unknown object type: " + type);
}
/*
https://www.typescriptlang.org/docs/handbook/2/everyday-types.html#non-null-assertion-operator-postfix-
https://stackoverflow.com/questions/60854745/ts2454-variable-value-is-used-before-being-assigned
*/
obj!.id = id;
levelObjects[i] = obj!;
}
return {
levelObjects,
offset
};
}<file_sep>/src/ts/browserRequire.ts
export default function browserRequire(module: string) {
if (module.startsWith("./")) { // dumb
return require(`./${module.slice(2)}`);
}/* else if(module.startsWith("../")) {
return require(`../${module.slice(3)}`);
}*/ else if (module === "events" || module.toLowerCase() === "eventemitter") { // dumb too
return require("events");
}
}<file_sep>/src/ts/utils.ts
// @ats-nocheck
import { mapSize, defaultURL } from './gameSettings'
import { Point } from './types';
import { LevelObject } from './classes/LevelObjects';
import SolidMap from './SolidMap';
// Parses cursors.io level "M" object to my own this level object // i needed it to test rendering
export function parse(levelObjects: any) {
return levelObjects.map((obj: any) => {
if (obj.color) obj.color = rgbToHex(obj.color.r, obj.color.g, obj.color.b);
if (obj.textHeight) {
obj.size = obj.textHeight;
delete obj.textHeight;
}
return obj;
})
}
// export function isInsideObject(x: number, y: number, obj: LevelObject) {
// // @ts-ignore
// // return x >= obj.x && x < obj.x + obj.width && // @ts-ignore: thiccccccccc
// // y >= obj.y && y < obj.y + obj.height;
// }
// export function isStuckAt(x: number, y: number, grid: Uint8Array[]): boolean {
// return !isInsideMap(x, y) || !!grid[y][x];
// }
/*export function shortGrid(grid: Uint8Array[], gridSpace: number) {
let shortGrid = [];
const shortWidth = realWidth / gridSpace;
const shortHeight = realHeight / gridSpace;
for (let y = 0; y < shortHeight; y++) {
const array = shortGrid[y] = new Uint8Array(shortWidth);
const array2 = grid[y * gridSpace];
for (let x = 0; x < shortWidth; x++) {
array[x] = array2[x * gridSpace];
}
}
return shortGrid;
}*/
// export
export function calculateGridSpace(levelObjects: LevelObject[]) { // this to make pathfinding between walls and collision checking faster
let grid = 100;
for (let length = levelObjects.length, i = 0; i < length; i++) {
if (grid <= 1) { // if it can't find
grid = 1;
break;
}
let levelObject = levelObjects[i];
if (levelObject.type === 1) {
let shortX = levelObject.x / grid,
shortY = levelObject.y / grid,
// @ts-ignore
shortWidth = levelObject.width / grid,
// @ts-ignore
shortHeight = levelObject.height / grid;
if ( // 1.9 | 0 = 1 it truncates decimal point https://stackoverflow.com/questions/7641818/how-can-i-remove-the-decimal-part-from-javascript-number
(shortX | 0) !== shortX ||
(shortY | 0) !== shortY ||
(shortWidth | 0) !== shortWidth ||
(shortHeight | 0) !== shortHeight
) grid--, i = 0;
}
}
return grid;
}
// export function unStuck({ x: x1, y: y1 }: Point, { x: x2, y: y2 }: Point, solidMap: SolidMap): { x: number, y: number, collides: boolean } {
// if (solidMap.isPointSolid(x1, y1)) {
// return { x: x1, y: y1, collides: true };
// }
// if (x1 === x2 && y1 === y2) {
// return { x: x2, y: y2, collides: false };
// }
// let [x, y] = [x1, y1];
// const dx: number = Math.sign(x2 - x1);
// const dy: number = Math.sign(y2 - y1);
// const width: number = Math.abs(x2 - x1);
// const height: number = Math.abs(y2 - y1);
// let collides: boolean = false;
// if (width >= height) {
// const slope: number = height / width;
// for (let i: number = 0; i <= width; i++) {
// if (solidMap.isPointSolid(x, y)) {
// collides = true;
// break;
// }
// x += dx;
// y += Math.round(dy * slope);
// }
// } else {
// const slope: number = width / height;
// for (let i: number = 0; i <= height; i++) {
// if (solidMap.isPointSolid(x, y)) {
// collides = true;
// break;
// }
// x += Math.round(dx * slope);
// y += dy;
// }
// }
// return { x, y, collides};
// }
export function* walk(x1: number, y1: number, x2: number, y2: number) { // it creates a line
const dx = Math.abs(x2 - x1), sx = x1 < x2 ? 1 : -1;
const dy = -Math.abs(y2 - y1), sy = y1 < y2 ? 1 : -1;
let err = dx + dy,
e2;
while(true) {
yield [x1, y1];
if (x1 == x2 && y1 == y2) break;
e2 = 2 * err;
if (e2 >= dy) { err += dy; x1 += sx; }
if (e2 <= dx) { err += dx; y1 += sy; }
}
}
export function unStuck({x: oldX, y: oldY}: Point, {x: newX, y: newY}: Point, solidMap: SolidMap) {
let lastPos = [oldX, oldY];
let collides: boolean | number[] = false;
for(const pos of walk(oldX, oldY, newX, newY)) {
if(solidMap.isPointSolid(pos[0], pos[1])) {
collides = pos;
break;
}
lastPos = pos;
}
return {
x: lastPos[0],
y: lastPos[1],
collides // if collides it returns an array with where exactly it collides
// TODO: try to glow the wall which cursor collides with
}
}
// export function changeStateOfWall(wall: LevelObject, grid: Uint8Array[], state: number) {
// // @ts-ignore
// const x2 = wall.x + wall.width;
// // @ts-ignore
// const y2 = wall.y + wall.height;
// for(let y = wall.y; y < y2; y++) {
// const array = grid[y];
// for(let x = wall.x; x < x2; x++) {
// array[x] = state;
// }
// }
// }
export function rgbToHex(r: number, g: number, b: number) {
return "#" + ((1 << 24) + (r << 16) + (g << 8) + b).toString(16).slice(1);
}
export function generateRainbow(times: number = 32, frequency: number = 0.01) {
let colors = [];
for (let i = 0; i < times; i++) {
let r = Math.sin(frequency * i + 0) * 127 + 128;
let g = Math.sin(frequency * i + 2) * 127 + 128;
let b = Math.sin(frequency * i + 4) * 127 + 128;
colors.push([r, g, b].map(x => Math.round(x)));
}
return colors;
}
export function* rainbowGenerator(frequency: number = 0.1) {
for (let i = 0; ; i++) {
let r = Math.sin(frequency * i + 0) * 127 + 128;
let g = Math.sin(frequency * i + 2) * 127 + 128;
let b = Math.sin(frequency * i + 4) * 127 + 128;
yield [r, g, b].map(x => Math.round(x));
}
}
export function sleep(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms));
}
export async function getCursorsServer() {
return defaultURL;
location.href.replace("http", "ws")
// const info = await findServerPreference("cursors");
// return info && info[0] ? infoToIP(info[0]) : defaultURL;
}<file_sep>/src/ts/classes/LevelObjects.ts
// export interface LevelObject {
// id: number;
// type: objectTypes;
// x: number;
// y: number;
// // ^ all
// width?: number;
// height?: number;
// // ^ wall, exit/red thing, hover, button
// color: string;
// // ^ wall, hover, button
// isBad?: boolean;
// // ^ exit/red thing
// size?: number;
// isCentered?: boolean;
// content?: string;
// // ^ text
// count?: number;
// // ^ hover, button
// }
export enum ObjectTypes {
TEXT,
WALL,
TELEPORT,
CURSOR_COUNTER,
BUTTON,
DEBUG_OBJECT = 0xFF
}
export class LevelObjectStub {
id: number = 0;
x: number = 0;
y: number = 0;
}
export class TextObject extends LevelObjectStub {
readonly type = ObjectTypes.TEXT;
fontSize: number = 0;
isCentered: boolean = false;
content: string = "";
}
export class WallObject extends LevelObjectStub {
readonly type = ObjectTypes.WALL;
width: number = 0;
height: number = 0;
color: string = "#000000";
}
export class TeleportObject extends LevelObjectStub {
readonly type = ObjectTypes.TELEPORT;
width: number = 0;
height: number = 0;
isBad: boolean = false;
}
export class CursorCounterObject extends LevelObjectStub {
readonly type = ObjectTypes.CURSOR_COUNTER;
width: number = 0;
height: number = 0;
color: string = "#000000";
count: number = 0;
}
export class ButtonObject extends LevelObjectStub {
readonly type = ObjectTypes.BUTTON;
width: number = 0;
height: number = 0;
color: string = "#000000";
count: number = 0;
lastClickAt: number = 0; // TODO: set this somewhere
}
export class DebugObject {
readonly type = ObjectTypes.DEBUG_OBJECT;
id: number = 0;
}
export type LevelObject = TextObject | WallObject | TeleportObject | CursorCounterObject | ButtonObject | DebugObject;<file_sep>/src/ts/mouse.ts
import { canvas } from './elements';
import { mapSize } from './gameSettings'
import EventEmitter from 'events';
import { Point, MousePositionInterface } from './types';
import { rendererSettings } from "./gameSettings";
const scale = rendererSettings.scale;
const mouseEvents = new EventEmitter();
const { canvasWidth, canvasHeight } = mapSize;
export default mouseEvents;
export const mousePosition: MousePositionInterface = {
x: 0,
y: 0,
canvasX: 0,
canvasY: 0
};
function getMousePos(canvas: HTMLCanvasElement, evt: MouseEvent): Point {
const rect = canvas.getBoundingClientRect();
return {
x: evt.clientX - rect.left,
y: evt.clientY - rect.top
};
}
const trunc = Math.trunc;
function setPositionX(x: number) {
mousePosition.canvasX = x;
mousePosition.x = trunc(x / scale);
}
function setPositionY(y: number) {
mousePosition.canvasY = y;
mousePosition.y = trunc(y / scale);
}
canvas.addEventListener("mousemove", event => {
const isLockedCanvas = document.pointerLockElement === canvas;
if(isLockedCanvas) {
let canvasX = mousePosition.canvasX + event.movementX;
let canvasY = mousePosition.canvasY + event.movementY;
if(canvasX >= canvasWidth) canvasX = canvasWidth - 1;
else if(canvasX < 0) canvasX = 0;
setPositionX(canvasX)
if(canvasY >= canvasHeight) canvasY = canvasHeight - 1;
else if(canvasY < 0) canvasY = 0;
setPositionY(canvasY)
} else {
const pos = getMousePos(canvas, event);
setPositionX(pos.x);
setPositionY(pos.y);
}
mouseEvents.emit("mousemove", mousePosition, isLockedCanvas, event);
});
canvas.addEventListener("mousedown", event => {
mouseEvents.emit("mousedown", mousePosition, event);
});
canvas.addEventListener("mouseup", event => {
mouseEvents.emit("mouseup", mousePosition, event);
});
<file_sep>/webpack.config.js
const path = require("path");
const webpack = require('webpack');
const fs = require('fs-extra');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin');
const { version: VERSION } = require("./package.json");
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
module.exports = async (env = {}) => {
const srcDir = path.resolve(__dirname, "src");
const isProductionBuild = !!env.production;
const shouldCleanDist = isProductionBuild || env.shouldCleanDist;
const BUILD_NUMBER = +(await fs.readFile("./build.txt", {encoding: "utf8"})) + 1;
await fs.writeFile("./build.txt", BUILD_NUMBER.toString());
const config = {
mode: isProductionBuild ? "production" : "development",
devtool: isProductionBuild ? "source-map" : "eval",
entry: {
client_out: path.resolve(srcDir, "ts", "main.ts")
},
devServer: {
static: {
directory: path.resolve(__dirname, 'dist'),
},
watchFiles: ['src/**/*.ts', 'dist/**/*'],
},
output: {
filename: "[name].js",
path: path.resolve(__dirname, 'dist'),
publicPath: isProductionBuild ? '/' : './',
clean: shouldCleanDist,
},
resolve: {
extensions: ['*', '.ts', '.js', '.json'],
fallback: { // https://webpack.js.org/configuration/resolve/#resolvefallback
buffer: require.resolve('buffer'),
events: require.resolve('events'),
},
},
module: { // https://webpack.js.org/guides/asset-management/#loading-images
rules: [
{
include: path.resolve(srcDir, 'ts'),
loader: 'ts-loader',
},
{
test: /\.(css)$/, // https://stackoverflow.com/questions/53653652/how-to-force-webpack-to-put-the-plain-css-code-into-html-heads-style-tag
use: [
MiniCssExtractPlugin.loader,
"css-loader",
]
},
{
test: /\.(ttf)$/i,
type: 'asset/resource',
generator: {
filename: "fonts/[name][ext]"
},
},
{
test: /\.(png|svg|jpg|jpeg|gif)$/i,
type: 'asset/resource',
generator: {
filename: "img/[name][ext]"
},
},
]
},
plugins: [
new MiniCssExtractPlugin(),
new ForkTsCheckerWebpackPlugin(),
new HtmlWebpackPlugin({
title: 'Modded cursors.io client', // Cursors
inject: 'body',
template: path.resolve(srcDir, 'index.html'),
favicon: path.resolve(srcDir, 'favicon.ico'),
minify: "auto"
}),
new webpack.EnvironmentPlugin({
PRODUCTION_BUILD: isProductionBuild,
BUILD_NUMBER,
VERSION,
})
]
};
if (config.output.clean) {
console.log(`Cleaning build dir: '${config.output.path}'`);
}
console.log(`${config.mode} build\nVersion: ${VERSION}\nBuild: ${BUILD_NUMBER}\n`);
return config;
}<file_sep>/readwholedir.js
const fs = require("fs-extra");
const path = require("path");
async function readWholeDir(dir) {
let tree = {};
for(let file of await fs.readdir(dir)) {
const a = path.resolve(dir, file);
try {
if((await fs.stat(a)).isDirectory()) {
tree[file] = await readWholeDir(a);
} else {
tree[file] = true;
}
} catch(e) {
tree[file] = false; // error
}
}
return tree;
}
module.exports = readWholeDir;<file_sep>/src/ts/Networking/Opcodes.ts
enum Opcode { // OPeration code
GET_ID = 0x00,
UPDATE = 0x01, // cursors, clicks, remove objects, add or update objects, lines
NEW_LEVEL = 0x04,
PREDICTION_ERROR = 0x05 // Collision error / Teleport
}
export default Opcode;<file_sep>/src/ts/types.ts
export interface Player {
id: number;
x: number;
y: number;
// joinedAt: number; // innacutare and useless
}
export interface Point {
x: number;
y: number;
}
export interface MousePositionInterface extends Point {
canvasX: number;
canvasY: number;
} <file_sep>/src/ts/classes/Click.ts
import { rendererSettings } from "../gameSettings";
const clickRenderTime = rendererSettings.clickRenderduration
export default class Click {
public removeAt: number;
constructor(public x: number, public y: number,
public clickedAt: number
) {
this.removeAt = clickedAt + clickRenderTime;
}
} | 60bbc459590bd7104a90df11a8d30841e80a4941 | [
"Markdown",
"TypeScript",
"JSON with Comments",
"JavaScript"
] | 23 | TypeScript | mathmakgakpak/cursors.io-webpack-client | 3ed7b3818517902083b01c62e4023f7f59d53c5e | 7abaeae90b9fc23475fd77ead5bc6d8e8b204743 |
refs/heads/master | <file_sep>package mx.ipn.cic.appsitiostur;
import android.support.v4.app.FragmentActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;
public class MapsActivity extends FragmentActivity implements OnMapReadyCallback{
private GoogleMap mMap;
private Hashtable<String, List<Double>> datosMarcador;
private Button buttonBellasArtes;
private Button buttonESCOM;
private Button buttonCIC;
private Button buttonZocalo;
private Button buttonGuadalajara;
private Button buttonPuebla;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_maps);
// Obtain the SupportMapFragment and get notified when the map is ready to be used.
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
.findFragmentById(R.id.map);
mapFragment.getMapAsync(this);
// Creamos el diccionario de latitudes y longitudes
datosMarcador = new Hashtable<>();
llenarDatos();
// Referencias a los botones
buttonBellasArtes = findViewById(R.id.buttonBellasArtes);
buttonESCOM = findViewById(R.id.buttonEscom);
buttonCIC = findViewById(R.id.buttonCIC);
buttonZocalo = findViewById(R.id.buttonZocalo);
buttonGuadalajara = findViewById(R.id.buttonGuadalajara);
buttonPuebla = findViewById(R.id.buttonPuebla);
// Listener de los botones
buttonBellasArtes.setOnClickListener(OnClickListener);
buttonESCOM.setOnClickListener(OnClickListener);
buttonCIC.setOnClickListener(OnClickListener);
buttonZocalo.setOnClickListener(OnClickListener);
buttonGuadalajara.setOnClickListener(OnClickListener);
buttonPuebla.setOnClickListener(OnClickListener);
}
/**
* Manipulates the map once available.
* This callback is triggered when the map is ready to be used.
* This is where we can add markers or lines, add listeners or move the camera. In this case,
* we just add a marker near Sydney, Australia.
* If Google Play services is not installed on the device, the user will be prompted to install
* it inside the SupportMapFragment. This method will only be triggered once the user has
* installed Google Play services and returned to the app.
*/
@Override
public void onMapReady(GoogleMap googleMap) {
mMap = googleMap;
}
private void llenarDatos(){
datosMarcador.put("<NAME>", Arrays.asList(19.435651, -99.141226));
datosMarcador.put("ESCOM", Arrays.asList(19.504507, -99.146911));
datosMarcador.put("CIC", Arrays.asList(19.503096, -99.147593));
datosMarcador.put("Zocalo", Arrays.asList(19.432622, -99.133177));
datosMarcador.put("Guadalajara", Arrays.asList(20.675171, -103.347328));
datosMarcador.put("Puebla", Arrays.asList(19.041439, -98.206276));
}
private void addSelectedMarker(String place){
List<Double> position= datosMarcador.get(place);
LatLng selectedPlace = new LatLng(position.get(0), position.get(1));
mMap.addMarker(new MarkerOptions().position(selectedPlace).title("Marker in "+place));
mMap.moveCamera(CameraUpdateFactory.newLatLng(selectedPlace));
}
private View.OnClickListener OnClickListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
String texto = ((Button) v).getText().toString();
addSelectedMarker(texto);
}
};
}
<file_sep># Android
Dispositivos Móviles
<file_sep>package mx.ipn.cic.calculadora;
import java.util.Stack;
import java.util.StringTokenizer;
public class Operaciones {
// Constructor
public Operaciones(){ }
private Stack<String> digitos = new Stack<String>();
private Stack<String> operadores = new Stack<String>();
public String convertirPostFijo(StringTokenizer cadena) {
String expresionPost = new String("");
while(cadena.hasMoreElements()) {
try {
String element = cadena.nextToken();
if (Character.isLetterOrDigit(element.charAt(0))) {
expresionPost += element + " ";
} else if (element.equals("(")) {
operadores.push(element);
} else if (element.equals(")")) {
while (!operadores.isEmpty() && !operadores.peek().equals("(")) {
expresionPost += operadores.pop() + " ";
} // while.
if (operadores.isEmpty() && operadores.peek().equals("(")) {
return "Math error";
} else {
operadores.pop();
} // else.
} else {
while (!operadores.isEmpty() && pesosOperadores(element) <= pesosOperadores(operadores.peek())) {
expresionPost += operadores.pop() + " ";
}
operadores.push(element);
}
} catch (Exception e) {
return "Math Error";
}
}
while (!operadores.isEmpty()) {
expresionPost += operadores.pop() + " ";
}
return expresionPost;
}
public String expresionFinal(StringTokenizer expresionPost) {
Double a;
Double b;
Double c;
while(expresionPost.hasMoreElements()) {
try {
String element = expresionPost.nextToken();
if(isOperador(element)) {
if(expresionTrigonometrica(element) || element.equals("$")) {
a = Double.parseDouble(expresionPost.nextToken());
c = operacion(a, element);
} else {
b = Double.parseDouble(digitos.pop());
a = Double.parseDouble(digitos.pop());
c = operacion(a, b, element);
}
digitos.push(c.toString());
continue;
} else {
digitos.push(element);
}
} catch (Exception e) {
return "Math Error";
}
}
return digitos.pop();
} // method.
public Integer pesosOperadores(String operador) {
if (operador.equals("Sin")) {
return 4;
} else if (operador.equals("Cos")) {
return 4;
} else if (operador.equals("Tan")) {
return 4;
} else if (operador.equals("Cot")) {
return 4;
} else if (operador.equals("Sec")) {
return 4;
} else if (operador.equals("Csc")) {
return 4;
} else if (operador.equals("^")) {
return 3;
} else if (operador.equals("*")) {
return 2;
} else if (operador.equals("/")) {
return 2;
} else if (operador.equals("+")) {
return 1;
} else if (operador.equals("-")) {
return 1;
} else if (operador.equals("(")) {
return 0;
} else if (operador.equals(")")) {
return 0;
} else if(operador.equals("$")){
return 3;
}
else {
return -1;
} // else.
} // method.
public Double operacion(Double a, String operador) {
if (operador.equals("Sin")) {
return Math.sin(a);
} else if (operador.equals("Cos")) {
return Math.cos(a);
} else if (operador.equals("Tan")) {
return Math.tan(a);
} else if (operador.equals("Cot")) {
return 1 / Math.tan(a);
} else if (operador.equals("Sec")) {
return 1 / Math.cos(a);
} else if (operador.equals("Csc")) {
return 1 / Math.sin(a);
} else if(operador.equals("$")){
return Math.sqrt(a);
}
else {
return -1.0;
}
}
public Double operacion(Double a, Double b, String operador) {
if (operador.equals("^")) {
return Math.pow(a, b);
} else if (operador.equals("*")) {
return a * b;
} else if (operador.equals("/")) {
return a / b;
} else if (operador.equals("+")) {
return a + b;
} else if (operador.equals("-")) {
return a - b;
} else {
return -1.0;
}
}
public Boolean expresionTrigonometrica(String operador) {
if (operador.equals("Sin")) {
return true;
} else if (operador.equals("Cos")) {
return true;
} else if (operador.equals("Tan")) {
return true;
} else if (operador.equals("Cot")) {
return true;
} else if (operador.equals("Sec")) {
return true;
} else if (operador.equals("Csc")) {
return true;
} else {
return false;
}
}
public Boolean isOperador(String operador) {
try{
Double.parseDouble(operador);
return false;
}catch(Exception e){
return true;
}
}
}
| 29d24de2045268d239ce06a6bf45ca67f999d029 | [
"Markdown",
"Java"
] | 3 | Java | SaidNM/Android | c678edfe0c1c155409f56b3e49ac546852f4f491 | 3b7174da02f5ed79b6c936660bdbfdf3250872fc |
refs/heads/master | <file_sep>package com.chakrireddy.ott31;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.LinearLayout;
public class Contact extends AppCompatActivity implements View.OnClickListener {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.contact_activity);
LinearLayout website = (LinearLayout) findViewById(R.id.websitelayout);
website.setOnClickListener(this);
LinearLayout instagram = (LinearLayout) findViewById(R.id.instagramlayout);
instagram.setOnClickListener(this);
LinearLayout linkedin = (LinearLayout) findViewById(R.id.linkedinlayout);
linkedin.setOnClickListener(this);
LinearLayout email = (LinearLayout) findViewById(R.id.emaillayout);
email.setOnClickListener(this);
} //E.O.Oncreate
@Override
public void onClick(View view) {
final int id = view.getId();
switch (id) {
case R.id.websitelayout:
startBrowserActivity("http://www.chakrireddy.com", "empty");
break;
case R.id.instagramlayout:
startBrowserActivity("http://www.instagram.com/chkry", "com.instagram.android");
break;
case R.id.linkedinlayout:
startBrowserActivity("http://www.linkedin.com/in/chkry", "com.linkedin.android");
break;
case R.id.emaillayout:
Intent email = new Intent(Intent.ACTION_SEND);
email.putExtra(Intent.EXTRA_EMAIL, new String[]{"<EMAIL>"});
email.putExtra(Intent.EXTRA_SUBJECT, "OTT 3/1 - Email Response");
//need this to prompts email client only
email.setType("message/rfc822");
startActivity(Intent.createChooser(email, "Choose an Email client :"));
break;
}
} //E.o.OnClick
public void startBrowserActivity(String str, String pack) {
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(str));
browserIntent.setPackage(pack);
try {
startActivity(browserIntent);
} catch (ActivityNotFoundException e) {
startActivity(new Intent(Intent.ACTION_VIEW,
Uri.parse(str)));
}
}
}
| 86b167f5a0b13216791a08baf4b6e9b1e9f04e8b | [
"Java"
] | 1 | Java | chkry/OTT3-1 | e22b0b3c326f897dd0ea0b2d00ca30a0a3af211d | c33f188024dbcef71b39ddfac463657289fc5866 |
refs/heads/master | <repo_name>radishrp/TicTacToe<file_sep>/main.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
//Function Definitions
int checkwin(char *, char);
void winmessage(int, char);
int findwinningmove(char *,char);
int numberofmovesleft(char *);
char player;
int makemove(char*,int,char);
int computermove(char *);
int aichoice;
int main(){
int win_result_return_value;
char newgame ='G';
char inputdata[]=" "; //This is the board data.
display_tictactoe_boardarray(inputdata);
int m=1;
aichoice = 10;
TicTacToeRun(inputdata);
while(m=1)
{ //Asks user if he or she wants to play again.
if(newgame=='Y')
{
strcpy(inputdata," ");
TicTacToeRun(inputdata);
}
if(newgame=='N')
{
exit(0);
}
else
{
printf("Error. Play again? Y/N?\n");
scanf(" %C",&newgame);
};
};
};
//TicTacToeRun() is the function that runs the game.
//It keeps track of whose turn it is.
//It also breaks out of the function if either player wins.
//At the end of the 9 turns, if no player has won, the "tie" message will appear.
int TicTacToeRun(char *inputdata)
{
int win_position_return_value=0;
int j=0;
for(;j<9;j++)
{
if(j%2==0)
{
humanmakemove(inputdata,'X');
win_position_return_value = checkwin(inputdata,'X');
winmessage(win_position_return_value,'X');
if(win_position_return_value !=0)
{
break;
}
}
else
{
computermove(inputdata);
win_position_return_value = checkwin(inputdata,'O');
winmessage(win_position_return_value,'O');
if(win_position_return_value !=0)
{
break;
};
};
};
if(win_position_return_value==0)
{
printf("It's a tie!\n");
};
}
//This is the function that runs when it is the player's turn.
//The board is first drawn with the numbers corresponding to the board data, then the move is made according to the input.
void humanmakemove(char *inputdata, char player)
{
int num=0;
numberofmovesleft(inputdata);
printf("[0][1][2]\n[3][4][5]\n[6][7][8]\nYour turn. Where do you want to place?\nType in a valid number listed above.\nPlacement:");
scanf("%d",&num);
makemove(inputdata,num,'X');
};
//Counts the number of moves left and counts the number of turns left.
//Now that I think about it, it's really unnecessary, as I could just add it tictactoe function.
int numberofmovesleft(char *inputdata)
{
int i, j=0;
for(i=0;i<10;i++)
{
if(inputdata[i]== ' ')
{
j++;
};
};
printf("Turn %d.\n%d turns left.\n\n",10-j,j);
return j;
}
//The function that makes the move. Also unnecessary. Can be placed in computermove() and humanmakemove() easily.
//Move the empty check to humanmakemove and the actual one line placement function to each.
int makemove(char *inputdata,int placement, char player)
{
if (inputdata[placement]==' ')
{
inputdata[placement] = player;
printf("Last move:\n");
display_tictactoe_boardarray(inputdata);
}
else
{
printf("ERROR TRY AGAIN!!\n\n");
humanmakemove(inputdata,'X');
};
}
//Computer's function to move.
//First it picks a winning move if possible
//Then it finds the user's winning move and prevents it.
//Then it follows the "next best move" approach.
//Note that all AI moves are made in the function. The other functions only find the moves.
//The aichoice variable is used here to keep the AI from making more than 1 move at a time.
int computermove(char *inputdata)
{
aichoice=findwinningmove(inputdata,'O');
if(aichoice!=10)
{
makemove(inputdata,aichoice,'O');
aichoice=1;
return 0;
}
if(aichoice==10)
{
findwinningmove(inputdata,'X');
if(aichoice!=10)
{
makemove(inputdata,aichoice,'O');
aichoice=1;
return 0;
}
}
if(aichoice==10)
{
findnextbestmove(inputdata);
};
}
//The code for finding the winning move.
int findwinningmove(char *inputdata,char player)
{
int i, win_result_return_value;
char test_position[10];
for (i = 0; i < 9; i++)
{
if (inputdata[i] == ' ')
{
strcpy(test_position,inputdata);
test_position[i] = player;
int win_result_return_value = checkwin(test_position, player);
if(win_result_return_value != 0)
{
aichoice = i;
return aichoice;
break;
}
else
{
continue;
};
};
};
aichoice=10;
return aichoice;
};
//Find the "next best move".
//The code places in the middle first, then next to the opponent's move if they placed in the corner.
//Then it places in the corners.
int findnextbestmove(char *inputdata)
{
int i, aichoice2=1;
if(inputdata[4]==' ')
{
//places at middle if empty
makemove(inputdata,4,'O');
return 0;
}
else
{
for (i = 0; i < 9; i+=2)
{
//checks corners for 'X', then places beside it.
if (inputdata[i] == 'X')
{
if ((i<7) && (i!=4) && (inputdata[i+1]) == ' ')
{
makemove(inputdata,(i+1),'O');
aichoice2=0;
break;
}
if ((i>0) && (i!=4) && (inputdata[i-1]) == ' ')
{
makemove(inputdata,(i-1),'O');
aichoice2=0;
break;
}
else
{
continue;
}
}
};
if(aichoice2==1)
{
//places in corner
for(i=0;i<9;i+=2)
{
if(inputdata[i]==' ')
{
makemove(inputdata,i,'O');
break;
}
}
}
}
}
//Displays the type of win achieved.
void winmessage(int win_position_return_value, char player)
{
switch(win_position_return_value)
{
case 10 : printf("Horizontal win on Row 1 by %c!\n",player); return 0;
case 13 : printf("Horizontal win on Row 2 by %c!\n",player); return 0;
case 16 : printf("Horizontal win on Row 3 by %c!\n",player); return 0;
case 20 : printf("Vertical win on Column 1 by %c!\n",player); return 0;
case 21 : printf("Vertical win on Column 2 by %c!\n",player); return 0;
case 22 : printf("Vertical win on Column 3 by %c!\n",player); return 0;
case 30 : printf("Diagonal win by %c!\n", player); return 0;
default: printf("No win condition.\n"); return 0;
};
};
//makes the "graphic render"
void display_tictactoe_boardarray(char *inputdata)
{
char tictactoe_boardarray[] = "[ ][ ][ ] [ ][ ][ ] [ ][ ][ ]";
int i , j , k ; k=0 ;
for(i=0;i<3;i++)
{
for(j=1;j<=7;j+=3)
{
tictactoe_boardarray[(i*10)+j] = inputdata[k++];
};
};
printf("\n");
for(i=0;i<30;i++)
{
printf("%c",tictactoe_boardarray[i]);
if(i==9||i==19||i==29)
{
printf("\n");
};
};
};
//Checks whether a player has won or not.
int checkwin(char *inputdata,char player)
{
int i;
for(i=0;i<7;i+=3)
{
if(inputdata[i]== player && inputdata[i+1]== player && inputdata[i+2]== player)
{
return (10+i);
};
}; //horizontal win condition
for(i=0;i<3;i++)
{
if(inputdata[i]== player && inputdata[i+3]== player && inputdata[i+6]== player)
{
return (20+i);
}
}; //vertical win condition
if((inputdata[0]== player && inputdata[4]== player && inputdata[8]== player) || (inputdata[2]== player && inputdata[4]== player && inputdata[6]== player))
{
return 30; // diagonal win condition
};
return 0;
};
| a6f9cd0e83af79afc5c75b0ebed9fec2a733dfd6 | [
"C"
] | 1 | C | radishrp/TicTacToe | 4f4d7a647425c8d028d9a1f4666844fa82f1897a | 6ff1795415865769f465862fe28b42354be40f16 |
refs/heads/master | <file_sep>This is an android note app, it now supports for taking a note, deleting and sharing the note you take.
<file_sep>package com.yihsi.android.easynotes;
import android.content.Intent;
import android.os.Build;
import android.support.v4.app.ListFragment;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.content.ContextCompat;
import android.view.ActionMode;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import java.util.ArrayList;
/**
* A placeholder fragment containing a simple view.
*/
public class NoteListFragment extends ListFragment {
private ArrayList<Note> mNotes;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mNotes = NoteLab.getInstance(getActivity()).getNotes();
NoteAdapter adapter = new NoteAdapter(mNotes);
setListAdapter(adapter);
setHasOptionsMenu(true);
setRetainInstance(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_note_list, container, false);
getActivity().getActionBar();
ListView listView = (ListView)view.findViewById(android.R.id.list);
listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE_MODAL);
listView.setMultiChoiceModeListener(new AbsListView.MultiChoiceModeListener() {
@Override
public void onItemCheckedStateChanged(ActionMode mode, int position,
long id, boolean checked) {
}
//ActionMode.Callback methods
@Override
public boolean onCreateActionMode(ActionMode mode, Menu menu) {
MenuInflater inflater = mode.getMenuInflater();
inflater.inflate(R.menu.note_item_selected, menu);
//If API level is not smaller than 21, change status bar color to #757575
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
getActivity().getWindow().setStatusBarColor(ContextCompat.getColor(
getActivity(), R.color.actionModePrimaryDark));
}
return true;
}
@Override
public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
return false;
}
@Override
public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_item_delete_note:
NoteAdapter adapter = (NoteAdapter) getListAdapter();
NoteLab noteLab = NoteLab.getInstance(getActivity());
for (int i = adapter.getCount() - 1; i >= 0; i--) {
if (getListView().isItemChecked(i)) {
noteLab.removeNote(adapter.getItem(i));
}
}
mode.finish();
adapter.notifyDataSetChanged();
noteLab.saveNotes();
return true;
default:
return false;
}
}
@Override
public void onDestroyActionMode(ActionMode mode) {
//Change the color of status bar back
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
getActivity().getWindow().setStatusBarColor(ContextCompat.getColor(
getActivity(), R.color.colorPrimaryDark));
}
}
});
FloatingActionButton mAddNote = (FloatingActionButton) view.findViewById(R.id.add_note);
mAddNote.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Note note = new Note();
NoteLab.getInstance(getActivity()).addNote(note);
Intent intent = new Intent(getActivity(), NoteActivity.class);
intent.putExtra(NoteFragment.EXTRA_NOTE_ID, note.getId());
startActivity(intent);
}
});
return view;
}
@Override
public void onListItemClick(ListView l, View v, int position, long id) {
Note note = ((NoteAdapter)getListAdapter()).getItem(position);
Intent intent = new Intent(getActivity(), NoteActivity.class);
intent.putExtra(NoteFragment.EXTRA_NOTE_ID, note.getId());
startActivity(intent);
}
//Inner class extends ArrayAdapter to customize list item
private class NoteAdapter extends ArrayAdapter<Note> {
public NoteAdapter(ArrayList<Note> notes) {
//Argument 0 indicates using customized layout
super(getActivity(), 0, notes);
}
@Override
//Return a view generates from the customized layout
public View getView(int position, View convertView, ViewGroup parent) {
//If weren't given a view, inflate one
if (convertView == null) {
convertView = getActivity().getLayoutInflater()
.inflate(R.layout.list_item_note, null);
}
//Configure the view for this note
Note note = getItem(position);
TextView titleTextView = (TextView)convertView.findViewById(R.id.note_item_title);
titleTextView.setText(note.getTitle());
TextView textTextView = (TextView)convertView.findViewById
(R.id.note_item_text);
if (note.getText() != null) {
//If the note is filled with whitespace or
if (note.getText().matches("\\s+|R+")) {
textTextView.setText(null);
}
else {
//If the note's length is smaller than 180, display all the characters
if (note.getText().length() < 170) {
textTextView.setText(note.getText());
}
//If the note's length is not smaller than 100, display the first 100 characters
//with "..." appended to the 180th character
else {
textTextView.setText(note.getText().substring(0, 169) + "...");
}
}
}
else {
//If note's title and text are both null, abandon the note
if (note.getTitle() == null) {
NoteLab noteLab = NoteLab.getInstance(getActivity());
noteLab.removeNote(note);
((NoteAdapter) getListAdapter()).notifyDataSetChanged();
noteLab.saveNotes();
}
}
return convertView;
}
}
@Override
public void onResume() {
super.onResume();
((NoteAdapter)getListAdapter()).notifyDataSetChanged();
}
}
<file_sep>package com.yihsi.android.easynotes;
import android.content.Context;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.UUID;
/**
* Created by leodore on 2015/12/2.
*/
public class NoteLab {
private static final String TAG = "NoteLab";
private static final String FILENAME = "notes.json";
private ArrayList<Note> mNotes;
private NoteJSONSerializer mSerializer;
private static NoteLab sNoteLab;
private Context mAppContext;
private NoteLab(Context appContext) {
mAppContext = appContext;
mNotes = new ArrayList<>();
mSerializer = new NoteJSONSerializer(mAppContext, FILENAME);
try {
mNotes = mSerializer.loadNotes();
} catch (Exception e) {
mNotes = new ArrayList<>();
//Log.e(TAG, "Error loading notes: ", e);
Toast.makeText(mAppContext, R.string.error_load, Toast.LENGTH_SHORT).show();
}
}
public static NoteLab getInstance(Context c) {
if (sNoteLab == null) {
sNoteLab = new NoteLab(c.getApplicationContext());
}
return sNoteLab;
}
public ArrayList<Note> getNotes() {
return mNotes;
}
public Note getNote(UUID id) {
for (Note n : mNotes) {
if (n.getId().equals(id))
return n;
}
return null;
}
public void addNote(Note n) {
mNotes.add(n);
}
public void removeNote(Note n) {
mNotes.remove(n);
}
public boolean saveNotes() {
try {
mSerializer.saveNotes(mNotes);
//Log.d(TAG, "notes saved to file.");
return true;
} catch (Exception e) {
// Log.e(TAG, "Error saving notes: ", e);
Toast.makeText(mAppContext, R.string.error_save, Toast.LENGTH_SHORT).show();
return false;
}
}
}
<file_sep>package com.yihsi.android.easynotes;
import android.support.v4.app.Fragment;
import java.util.UUID;
/**
* Created by leodore on 2015/12/1.
*/
public class NoteActivity extends SingleFragmentActivity {
@Override
protected Fragment createFragment() {
UUID mNoteId = (UUID)getIntent().getSerializableExtra(NoteFragment.EXTRA_NOTE_ID);
return NoteFragment.newInstance(mNoteId);
}
}
<file_sep>package com.yihsi.android.easynotes;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Date;
import java.util.UUID;
/**
* Created by leodore on 2015/12/2.
*/
public class Note {
private static final String JSON_ID = "id";
private static final String JSON_TITLE = "title";
private static final String JSON_TEXT ="content";
private static final String JSON_DATE ="date";
private UUID mId;
private String mTitle;
private String mText;
private Date mDate;
public Note() {
mId = UUID.randomUUID();
mDate = new Date();
}
public Note(JSONObject json) throws JSONException {
mId = UUID.fromString(json.getString(JSON_ID));
if (json.has(JSON_TITLE))
mTitle = json.getString(JSON_TITLE);
if (json.has(JSON_TEXT))
mText = json.getString(JSON_TEXT);
mDate = new Date(json.getLong(JSON_DATE));
}
public UUID getId() {
return mId;
}
public String getTitle() {
return mTitle;
}
public void setTitle(String title) {
mTitle = title;
}
public String getText() {
return mText;
}
public void setText(String text) {
mText = text;
}
public Date getDate() {
return mDate;
}
public void setDate(Date date) {
mDate = date;
}
public JSONObject toJSON() throws JSONException {
JSONObject json = new JSONObject();
json.put(JSON_ID, mId.toString());
json.put(JSON_TITLE, mTitle);
json.put(JSON_TEXT, mText);
json.put(JSON_DATE, mDate.getTime());
return json;
}
}
| ca6530b099cc5d40760c5986ca47623acda458c7 | [
"Markdown",
"Java"
] | 5 | Markdown | yihsi/EasyNotes | 9ce689c77e7928904d6c7d4212746f76d37ce37d | 27f136a9e94a6eb62fa60a5b3d505b934a445e09 |
refs/heads/master | <repo_name>alexbur7/MVVM_bar_test_application<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/components/MainActivityComponent.java
package com.example.mvvm_test_application.model.components;
import com.example.mvvm_test_application.model.dagger_models.MainActivityModule;
import com.example.mvvm_test_application.view.MainActivity;
import dagger.Component;
/**
 * Dagger component wiring {@link MainActivity}'s injected fields
 * (service, progress dialog, service connection) from {@link MainActivityModule}.
 */
@Component(modules = MainActivityModule.class)
public interface MainActivityComponent {
    /** Performs field injection on the given activity instance. */
    void inject(MainActivity activity);
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/view/MainActivity.java
package com.example.mvvm_test_application.view;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.fragment.app.FragmentActivity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import com.example.mvvm_test_application.R;
import com.example.mvvm_test_application.model.CocktailAdapter;
import com.example.mvvm_test_application.model.components.DaggerMainActivityComponent;
import com.example.mvvm_test_application.model.dagger_models.ContextAndCallbacksModule;
import com.example.mvvm_test_application.model.dagger_models.MainActivityModule;
import com.example.mvvm_test_application.utils.DownloaderService;
import com.example.mvvm_test_application.viewmodel.CocktailViewModel;
import com.example.mvvm_test_application.viewmodel.DrinkTypeViewModel;
import javax.inject.Inject;
/**
 * Single-activity host for the app's fragments. Implements the callback
 * interfaces of both view models, the adapter, and the downloader service,
 * so all navigation and loading-UI requests funnel through this class.
 */
public class MainActivity extends AppCompatActivity implements DrinkTypeViewModel.Callback, CocktailViewModel.Callback, CocktailAdapter.Callback, DownloaderService.UILoadingCommander {

    // NOTE(review): this field is populated by Dagger, but the live service
    // instance normally arrives via `connection` after bindService() — confirm
    // which of the two is actually used for downloads.
    @Inject
    DownloaderService downloaderService;
    // Modal spinner shown while the cocktail list downloads.
    @Inject
    ProgressDialog progressDialog;
    // Connection used to bind to DownloaderService in onCreate().
    @Inject
    ServiceConnection connection;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Build the Dagger graph with this activity as the callback provider,
        // then inject the three fields above.
        DaggerMainActivityComponent.builder().contextAndCallbacksModule(new ContextAndCallbacksModule(this))
                .mainActivityModule(new MainActivityModule())
                .build().inject(this);
        setContentView(R.layout.activity_fragment);
        // Bind (and auto-create) the download service; unbound in onDestroy().
        bindService(new Intent(this,DownloaderService.class),connection,BIND_AUTO_CREATE);
    }

    /** DrinkTypeViewModel.Callback: opens the ViewPager at the chosen drink-type page. */
    @Override
    public void onDrinkTypeClicked(int position) {
        getSupportFragmentManager().beginTransaction().replace(R.id.fragment_container,DrinkViewPagerFragment.newInstance(position))
                .addToBackStack(null).commit();
    }

    /** CocktailAdapter.Callback: shows the cocktail's web page in an in-app WebView. */
    @Override
    public void openWebSite(String url) {
        getSupportFragmentManager().beginTransaction().add(R.id.fragment_container, CocktailWebViewFragment.newInstance(url))
                .addToBackStack(null).commit();
    }

    /** CocktailViewModel.Callback: navigates to the cocktail detail fragment. */
    @Override
    public void openInformation() {
        getSupportFragmentManager().beginTransaction().replace(R.id.fragment_container, new CocktailFragment())
                .addToBackStack(null).commit();
    }

    /** UILoadingCommander: shows the progress dialog; safe to call from any thread. */
    @Override
    public void showDialog() {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                progressDialog.show();
            }
        });
    }

    /** UILoadingCommander: hides the progress dialog; safe to call from any thread. */
    @Override
    public void dismissDialog() {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                progressDialog.dismiss();
            }
        });
    }

    /** UILoadingCommander: reports a download failure (toast) and hides the dialog. */
    @Override
    public void failDownloading() {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Toast.makeText(MainActivity.this, "Ошибка при загрузке,проверьте подключение к интернету", Toast.LENGTH_SHORT).show();
                progressDialog.dismiss();
            }
        });
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release the service binding and drop the reference to avoid leaking it.
        unbindService(connection);
        downloaderService =null;
    }
}
}<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/dagger_models/CocktailAdapterModule.java
package com.example.mvvm_test_application.model.dagger_models;
import androidx.fragment.app.FragmentActivity;
import com.example.mvvm_test_application.model.Cocktail;
import com.example.mvvm_test_application.model.CocktailAdapter;
import com.example.mvvm_test_application.utils.RetrofitSingleton;
import java.util.List;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module that supplies a {@link CocktailAdapter} whose contents are
 * pre-filtered by drink type, selected via the page index passed at construction.
 */
@Module(includes = {ContextAndCallbacksModule.class})
public class CocktailAdapterModule {

    // Filter keys understood by RetrofitSingleton.getCocktailsFilteredList().
    // These match the "type" values in the backend data and must stay as-is.
    private static final String FILTER_SCOTCH = "Виски";
    private static final String FILTER_VODKA = "Водка";
    private static final String FILTER_CHAMPAGNE = "Шампанское";

    // Page index: 0 = scotch, 1 = vodka, anything else = champagne.
    private final int position;

    public CocktailAdapterModule(int position){
        this.position = position;
    }

    /**
     * Builds the adapter for the drink type corresponding to {@link #position}.
     * <p>
     * The {@code callback} parameter is injected by Dagger but is not consumed
     * here (the previous version stored it in a field that was never read);
     * it is kept to preserve the provider signature for the component graph.
     */
    @Provides
    public CocktailAdapter provideCocktailAdapter(CocktailAdapter.Callback callback){
        final String filter;
        switch (position) {
            case 0:
                filter = FILTER_SCOTCH;
                break;
            case 1:
                filter = FILTER_VODKA;
                break;
            default:
                filter = FILTER_CHAMPAGNE;
                break;
        }
        return new CocktailAdapter(RetrofitSingleton.getCocktailsFilteredList(filter));
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/utils/DownloaderService.java
package com.example.mvvm_test_application.utils;
import android.app.Service;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import androidx.annotation.Nullable;
import com.bumptech.glide.Glide;
import com.example.mvvm_test_application.model.Cocktail;
import com.example.mvvm_test_application.model.components.DaggerDownloaderServiceComponent;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import javax.inject.Inject;
/**
 * Bound {@link Service} that downloads the full cocktail list on a background
 * executor and publishes the result through {@link RetrofitSingleton}.
 * Loading progress is reported to the UI via {@link UILoadingCommander}.
 */
public class DownloaderService extends Service {

    // Executor running the blocking Retrofit call; shut down in onDestroy().
    @Inject
    ExecutorService serviceThread;
    @Inject
    RetrofitSingleton retrofitSingleton;

    /** Binder handing bound clients a direct reference to this service instance. */
    public class DownloadBinder extends Binder{
        private final DownloaderService service;
        public DownloadBinder(DownloaderService service){
            this.service=service;
        }
        public DownloaderService getService(){return service;}
    }

    @Override
    public void onCreate() {
        super.onCreate();
        // Dagger wires serviceThread and retrofitSingleton.
        DaggerDownloaderServiceComponent.builder().build().inject(this);
    }

    /**
     * UI hooks the service calls while downloading; implemented by the host
     * activity. Implementations must be safe to invoke from background threads.
     */
    public interface UILoadingCommander {
        void showDialog();
        void dismissDialog();
        void failDownloading();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Stop accepting work; an in-flight download may be abandoned.
        serviceThread.shutdown();
    }

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        return new DownloadBinder(this);
    }

    /**
     * Asynchronously downloads all cocktails and stores them via
     * {@link RetrofitSingleton#setCocktailsList}.
     * <p>
     * Flow: a fresh thread shows the dialog, submits the synchronous Retrofit
     * call to {@code serviceThread}, then blocks on the Future so the dialog
     * lifecycle brackets the whole operation.
     *
     * @param commander UI callbacks for progress/failure reporting
     * @throws InterruptedException declared for callers, though nothing in this
     *         body currently throws it directly — TODO confirm it can be removed
     */
    public void downloadAllCocktails(final UILoadingCommander commander) throws InterruptedException {
        new Thread(new Runnable() {
            @Override
            public void run() {
                commander.showDialog();
                Future<List<Cocktail>> future= serviceThread.submit(new Callable<List<Cocktail>>() {
                    @Override
                    public List<Cocktail> call() {
                        try {
                            // Synchronous network call — runs on the executor thread.
                            List<Cocktail> list=retrofitSingleton.getCocktailsApi().getCocktails().execute().body();
                            // NOTE(review): dialog is dismissed before the list is
                            // stored by the outer future.get() below — brief window
                            // where the UI is unblocked but data is not yet published.
                            commander.dismissDialog();
                            return list;
                        } catch (IOException e) {
                            e.printStackTrace();
                            commander.failDownloading();
                        }
                        // On I/O failure an empty list is returned so the Future
                        // still completes and the empty list is published.
                        return new ArrayList<>();
                    }
                });
                try {
                    // Block until the download finishes, then publish the result.
                    RetrofitSingleton.setCocktailsList(future.get());
                } catch (ExecutionException | InterruptedException e) {
                    e.printStackTrace();
                    commander.failDownloading();
                }
            }
        }).start();
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/dagger_models/BindingModule.java
package com.example.mvvm_test_application.model.dagger_models;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import androidx.databinding.DataBindingUtil;
import androidx.databinding.ViewDataBinding;
import androidx.fragment.app.FragmentActivity;
import com.example.mvvm_test_application.R;
import com.example.mvvm_test_application.databinding.FragmentCocktailBinding;
import com.example.mvvm_test_application.databinding.FragmentCocktailListBinding;
import com.example.mvvm_test_application.databinding.FragmentCocktailWebViewBinding;
import com.example.mvvm_test_application.databinding.FragmentDrinkTypeBinding;
import com.example.mvvm_test_application.databinding.FragmentDrinkViewPagerBinding;
import com.example.mvvm_test_application.databinding.ItemCocktailBinding;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module that inflates the data-binding objects for every layout in the
 * app, attaching them (without adding) to the {@link ViewGroup} supplied at
 * construction time.
 */
@Module(includes = ContextAndCallbacksModule.class)
public class BindingModule {

    // Parent used for layout-param resolution during inflation (attach=false).
    private final ViewGroup viewGroup;

    public BindingModule(ViewGroup viewGroup){
        this.viewGroup = viewGroup;
    }

    /**
     * Shared inflation helper: all providers differ only by layout resource,
     * so the DataBindingUtil boilerplate lives in one place.
     */
    private <T extends ViewDataBinding> T inflate(FragmentActivity activity, int layoutId) {
        return DataBindingUtil.inflate(LayoutInflater.from(activity), layoutId, viewGroup, false);
    }

    @Provides
    public FragmentCocktailBinding provideCocktailBinding(FragmentActivity activity){
        return inflate(activity, R.layout.fragment_cocktail);
    }

    @Provides
    public FragmentCocktailListBinding provideCocktailListBinding(FragmentActivity activity){
        return inflate(activity, R.layout.fragment_cocktail_list);
    }

    @Provides
    public FragmentDrinkTypeBinding provideDrinkTypeBindingBinding(FragmentActivity activity){
        return inflate(activity, R.layout.fragment_drink_type);
    }

    @Provides
    public FragmentDrinkViewPagerBinding provideDrinkViewPagerBinding(FragmentActivity activity){
        return inflate(activity, R.layout.fragment_drink_view_pager);
    }

    @Provides
    public ItemCocktailBinding provideItemCocktailBinding(FragmentActivity activity){
        return inflate(activity, R.layout.item_cocktail);
    }

    @Provides
    public FragmentCocktailWebViewBinding provideWebViewBinding(FragmentActivity activity){
        return inflate(activity, R.layout.fragment_cocktail_web_view);
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/components/FragmentWebViewComponent.java
package com.example.mvvm_test_application.model.components;
import com.example.mvvm_test_application.model.dagger_models.BindingModule;
import com.example.mvvm_test_application.view.CocktailWebViewFragment;
import dagger.Component;
/**
 * Dagger component injecting the data-binding dependency of
 * {@link CocktailWebViewFragment} from {@link BindingModule}.
 */
@Component(modules = BindingModule.class)
public interface FragmentWebViewComponent {
    /** Performs field injection on the given fragment instance. */
    void inject(CocktailWebViewFragment cocktailWebViewFragment);
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/viewmodel/CocktailItemViewModel.java
package com.example.mvvm_test_application.viewmodel;
import android.content.Context;
import androidx.databinding.BaseObservable;
import androidx.databinding.Bindable;
import com.example.mvvm_test_application.R;
import com.example.mvvm_test_application.model.Cocktail;
import com.example.mvvm_test_application.view.MainActivity;
/**
 * Observable view model for a single cocktail list item. Exposes display
 * strings for data binding over the wrapped {@link Cocktail}.
 */
public class CocktailItemViewModel extends BaseObservable{

    private Cocktail mCocktail;

    /**
     * Replaces the backing cocktail and notifies bound views so every
     * {@code @Bindable} getter is re-queried. (Previously the notification was
     * missing, so recycled list items never refreshed their bindings.)
     */
    public void setCocktail(Cocktail cocktail){
        this.mCocktail=cocktail;
        notifyChange();
    }

    @Bindable
    public Cocktail getCocktail() {
        return mCocktail;
    }

    /** Cocktail display name for the list item title. */
    @Bindable
    public String getCockTailName(){
        return mCocktail.getName();
    }

    /** Localized "structure" label prefixed to the ingredient list. */
    public String getCockTailStructure(Context context,String structure){
        return context.getResources().getString(R.string.structure_title)+structure;
    }

    /** Alcohol strength formatted as a percentage string, e.g. "40%". */
    @Bindable
    public String getCocktailAlcoholable() {
        return mCocktail.getAlcoholable()+"%";
    }

    /** Localized "with ice" / "without ice" label for the given flag. */
    public String isHasIce(Context context,boolean isIce){
        return (isIce ? context.getResources().getString(R.string.with_ice):
                context.getResources().getString(R.string.without_ice));
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/utils/CocktailsAPI.java
package com.example.mvvm_test_application.utils;
import com.example.mvvm_test_application.model.Cocktail;
import java.util.List;
import retrofit2.Call;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.GET;
import retrofit2.http.POST;
/**
 * Retrofit service definition for the cocktail backend.
 */
public interface CocktailsAPI {
    /**
     * Fetches the full cocktail catalogue from the server.
     * The call is synchronous only when executed via {@code Call#execute()}.
     */
    @GET("/getCocktail.php")
    Call<List<Cocktail>> getCocktails();
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/dagger_models/ViewModelsModule.java
package com.example.mvvm_test_application.model.dagger_models;
import android.content.Context;
import androidx.fragment.app.FragmentActivity;
import androidx.lifecycle.ViewModelProvider;
import androidx.lifecycle.ViewModelProviders;
import com.example.mvvm_test_application.view.CocktailFragment;
import com.example.mvvm_test_application.viewmodel.CocktailDataViewModel;
import com.example.mvvm_test_application.viewmodel.CocktailItemViewModel;
import com.example.mvvm_test_application.viewmodel.CocktailViewModel;
import com.example.mvvm_test_application.viewmodel.DrinkTypeViewModel;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module producing the view-model instances used by the app's screens.
 * Callbacks come from {@link ContextAndCallbacksModule} (the host activity).
 */
@Module(includes = ContextAndCallbacksModule.class)
public class ViewModelsModule {

    /**
     * Activity-scoped data view model obtained through the ViewModelProviders
     * framework so it survives configuration changes.
     */
    @Provides
    public CocktailDataViewModel provideCocktailDataViewModel(FragmentActivity activity) {
        ViewModelProvider provider = ViewModelProviders.of(activity);
        return provider.get(CocktailDataViewModel.class);
    }

    /** Fresh per-item view model; one is created for each list row binding. */
    @Provides
    public CocktailItemViewModel provideCocktailItemViewModel(){
        CocktailItemViewModel itemViewModel = new CocktailItemViewModel();
        return itemViewModel;
    }

    /** Cocktail-detail view model with its navigation callback wired in. */
    @Provides
    public CocktailViewModel provideCocktailViewModel(CocktailViewModel.Callback callback){
        CocktailViewModel cocktailViewModel = new CocktailViewModel();
        cocktailViewModel.setCallback(callback);
        return cocktailViewModel;
    }

    /** Drink-type view model; its callback is supplied via the constructor. */
    @Provides
    public DrinkTypeViewModel provideDrinkTypeViewModel(DrinkTypeViewModel.Callback callback){
        DrinkTypeViewModel drinkTypeViewModel = new DrinkTypeViewModel(callback);
        return drinkTypeViewModel;
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/dagger_models/ContextAndCallbacksModule.java
package com.example.mvvm_test_application.model.dagger_models;
import androidx.annotation.NonNull;
import androidx.fragment.app.FragmentActivity;
import com.example.mvvm_test_application.model.CocktailAdapter;
import com.example.mvvm_test_application.utils.DownloaderService;
import com.example.mvvm_test_application.viewmodel.CocktailViewModel;
import com.example.mvvm_test_application.viewmodel.DrinkTypeViewModel;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module exposing the host activity and, via unchecked casts, the
 * callback interfaces it implements. The activity passed in MUST implement
 * all four callback interfaces or injection will fail with a ClassCastException.
 */
@Module
public class ContextAndCallbacksModule {

    @NonNull
    private final FragmentActivity activity;

    public ContextAndCallbacksModule(FragmentActivity activity){
        this.activity=activity;
    }

    /** The activity viewed as the drink-type navigation callback. */
    @Provides
    public DrinkTypeViewModel.Callback provideDrinkTypeViewModelCallback(){
        DrinkTypeViewModel.Callback drinkTypeCallback = (DrinkTypeViewModel.Callback) activity;
        return drinkTypeCallback;
    }

    /** The activity viewed as the cocktail-detail navigation callback. */
    @Provides
    public CocktailViewModel.Callback provideCocktailViewModelCallback(){
        CocktailViewModel.Callback cocktailCallback = (CocktailViewModel.Callback) activity;
        return cocktailCallback;
    }

    /** The activity viewed as the list-adapter click callback. */
    @Provides
    public CocktailAdapter.Callback provideCocktailAdapterCallback(){
        CocktailAdapter.Callback adapterCallback = (CocktailAdapter.Callback) activity;
        return adapterCallback;
    }

    /** The activity viewed as the download-progress UI commander. */
    @Provides
    public DownloaderService.UILoadingCommander provideUILoadingCommander(){
        DownloaderService.UILoadingCommander loadingCommander = (DownloaderService.UILoadingCommander) activity;
        return loadingCommander;
    }

    /** The raw host activity, for consumers needing a Context or lifecycle owner. */
    @Provides
    public FragmentActivity provideActivity(){
        return activity;
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/components/DrinkTypeComponent.java
package com.example.mvvm_test_application.model.components;
import com.example.mvvm_test_application.databinding.FragmentDrinkTypeBinding;
import com.example.mvvm_test_application.model.dagger_models.BindingModule;
import com.example.mvvm_test_application.model.dagger_models.ViewModelsModule;
import com.example.mvvm_test_application.view.DrinkTypeFragment;
import com.example.mvvm_test_application.viewmodel.DrinkTypeViewModel;
import dagger.Component;
/**
 * Dagger component injecting the binding and view-model dependencies of
 * {@link DrinkTypeFragment}.
 */
@Component(modules = {BindingModule.class, ViewModelsModule.class})
public interface DrinkTypeComponent {
    /** Performs field injection on the given fragment instance. */
    void inject(DrinkTypeFragment fragment);
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/dagger_models/FragmentPagerAdapterModule.java
package com.example.mvvm_test_application.model.dagger_models;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentActivity;
import androidx.fragment.app.FragmentManager;
import androidx.fragment.app.FragmentPagerAdapter;
import androidx.fragment.app.FragmentStatePagerAdapter;
import com.example.mvvm_test_application.R;
import com.example.mvvm_test_application.view.CocktailListFragment;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module that builds the {@link FragmentPagerAdapter} backing the
 * drink-type ViewPager: one {@link CocktailListFragment} page per drink type.
 */
@Module(includes = ContextAndCallbacksModule.class)
public class FragmentPagerAdapterModule {

    // Number of drink-type pages: scotch, vodka, champagne.
    private static final int PAGE_COUNT = 3;

    private final FragmentManager manager;

    public FragmentPagerAdapterModule(FragmentManager manager){
        this.manager=manager;
    }

    /**
     * Creates the pager adapter. Only the currently visible page's fragment is
     * kept resumed (BEHAVIOR_RESUME_ONLY_CURRENT_FRAGMENT).
     *
     * @param activity used to resolve localized tab titles
     *                 (parameter was previously misspelled "acivity")
     */
    @Provides
    public FragmentPagerAdapter provideFragmentPagerAdapter(final FragmentActivity activity) {
        return new FragmentPagerAdapter(manager, FragmentStatePagerAdapter.BEHAVIOR_RESUME_ONLY_CURRENT_FRAGMENT) {
            @NonNull
            @Override
            public Fragment getItem(int position) {
                // Each page shows the cocktail list filtered for that drink type.
                return CocktailListFragment.newInstance(position);
            }

            @Override
            public int getCount() {
                return PAGE_COUNT;
            }

            @Nullable
            @Override
            public CharSequence getPageTitle(int position) {
                switch (position) {
                    case 0:
                        return activity.getString(R.string.scotch);
                    case 1:
                        return activity.getString(R.string.vodka);
                    case 2:
                        return activity.getString(R.string.champagne);
                    default:
                        // Unreachable while getCount() == PAGE_COUNT; keeps the
                        // framework default as a safe fallback.
                        return super.getPageTitle(position);
                }
            }
        };
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/model/components/CocktailAdapterComponent.java
package com.example.mvvm_test_application.model.components;
import com.example.mvvm_test_application.model.CocktailAdapter;
import com.example.mvvm_test_application.model.dagger_models.BindingModule;
import com.example.mvvm_test_application.model.dagger_models.ContextAndCallbacksModule;
import dagger.Component;
/**
 * Dagger component injecting the binding and callback dependencies of
 * {@link CocktailAdapter}.
 */
@Component(modules = {BindingModule.class, ContextAndCallbacksModule.class})
public interface CocktailAdapterComponent {
    /** Performs field injection on the given adapter instance. */
    void inject(CocktailAdapter adapter);
}
<file_sep>/settings.gradle
// Gradle settings: single-module project containing only the :app module.
include ':app'
rootProject.name = "MVVM_test_application"
package com.example.mvvm_test_application.model;
import com.google.gson.annotations.SerializedName;
/**
 * Data model for a single cocktail as delivered by the backend JSON.
 * JSON field names are mapped via Gson's {@code @SerializedName}; getters and
 * setters are grouped per field for readability.
 */
public class Cocktail {

    /** Display name of the cocktail. */
    @SerializedName("name")
    private String name;

    /** Alcohol strength; kept as a String exactly as the backend sends it. */
    @SerializedName("alcoholable")
    private String alcoholable;

    /** Ingredient list / composition text. */
    @SerializedName("structure")
    private String structure;

    /** Whether the cocktail is served with ice. */
    @SerializedName("hasIce")
    private boolean hasIce;

    /** Drink-type category used for filtering (e.g. scotch/vodka/champagne). */
    @SerializedName("type")
    private String type;

    /** URL of the cocktail's web page. */
    @SerializedName("urlSite")
    private String urlSite;

    /** URL of the cocktail's image. */
    @SerializedName("urlImage")
    private String urlImage;

    public Cocktail(String name, String alcoholable, String structure, boolean hasIce, String type, String urlSite, String urlImage) {
        this.name = name;
        this.alcoholable = alcoholable;
        this.structure = structure;
        this.hasIce = hasIce;
        this.type = type;
        this.urlSite = urlSite;
        this.urlImage = urlImage;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAlcoholable() {
        return alcoholable;
    }

    public void setAlcoholable(String alcoholable) {
        this.alcoholable = alcoholable;
    }

    public String getStructure() {
        return structure;
    }

    public void setStructure(String structure) {
        this.structure = structure;
    }

    public boolean isHasIce() {
        return hasIce;
    }

    public void setHasIce(boolean hasIce) {
        this.hasIce = hasIce;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getUrlSite() {
        return urlSite;
    }

    public void setUrlSite(String urlSite) {
        this.urlSite = urlSite;
    }

    public String getUrlImage() {
        return urlImage;
    }

    public void setUrlImage(String urlImage) {
        this.urlImage = urlImage;
    }
}
<file_sep>/app/src/main/java/com/example/mvvm_test_application/viewmodel/DrinkTypeViewModel.java
package com.example.mvvm_test_application.viewmodel;
import androidx.databinding.BaseObservable;
import androidx.lifecycle.ViewModel;
/**
 * View-model for the drink-type chooser screen: each button-click handler
 * forwards the corresponding drink's page code to the supplied callback.
 */
public class DrinkTypeViewModel extends BaseObservable {

    private static final int CODE_SCOTCH = 0;
    private static final int CODE_VODKA = 1;
    private static final int CODE_CHAMPAGNE = 2;

    /** Receiver notified with the code of whichever drink type was tapped. */
    public interface Callback {
        void onDrinkTypeClicked(int position);
    }

    private final Callback listener;

    public DrinkTypeViewModel(Callback callback) {
        this.listener = callback;
    }

    public void onScotchClick() {
        listener.onDrinkTypeClicked(CODE_SCOTCH);
    }

    public void onVodkaClick() {
        listener.onDrinkTypeClicked(CODE_VODKA);
    }

    public void onChampagneClick() {
        listener.onDrinkTypeClicked(CODE_CHAMPAGNE);
    }
}
| 6eb3eb5cfd330de3ad1f00ebc2d33d48ef14bdff | [
"Java",
"Gradle"
] | 16 | Java | alexbur7/MVVM_bar_test_application | ee72dfc384eadfcf8228fefbf0cd137c6225b884 | 88b80476129cd4cd49171a98a60c375d24eb4ae9 |
refs/heads/master | <file_sep>import { Message } from 'discord.js';
import CommandStore from './command.store';
import Logger from '../util/logger';
export type DiscordMessageType = Message;
export type MessagePayloadType = {
readonly command: string;
readonly args: string[];
readonly source: DiscordMessageType;
};
// Normalizes a raw Discord message into a command payload: the content is
// lower-cased and split on spaces, the leading prefix character (e.g. "!")
// is stripped from the first token, and the remaining tokens become args.
export const adaptMessage = (
  message: DiscordMessageType,
): MessagePayloadType => {
  const tokens = message.content.toLowerCase().split(' ');
  const [first, ...rest] = tokens;
  return {
    command: first.slice(1),
    args: rest,
    source: message,
  };
};
// Looks up the handler registered for the payload's command and runs it.
// Unknown commands are silently ignored; handler rejections are logged
// rather than rethrown.
export const handleMessage = async (payload: MessagePayloadType) => {
  const handler = CommandStore.get(payload.command);
  if (handler) {
    await handler(payload).catch(Logger.error);
  }
};
<file_sep>FROM node:15.3.0-alpine3.10
WORKDIR /usr/src/cscareers-chanserv
COPY package.json .
# Dev dependencies (e.g. the TypeScript compiler) are required for `yarn build`,
# so install with --production=false and only set NODE_ENV=production afterwards.
# (Previously ENV NODE_ENV=production came before `yarn`, which makes yarn skip
# devDependencies and breaks the build step.)
RUN yarn --production=false
COPY . .
RUN yarn build
ENV NODE_ENV=production
CMD ["node", "./build/index.js"]
<file_sep>import { MessagePayloadType } from '../messages';
// `!help` — replies with the list of supported bot commands.
// Fixes the "channel_anme" typo and lists the remaining commands that exist
// in the command registry (help, kick, highlight).
export default async function help(payload: MessagePayloadType) {
  await payload.source.reply(`**Available Commands:**
\`\`\`
!help - Shows this message
!channels - Lists available community channels
!join channel_name - Joins community channel
!leave channel_name - Leaves community channel
!create channel_name - Creates community channel
!invite user#1234 - Invites user to community channel
!kick user#1234 - Removes user from community channel (channel admins)
!highlight - Pings @here in the community channel (channel admins)
\`\`\`
`);
}
<file_sep>import { DiscordMessageType } from '../app/messages';
import SegmentClient from 'analytics-node';
import { TextChannel } from 'discord.js';
import Logger from './logger';
type EventType = 'bot_command_event' | 'community_message_event';
export const BOT_COMMAND_EVENT = 'bot_command_event';
export const COMMUNITY_MESSAGE_EVENT = 'community_message_event';
export interface IAnalytics {
emit(
message: DiscordMessageType,
event: EventType,
properties?: any,
): Promise<void>;
}
// Analytics stand-in for non-production runs: instead of sending anything to
// Segment, events are written to the debug log (the message author is unused).
export class SandboxAnalytics implements IAnalytics {
  async emit(message: DiscordMessageType, event: EventType, properties?: any) {
    Logger.debug('[analytics emitted]: ', event, properties);
  }
}
/**
 * Production analytics backed by Segment. Each emit both identifies the
 * message author and tracks the event; the two Segment calls are awaited
 * in parallel and any callback error rejects the whole emit.
 */
export class Analytics implements IAnalytics {
  private _client;

  constructor(token: string) {
    this._client = new SegmentClient(token);
  }

  async emit(message: DiscordMessageType, event: EventType, properties?: any) {
    // Adapts one node-style-callback Segment call into a promise.
    const asPromise = (run: (done: (error?: Error) => void) => void) =>
      new Promise((resolve, reject) =>
        run((error) => (error ? reject(error) : resolve(true))),
      );

    const identifyUser = asPromise((done) =>
      this._client.identify(
        {
          userId: message.author.id,
          timestamp: new Date(),
          traits: {
            name: message.author.tag,
          },
        },
        done,
      ),
    );

    const trackEvent = asPromise((done) =>
      this._client.track(
        {
          userId: message.author.id,
          event: event,
          timestamp: new Date(),
          properties: {
            ...(properties ? { ...properties } : {}),
            channel: (message.channel as TextChannel).name,
          },
        },
        done,
      ),
    );

    await Promise.all([identifyUser, trackEvent]);
  }
}
<file_sep>const Constants = {
Prefix: '!',
BotCommandsChannel: 'bot_commands',
CommunityCategoryPrefix: 'community channels',
};
export default Constants;
<file_sep>import {
BOT_COMMANDS_CHANNEL,
fetchCommunityChannels,
isFromBotChannel,
} from '../channels';
import { MessagePayloadType } from '../messages';
const MAX_MESSAGE_LENGTH = 1750;
/**
 * `!channels` — sends the caller the list of community channels, chunked so
 * each message stays under Discord's length limit. Chunks are DMed first and
 * fall back to in-channel replies when the caller's DMs are closed.
 */
export default async function channels(payload: MessagePayloadType) {
  // Only usable from the dedicated bot-commands channel.
  if (!isFromBotChannel(payload.source)) {
    await payload.source.reply(
      `Please run this command in the #${BOT_COMMANDS_CHANNEL} channel`,
    );
    return;
  }
  // Fold channel names into message chunks, starting a new chunk whenever
  // appending the next name would exceed MAX_MESSAGE_LENGTH.
  const messages = fetchCommunityChannels(payload.source.guild)
    .map((channel) => `${channel.name}\n`)
    .reduce(
      (acc: string[], channel) => {
        const idx = acc.length - 1;
        const lastMessage = acc[idx];
        if (lastMessage.length + channel.length >= MAX_MESSAGE_LENGTH) {
          acc.push(channel);
        } else {
          acc[idx] += channel;
        }
        return acc;
      },
      ['**Available Channels:**\n'],
    );
  let shouldSendInChannel = false;
  for (const message of messages) {
    try {
      // We initially try sending these messages via DMs but a user can have their DMs disabled.
      await payload.source.author.send(message);
    } catch {
      shouldSendInChannel = true;
      break;
    }
  }
  // A DM failed part-way: re-send the whole list in the channel instead.
  if (shouldSendInChannel) {
    for (const message of messages) {
      await payload.source.reply(message);
    }
  }
  const createChannelMessage = `Create your own channel via \`!create channel_name\` in #${BOT_COMMANDS_CHANNEL}`;
  // Deliver the closing hint over whichever transport worked above.
  shouldSendInChannel
    ? await payload.source.reply(createChannelMessage)
    : await payload.source.author.send(createChannelMessage);
}
<file_sep>import { TextChannel } from 'discord.js';
import { fetchChannelAdmins, isFromCommunityChannel } from '../channels';
import { MessagePayloadType } from '../messages';
import { fetchUser } from '../users';
/**
 * `!promote user#1234` — appends a user to the channel-admin list stored in
 * this community channel's topic. Server administrators only.
 */
export default async function promote(payload: MessagePayloadType) {
  if (!isFromCommunityChannel(payload.source)) {
    return;
  }
  // Currently only admins are allowed to promote a user to channel admin.
  const isAdmin = Boolean(
    payload.source.member?.hasPermission('ADMINISTRATOR'),
  );
  if (!isAdmin) {
    await payload.source.reply('Insufficient permissions');
    return;
  }
  // Read the tag from the raw content (not payload.args): args are
  // lower-cased during parsing, but Discord tags are case sensitive.
  const user = payload.source.content.split(' ').slice(1).join(' ');
  if (!user) {
    await payload.source.reply('Invalid usage: `!promote user#1234`');
    return;
  }
  const isValidUser = Boolean(fetchUser(user, payload.source.guild));
  if (!isValidUser) {
    await payload.source.reply(
      `Invalid username (case sensitive): \`${user}\``,
    );
    return;
  }
  // The topic is the source of truth for channel admins (see fetchChannelAdmins).
  // NOTE(review): promoting the same user twice appends a duplicate entry —
  // consider de-duplicating before writing the topic.
  const channel = payload.source.channel as TextChannel;
  const channelAdmins = fetchChannelAdmins(channel);
  await channel.setTopic(
    `Channel Admins: ${[...channelAdmins, user].join(', ')}`,
  );
  await payload.source.reply(`Successfully promoted ${user} to channel admin`);
}
<file_sep>import { MessagePayloadType } from './messages';
import commands from './commands';
// Maps a command word (e.g. "join") to its async handler.
type CommandStoreType = Map<
  string,
  (payload: MessagePayloadType) => Promise<void>
>;

// Built once at module load from the handlers exported by ./commands;
// the object keys double as the command words users type.
// TODO (joey.colon): Look into transferring into an immutable map.
const CommandStore: CommandStoreType = new Map(Object.entries(commands));

export default CommandStore;
<file_sep>import { TextChannel } from 'discord.js';
import Logger from '../../util/logger';
import { fetchChannelAdmins, isFromCommunityChannel } from '../channels';
import { MessagePayloadType } from '../messages';
import { fetchUser } from '../users';
/**
 * `!invite user#1234` — grants a user access to the current community channel
 * via a permission overwrite. Allowed for server administrators and this
 * channel's admins (read from the channel topic).
 */
export default async function invite(payload: MessagePayloadType) {
  if (!isFromCommunityChannel(payload.source)) {
    return;
  }
  const channel = payload.source.channel as TextChannel;
  const channelAdmins = fetchChannelAdmins(channel);
  const canInvite =
    Boolean(payload.source.member?.hasPermission('ADMINISTRATOR')) ||
    channelAdmins.includes(payload.source.author.tag);
  if (!canInvite) {
    await payload.source.reply('Insufficient permissions');
    return;
  }
  // Read the tag from the raw content (not payload.args): args are
  // lower-cased during parsing, but Discord tags are case sensitive.
  const user = payload.source.content.split(' ').slice(1).join(' ');
  if (!user) {
    await payload.source.reply('Invalid usage: `!invite user#1234`');
    return;
  }
  const targetUser = fetchUser(user, payload.source.guild);
  if (!targetUser) {
    await payload.source.reply(
      `Invalid username (case sensitive): \`${user}\``,
    );
    return;
  }
  await channel
    .updateOverwrite(targetUser, {
      VIEW_CHANNEL: true,
      SEND_MESSAGES: true,
      READ_MESSAGE_HISTORY: true,
    })
    .then(
      async () => await payload.source.reply(`Successfully invited ${user}`),
    )
    .catch(async (error) => {
      Logger.error(error);
      // Fixed copy-pasted failure text (previously said "Unable to leave channel").
      await payload.source.reply('Unable to invite this user :(');
    });
}
<file_sep>import Bot from './app/bot';
Bot.run();
<file_sep>import { TextChannel, User } from 'discord.js';
import Logger from '../../util/logger';
import { Maybe } from '../../util/types';
import {
BOT_COMMANDS_CHANNEL,
fetchCommunityChannels,
INVALID_CHANNEL_NAMES,
isFromBotChannel,
stripChannelName,
} from '../channels';
import { MessagePayloadType } from '../messages';
import Fuse from 'fuse.js';
// Flavor text announced in a channel when someone joins; "{user}" is the
// placeholder substituted with the joining member's mention.
const JOIN_MESSAGES = [
  'Have no fear, {user} is here!',
  '{user} has entered the community.',
  '{user} has just slid into the community.',
  'Everyone say hello to {user}!',
  'Wow, {user} just joined! Who brought the cake?!',
  'FREEZE! {user} has entered the room.',
];

// Picks a random template and substitutes the user's mention into it.
const generateJoinMessage = (user: User) => {
  const index = Math.floor(Math.random() * JOIN_MESSAGES.length);
  return JOIN_MESSAGES[index].replace('{user}', user.toString());
};
/**
 * `!join channel_name` — grants the caller access to a community channel via
 * a permission overwrite, then announces the join in that channel. When the
 * name has no exact match, fuzzy-matched suggestions are offered instead.
 */
export default async function join(payload: MessagePayloadType) {
  if (!isFromBotChannel(payload.source)) {
    await payload.source.reply(
      `Please run this command in the #${BOT_COMMANDS_CHANNEL} channel`,
    );
    return;
  }
  // Multi-word names arrive as separate args; channel names use underscores.
  const channel = payload.args.join('_');
  if (!channel) {
    await payload.source.reply('Invalid usage: `!join channel_name`');
    return;
  }
  const strippedChannelName = stripChannelName(channel);
  if (INVALID_CHANNEL_NAMES.has(strippedChannelName)) {
    await payload.source.reply('Invalid channel name');
    return;
  }
  const communityChannels = fetchCommunityChannels(payload.source.guild);
  const targetChannel = communityChannels.find(
    ({ channel }) => stripChannelName(channel.name) === strippedChannelName,
  )?.channel as Maybe<TextChannel>;
  if (!targetChannel) {
    // No exact match: offer close names via Fuse fuzzy search.
    const fuse = new Fuse(
      communityChannels.map((channel) => channel.name),
      { includeScore: true },
    );
    // NOTE(review): filtering on Boolean(channel.score) drops results whose
    // score is 0 (Fuse's best/perfect match) — confirm that is intended.
    const suggestedChannels = fuse
      .search(strippedChannelName)
      .filter((channel) => Boolean(channel.score))
      .map((channel) => channel.item);
    await payload.source.reply(`
Unable to locate this channel
Did you mean? \`${suggestedChannels.join(', ')}\`
`);
    return;
  }
  await targetChannel
    .updateOverwrite(payload.source.author, {
      VIEW_CHANNEL: true,
      SEND_MESSAGES: true,
      READ_MESSAGE_HISTORY: true,
    })
    .then(async () => {
      // Acknowledge in #bot_commands, then announce inside the channel.
      await payload.source.react('✅');
      await targetChannel.send(generateJoinMessage(payload.source.author));
    })
    .catch(async (error) => {
      Logger.error(error);
      await payload.source.reply('Unable to join this channel :(');
    });
}
<file_sep>import Logger from '../util/logger';
import Environment from '../environment';
import { Client as DiscordClient, TextChannel } from 'discord.js';
import { adaptMessage, DiscordMessageType, handleMessage } from './messages';
import Constants from '../constants';
import { BOT_COMMANDS_CHANNEL, isFromCommunityChannel } from './channels';
import {
BOT_COMMAND_EVENT,
COMMUNITY_MESSAGE_EVENT,
Analytics as AnalyticsClient,
SandboxAnalytics as SandboxAnalyticsClient,
IAnalytics,
} from '../util/analytics';
/**
 * Boots the bot: validates environment tokens, selects the real or sandbox
 * analytics client, logs into Discord, and wires up the message listener.
 */
async function run() {
  const isProd = Environment.Playground === 'production';
  if (!Environment.DiscordToken) {
    throw new Error('Unable to locate Discord token');
  }
  // Segment is only required in production; sandbox logs events locally.
  if (!Environment.SegmentToken && isProd) {
    throw new Error('Unable to locate Segment token');
  }
  const analyticsClient: IAnalytics = isProd
    ? new AnalyticsClient(Environment.SegmentToken!)
    : new SandboxAnalyticsClient();
  const discordClient = new DiscordClient({
    presence: {
      activity: {
        name: '!help',
        type: 'LISTENING',
      },
    },
  });
  discordClient
    .login(Environment.DiscordToken)
    .then(() => Logger.info('cscareers chanserv is now running!'))
    .catch(Logger.error);
  discordClient.on('message', async (message: DiscordMessageType) => {
    // A command is any non-bot message starting with the prefix ("!").
    const isBotCommandEvent =
      !message.author.bot && message.content[0] === Constants.Prefix;
    const events = [];
    if (isBotCommandEvent) {
      const payload = adaptMessage(message);
      const channelName = (message.channel as TextChannel).name;
      events.push(
        handleMessage(payload),
        // Only commands issued in #bot_commands are tracked in analytics.
        channelName === BOT_COMMANDS_CHANNEL
          ? analyticsClient.emit(message, BOT_COMMAND_EVENT, {
              command: payload.command,
              args: payload.args,
            })
          : null,
      );
    } else if (isFromCommunityChannel(message)) {
      // Plain chatter inside community channels is tracked for engagement.
      events.push(analyticsClient.emit(message, COMMUNITY_MESSAGE_EVENT));
    }
    await Promise.all(events);
  });
}
const Bot = { run };
export default Bot;
<file_sep>import { Guild, Snowflake } from 'discord.js';
import { Maybe } from '../util/types';
// Resolves a "user#discriminator" tag to that member's user id using an
// exact, case-sensitive match against the guild's cached member list.
// Returns undefined when the guild is missing or no member matches.
export const fetchUser = (
  username: string,
  guild: Maybe<Guild>,
): Maybe<Snowflake> => {
  const member = guild?.members.cache.find((m) => m.user.tag === username);
  return member?.user.id;
};
<file_sep>import { MessagePayloadType } from '../messages';
import channels from './channels.command';
import join from './join.command';
import leave from './leave.command';
import create from './create.command';
import invite from './invite.command';
import help from './help.command';
import promote from './promote.command';
import demote from './demote.command';
import highlight from './highlight.command';
import kick from './kick.command';
type CommandsType = {
channels: (payload: MessagePayloadType) => Promise<void>;
join: (payload: MessagePayloadType) => Promise<void>;
leave: (payload: MessagePayloadType) => Promise<void>;
create: (payload: MessagePayloadType) => Promise<void>;
invite: (payload: MessagePayloadType) => Promise<void>;
help: (payload: MessagePayloadType) => Promise<void>;
promote: (payload: MessagePayloadType) => Promise<void>;
demote: (payload: MessagePayloadType) => Promise<void>;
highlight: (payload: MessagePayloadType) => Promise<void>;
kick: (payload: MessagePayloadType) => Promise<void>;
};
export default {
channels,
join,
leave,
create,
invite,
help,
promote,
demote,
highlight,
kick,
} as CommandsType;
<file_sep>// @ts-nocheck
type LoggerType = {
  info: (...args: any) => void;
  debug: (...args: any) => void;
  error: (...args: any) => void;
  warn: (...args: any) => void;
};

// TODO(joey.colon): Set up winston logger.
// Until then, route each level to the matching console method so warnings
// and errors land on stderr instead of being mixed into stdout
// (previously every level used console.log).
function info(...args: any) {
  console.info(...args);
}

function debug(...args: any) {
  console.debug(...args);
}

function error(...args: any) {
  console.error(...args);
}

function warn(...args: any) {
  console.warn(...args);
}

const Logger: LoggerType = {
  info,
  debug,
  error,
  warn,
};

export default Logger;
<file_sep>import { CategoryChannel, Guild, GuildChannel, TextChannel } from 'discord.js';
import Constants from '../constants';
import { Maybe } from '../util/types';
import { DiscordMessageType } from './messages';
export type ChannelRequestType = {
readonly user: string;
readonly channelName: string;
};
type ChannelListType = {
readonly name: string;
readonly user_count: number;
readonly channel: GuildChannel;
};
export const INVALID_CHANNEL_NAMES = new Set(['instructions']);
export const BOT_COMMANDS_CHANNEL = Constants.BotCommandsChannel;
export const COMMUNITY_CATEGORY_PREFIX = Constants.CommunityCategoryPrefix;
const MAX_CHANNEL_LIMIT = 50;
/**
 * Creates a new community text channel for an approved request, placing it in
 * the first "community channels" category that still has room (Discord caps a
 * category at 50 child channels). The requester is recorded as channel admin
 * via the topic. Throws when the guild or a non-full category can't be found.
 */
export const createChannel = async (
  request: ChannelRequestType,
  guild: Maybe<Guild>,
) => {
  if (!guild) {
    throw new Error(`Unable to locate guild ${JSON.stringify(request)}`);
  }
  // Since Discord categories are only allowed to have at most 50 child-channels, we need to determine
  // which available categories we can create this channel in.
  // NOTE(review): each category's count starts at 1 rather than 0, so a
  // category is treated as full at 49 real children — confirm whether this
  // off-by-one is an intentional safety margin.
  const communityCategories = guild.channels.cache
    .filter((channel) => channel.name.startsWith(COMMUNITY_CATEGORY_PREFIX))
    .reduce((acc, channel) => {
      acc[channel.name] = [1, channel as CategoryChannel];
      return acc;
    }, {} as Record<string, [number, CategoryChannel]>);
  if (Object.keys(communityCategories).length === 0) {
    throw new Error(
      `Unable to locate community categories ${JSON.stringify(request)}`,
    );
  }
  // Count existing community channels per parent category.
  // NOTE(review): keys above use channel.name verbatim while the lookup below
  // lower-cases the parent name; these only line up because the lowercase
  // COMMUNITY_CATEGORY_PREFIX filter admits lowercase names — verify.
  fetchCommunityChannels(guild)
    .map((channel) => channel.channel)
    .forEach((channel) => {
      const categoryName = guild.channels.cache
        .get(channel.parentID || '')
        ?.name.toLowerCase();
      if (!categoryName) {
        return;
      }
      const [channelCount, categoryChannel] = communityCategories[categoryName];
      communityCategories[categoryName] = [channelCount + 1, categoryChannel];
    });
  // Pick the first category with capacity remaining.
  const availableCategoryName = Object.keys(communityCategories).find(
    (category) => {
      const [channelCount] = communityCategories[category];
      return channelCount < MAX_CHANNEL_LIMIT;
    },
  );
  if (!availableCategoryName) {
    throw new Error(
      `Unable to find a non-full category ${JSON.stringify(request)}`,
    );
  }
  const [, communityCategory] = communityCategories[availableCategoryName];
  // The topic doubles as the channel-admin registry (see fetchChannelAdmins).
  return await guild.channels.create(request.channelName, {
    type: 'text',
    topic: `Channel Admins: ${request.user}`,
    parent: communityCategory,
  });
};
// True when the message was posted in this guild's bot-commands channel.
export const isFromBotChannel = (message: DiscordMessageType) => {
  const match = message.guild?.channels.cache.some(
    (channel) =>
      channel.name === BOT_COMMANDS_CHANNEL &&
      channel.id === message.channel.id,
  );
  return Boolean(match);
};
// True when the message's channel is one of the community channels.
export const isFromCommunityChannel = (message: DiscordMessageType) => {
  const community = fetchCommunityChannels(message.guild);
  return community.some(({ channel }) => channel.id === message.channel.id);
};
// For the time being the channel topic is the source of truth for channel
// admins (format: "Channel Admins: user#1, user#2"). Eventually this should
// move to a datastore so admins can use the topic to talk to members instead.
export const fetchChannelAdmins = (channel: TextChannel) => {
  if (!channel.topic) {
    return [];
  }
  const adminSegment = channel.topic.split(':')[1] || '';
  return adminSegment
    .split(',')
    .map((tag) => tag.trim())
    .filter(Boolean);
};
/**
 * Lists every channel that lives under a "community channels" category in the
 * guild, sorted by name, with reserved names (e.g. "instructions") removed.
 * Returns [] when the guild is missing.
 */
export const fetchCommunityChannels = (
  guild: Maybe<Guild>,
): ChannelListType[] =>
  guild?.channels.cache
    .filter((channel) => {
      // Keep only channels whose parent category carries the community prefix.
      const { parentID } = channel;
      if (!parentID) {
        return false;
      }
      const categoryName = (
        guild.channels.cache.get(parentID)?.name || ''
      ).toLowerCase();
      if (!categoryName) {
        return false;
      }
      return categoryName.startsWith(COMMUNITY_CATEGORY_PREFIX);
    })
    .map((channel) => ({
      name: channel.name,
      // TODO (joey.colon): `user_count` is currently inaccurate. Since we use Discord overrides to assign members to channels,
      // typical `members.size` returns only the count of admins online in each channel and not accurate total users.
      // Investigate other methods of determining user count.
      user_count: channel.members.size,
      channel: channel,
    }))
    .filter(
      (channel) => !INVALID_CHANNEL_NAMES.has(stripChannelName(channel.name)),
    )
    .sort((a, b) => (a.name > b.name ? 1 : -1)) || [];
// Canonicalizes a channel name for comparison: removes every '#', converts
// spaces to underscores, and lower-cases the result. Uses a global regex for
// '#' — String.replace with a string pattern only replaces the FIRST
// occurrence, which the previous version relied on.
export const stripChannelName = (input: string) =>
  input.replace(/#/g, '').replace(/ /g, '_').toLowerCase();
<file_sep>import { MessageReaction, TextChannel } from 'discord.js';
import Logger from '../../util/logger';
import { Maybe } from '../../util/types';
import {
BOT_COMMANDS_CHANNEL,
ChannelRequestType,
createChannel,
fetchCommunityChannels,
INVALID_CHANNEL_NAMES,
isFromBotChannel,
stripChannelName,
} from '../channels';
import { MessagePayloadType } from '../messages';
const PENDING_COMMUNITY_CHANNELS = 'pending_community_channels';
/**
 * `!create channel_name` — submits a channel request for admin approval. The
 * request is posted as JSON into #pending_community_channels; a 👍 reaction
 * within 24h creates the channel and grants the requester access, a 👎
 * rejects it.
 */
export default async function create(payload: MessagePayloadType) {
  if (!isFromBotChannel(payload.source) || !payload.source.guild) {
    await payload.source.reply(
      `Please run this command in the #${BOT_COMMANDS_CHANNEL} channel`,
    );
    return;
  }
  const communityChannels = fetchCommunityChannels(payload.source.guild);
  const requestedChannelName = stripChannelName(
    payload.args.map((segment) => segment.toLowerCase()).join('_'),
  );
  // A #channel mention in the message also counts as "exists" — the mention
  // itself proves the channel is already there.
  const channelExists = Boolean(
    communityChannels.some(
      (channel) => stripChannelName(channel.name) === requestedChannelName,
    ) || payload.source.mentions.channels.size,
  );
  if (INVALID_CHANNEL_NAMES.has(requestedChannelName)) {
    await payload.source.reply('Invalid channel name');
    return;
  }
  if (channelExists) {
    await payload.source.reply('This channel already exists');
    return;
  }
  const pendingChannel = payload.source.guild.channels.cache.find(
    (channel) => channel.name === PENDING_COMMUNITY_CHANNELS,
  ) as Maybe<TextChannel>;
  if (!pendingChannel) {
    await payload.source.reply('Channel requests are currently disabled.');
    return;
  }
  await payload.source.reply(
    'Your request has been submitted to the admins. We will contact you with next steps :)',
  );
  const channelRequest: ChannelRequestType = {
    user: payload.source.author.tag,
    channelName: requestedChannelName,
  };
  const pendingSubmission = await pendingChannel.send(
    JSON.stringify(channelRequest, null, 2),
  );
  // Wait up to 24h (86400000 ms) for the first 👍/👎 on the submission.
  // NOTE(review): any user who can see the pending channel can approve —
  // this assumes #pending_community_channels is admin-only; confirm.
  await pendingSubmission
    .awaitReactions(
      (reaction: MessageReaction) =>
        ['👍', '👎'].includes(reaction.emoji.name) &&
        reaction.message.id === pendingSubmission.id,
      { max: 1, time: 86400000, errors: ['time'] },
    )
    .then(async (response) => {
      const reaction = response.first();
      if (reaction?.emoji.name === '👍') {
        const newChannel = await createChannel(
          channelRequest,
          payload.source.guild,
        );
        // Grant the requester access to their newly approved channel.
        await newChannel.updateOverwrite(payload.source.author, {
          VIEW_CHANNEL: true,
          SEND_MESSAGES: true,
          READ_MESSAGE_HISTORY: true,
        });
        await payload.source.reply('🥳🥳 Your channel has been approved!');
      } else {
        // Fixed user-facing grammar ("has been not been approved").
        await payload.source.reply(
          'Your channel has not been approved. Contact admins for further explanation.',
        );
      }
    })
    .catch(Logger.error);
}
<file_sep>import Logger from '../../util/logger';
import {
BOT_COMMANDS_CHANNEL,
fetchCommunityChannels,
isFromBotChannel,
stripChannelName,
} from '../channels';
import { MessagePayloadType } from '../messages';
/**
 * `!leave channel_name` — removes the caller's access to a community channel.
 * NOTE(review): this writes an explicit deny overwrite for the user rather
 * than deleting their overwrite; verify that is the intended cleanup.
 */
export default async function leave(payload: MessagePayloadType) {
  if (!isFromBotChannel(payload.source)) {
    await payload.source.reply(
      `Please run this command in the #${BOT_COMMANDS_CHANNEL} channel`,
    );
    return;
  }
  // Multi-word names arrive as separate args; channel names use underscores.
  const channel = payload.args.join('_');
  if (!channel) {
    await payload.source.reply('Invalid usage: `!leave channel_name`');
    return;
  }
  const strippedChannelName = stripChannelName(channel);
  const communityChannels = fetchCommunityChannels(payload.source.guild);
  const targetChannel = communityChannels.find(
    ({ channel }) => stripChannelName(channel.name) === strippedChannelName,
  )?.channel;
  if (!targetChannel) {
    await payload.source.reply('Unable to locate this channel');
    return;
  }
  await targetChannel
    .updateOverwrite(payload.source.author, {
      VIEW_CHANNEL: false,
      SEND_MESSAGES: false,
      READ_MESSAGE_HISTORY: false,
    })
    .then(async () => await payload.source.react('✅'))
    .catch(async (error) => {
      Logger.error(error);
      await payload.source.reply('Unable to leave channel :(');
    });
}
<file_sep>import { TextChannel } from 'discord.js';
import { fetchChannelAdmins, isFromCommunityChannel } from '../channels';
import { MessagePayloadType } from '../messages';
export default async function highlight(payload: MessagePayloadType) {
if (!isFromCommunityChannel(payload.source)) {
return;
}
const channel = payload.source.channel as TextChannel;
const channelAdmins = fetchChannelAdmins(channel);
const hasPermission =
Boolean(payload.source.member?.hasPermission('ADMINISTRATOR')) ||
channelAdmins.includes(payload.source.author.tag);
if (!hasPermission) {
await payload.source.reply('Insufficient permissions');
return;
}
await payload.source.channel.send('@here');
}
<file_sep><p align="center">
<img src="https://i.imgur.com/3XVmGsH.png" alt="Chanserv">
</p>
# Introduction
Chanserv is a Discord bot that empowers sub-communities to be built within a Discord server. This bot was initially created to help connect individuals in the cscareers.dev Discord server during the COVID-19 pandemic by allowing users to request channels based around different topics such as location, company, common interests, etc. As the name suggests, this bot was heavily inspired off of a typical [chanserv bot](http://www.geekshed.net/commands/chanserv/) from IRC.
Chanserv currently supports three roles - server admin, channel admin, and channel member. Server admins can be thought of as the staff running the server. Channel admins can be thought of as administrators scoped locally to a community channel. Channel members are members that have joined a specific community channel.
In a community channel, there can exist many channel admins who are assigned by server admins. Channel admins have the ability to invite, kick, and highlight their respective channel.
# Supported Commands
## User commands:
- `!help` - View currently supported commands accessible by anyone.
- `!channels` - View community channels that are publicly available to be joined
- `!join channel_name` - Joins a specific community channel
- `!leave channel_name` - Leaves a specific community channel
- `!create channel_name` - Submits a request to create a community channel
## Channel admin commands:
- `!kick username#1234` - Removes a user from the respective community channel
- `!invite username#1234` - Invites a user to the respective community channel
- `!highlight` - sends a `@here` Ping to the respective community channel
## Server admin commands:
- `!promote username#1234` - Promotes a user to channel admin in respective community channel
- `!demote username#1234` - Demotes a user to channel member in respective community channel
# Contributing
While requirements will change, we will create issues on an as needed basis. We are also open to your suggestions! You may either create an issue with your suggestion or you can create a suggestion in the [cscareers.dev Discord server](https://cscareers.dev/discord).
| ad21995b53e8ff3bad2e7498591b50571451d672 | [
"Markdown",
"TypeScript",
"Dockerfile"
] | 20 | TypeScript | cscareers-dev/discord-chanserv | a242020b4ac5b31c473344755808a777fbbc2d4e | 1c11bddeeba97f934fff757840e0b68605f09c9e |
refs/heads/master | <repo_name>mindis/Simple-Meetup-Client<file_sep>/README.md
Simple-Meetup-Client
======================
A simple Meetup client to be run on local IDE (Eclipse/IntelliJ). It connects to the Meetup API at https://api.meetup.com/, gets the groups for a topic of interest, and then gets past and upcoming events for all those groups. It then creates an output file with the group + event data, to be further used in other projects.
Please download latest version of maven to run mvn commands from command-line, or import it as a maven project in your IDE (provided maven plug-in is present). Please run "mvn clean install" and "mvn eclipse:eclipse" if you're running from a command line, and then import the project in your IDE.
Once the project is set up in the IDE, you may run the class MeetupDataCollector as a Java application. To run the program, you'll need to pass two arguments - your API key, which you should get from https://secure.meetup.com/meetup_api/key/, and a topic of interest like "java". The program can take a while to complete if the number of groups interested in the topic is large.
<file_sep>/src/main/java/com/sapient/meetupclient/DataTuple.java
package com.sapient.meetupclient;
/**
 * A simple immutable tuple that pairs a key with a value.
 *
 * <p>Both components may be {@code null}; {@link #equals(Object)} and
 * {@link #hashCode()} are null-safe and consistent with each other.
 *
 * @author abhinavg6
 *
 * @param <L> key type
 * @param <R> value type
 */
public class DataTuple<L, R> {

    private final L key;
    private final R value;

    public DataTuple(L key, R value) {
        this.key = key;
        this.value = value;
    }

    public L getKey() {
        return key;
    }

    public R getValue() {
        return value;
    }

    @Override
    public int hashCode() {
        // Objects.hash is order-sensitive (unlike the previous XOR, where
        // (a, b) and (b, a) collided) and tolerates null fields, which the
        // previous key.hashCode() ^ value.hashCode() did not.
        return java.util.Objects.hash(key, value);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DataTuple)) {
            return false;
        }
        DataTuple<?, ?> other = (DataTuple<?, ?>) o;
        // Objects.equals handles null components without throwing.
        return java.util.Objects.equals(this.key, other.key)
                && java.util.Objects.equals(this.value, other.value);
    }
}
<file_sep>/src/main/java/com/sapient/meetupclient/MeetupJDOMClient.java
package com.sapient.meetupclient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.filter.Filters;
import org.jdom2.input.DOMBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.xml.sax.SAXException;
/**
* A simple JDOM based client for Meetup groups and events API
*
* @author abhinavg6
*
*/
public class MeetupJDOMClient {
private static final Logger logger = LogManager
.getLogger(MeetupJDOMClient.class);
private String apiKey;
private String apiToken;
private String uri;
/**
 * Stores the Meetup API credentials and base URI used by every request.
 *
 * @param apiKey name of the query parameter the token is sent under
 *               (used as "?apiKey=..." when building the request URI)
 * @param apiToken the caller's Meetup API token value
 * @param uri base URI of the Meetup API (e.g. https://api.meetup.com/)
 */
public void initClient(String apiKey, String apiToken, String uri) {
    this.apiKey = apiKey;
    this.apiToken = apiToken;
    this.uri = uri;
}
/**
 * Fetches the groups matching the topic of interest and returns a map of
 * group id to group name.
 *
 * @param resourcePath path of the groups resource (appended to the base URI)
 * @param resourceAttrs extra query attributes for the request
 * @return map of group id to group name; empty when nothing was found
 */
public Map<Integer, String> getSeedData(String resourcePath,
        String resourceAttrs) {
    Map<Integer, String> seedData = new HashMap<Integer, String>();
    // Get all group items from the XML
    List<Element> elements = getElementsFromXML(resourcePath,
            resourceAttrs, "//items/item");
    for (Element element : elements) {
        Integer groupId = null;
        String groupName = null;
        List<Element> children = element.getChildren();
        for (Element childElement : children) {
            if (childElement.getName().equalsIgnoreCase("id")) {
                // Get the group id
                groupId = Integer.parseInt(childElement.getValue());
            }
            if (childElement.getName().equalsIgnoreCase("name")) {
                // Get the group name
                groupName = childElement.getValue();
            }
        }
        // Record the group only when BOTH id and name are present; the
        // previous || guard allowed a null key (or null value) into the
        // map, and && matches the guard used in getDataTable.
        if ((null != groupId) && (null != groupName)) {
            seedData.put(groupId, groupName);
        }
    }
    return seedData;
}
/**
 * Fetches the past and upcoming events for a group, keyed by event id. Each
 * event maps to tuples for its status and name, plus (when a venue element
 * is present) the venue's city and country.
 *
 * @param resourcePath path of the events resource (appended to the base URI)
 * @param resourceAttrs extra query attributes for the request
 * @return map of event id to attribute tuples; empty when nothing was found
 */
public Map<String, List<DataTuple<String, String>>> getDataTable(
        String resourcePath, String resourceAttrs) {
    Map<String, List<DataTuple<String, String>>> dataTable = new HashMap<String, List<DataTuple<String, String>>>();
    // Get all event items from the XML
    List<Element> elements = getElementsFromXML(resourcePath,
            resourceAttrs, "//items/item");
    for (Element element : elements) {
        List<DataTuple<String, String>> tuples = new ArrayList<DataTuple<String, String>>();
        String eventId = null;
        List<Element> children = element.getChildren();
        for (Element childElement : children) {
            if (childElement.getName().equalsIgnoreCase("id")) {
                // Get the event id
                eventId = childElement.getValue();
            }
            if (childElement.getName().equalsIgnoreCase("status")) {
                // Add the event status tuple
                tuples.add(new DataTuple<String, String>("status",
                        childElement.getValue()));
            }
            if (childElement.getName().equalsIgnoreCase("name")) {
                // Add the event name tuple
                tuples.add(new DataTuple<String, String>("name",
                        childElement.getValue()));
            }
            if (childElement.getName().equalsIgnoreCase("venue")) {
                List<Element> grandChildren = childElement.getChildren();
                // Get the event venue (city/country live one level deeper)
                for (Element grandChild : grandChildren) {
                    if (grandChild.getName().equalsIgnoreCase("city")) {
                        // Add the venue city tuple
                        tuples.add(new DataTuple<String, String>("city",
                                grandChild.getValue()));
                    }
                    if (grandChild.getName().equalsIgnoreCase("country")) {
                        // Add the venue country tuple
                        tuples.add(new DataTuple<String, String>("country",
                                grandChild.getValue()));
                    }
                }
            }
        }
        // Add each event id with its list of tuples (both must be present)
        if ((null != eventId) && (tuples.size() != 0)) {
            dataTable.put(eventId, tuples);
        }
    }
    return dataTable;
}
/**
 * Source an XML from web API, and get a list of parent nodes for an xpath.
 *
 * @param resourcePath  API resource path appended to the base URI
 * @param resourceAttrs query-string attributes for the request
 * @param xpath         xpath selecting the elements of interest
 * @return the matching elements, or an empty list when fetching/parsing fails
 */
private List<Element> getElementsFromXML(String resourcePath,
        String resourceAttrs, String xpath) {
    List<Element> elements = new ArrayList<Element>();
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    // Harden the parser against XXE: the XML is fetched from a remote
    // service, so entity expansion and XInclude must not be honoured.
    factory.setExpandEntityReferences(false);
    factory.setXIncludeAware(false);
    try {
        // Disallowing DOCTYPE declarations blocks both entity-expansion
        // attacks and external DTD fetches (OWASP-recommended setting).
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        DocumentBuilder domBuilder = factory.newDocumentBuilder();
        String completeUri = createCompleteUri(resourcePath, resourceAttrs);
        // Create the w3c Document from the complete API URI
        org.w3c.dom.Document w3cDocument = domBuilder.parse(completeUri);
        DOMBuilder jdomBuilder = new DOMBuilder();
        // Create the JDOM document from the w3c document
        Document jdomDocument = jdomBuilder.build(w3cDocument);
        XPathFactory xFactory = XPathFactory.instance();
        // Create an xpath expression to get data of interest
        XPathExpression<Element> expr = xFactory.compile(xpath,
                Filters.element());
        // Get the elements based on the xpath
        elements = expr.evaluate(jdomDocument);
    } catch (ParserConfigurationException e) {
        // Pass the exception so the stack trace is preserved in the log
        logger.error("ParserConfigurationException -- " + e.getMessage(), e);
    } catch (SAXException e) {
        logger.error("SAXException -- " + e.getMessage(), e);
    } catch (IOException e) {
        logger.error("IOException -- " + e.getMessage(), e);
    }
    return elements;
}
/**
 * Create a complete API URL from resource path and attributes.
 *
 * Shape: &lt;base uri&gt;&lt;resource&gt;?&lt;key name&gt;=&lt;key value&gt;&amp;&lt;attributes&gt;
 *
 * @param resourcePath  resource path appended directly to the base URI
 * @param resourceAttrs query-string attributes appended after the API key
 * @return the fully assembled request URL
 */
private String createCompleteUri(String resourcePath, String resourceAttrs) {
    StringBuilder uriBuilder = new StringBuilder();
    uriBuilder.append(this.uri).append(resourcePath).append('?');
    uriBuilder.append(this.apiKey).append('=').append(this.apiToken);
    uriBuilder.append('&').append(resourceAttrs);
    return uriBuilder.toString();
}
}
| 9545e0fe12eb6e8012bfe017586d6d5ee49a7d0f | [
"Markdown",
"Java"
] | 3 | Markdown | mindis/Simple-Meetup-Client | d1a0e5513cd546e0eafd33841c18d41f4e0479e5 | 3a1db2977290346efa6f4135e5b171c2da58def7 |
refs/heads/master | <file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"errors"
"fmt"
"github.com/Azure/azure-storage-azcopy/common"
)
// -------------------------------------- Implemented Enumerators -------------------------------------- \\
// download implies transferring from a remote resource to the local disk
// in this scenario, the destination is scanned/indexed first
// then the source is scanned and filtered based on what the destination contains
// we do the local one first because it is assumed that local file systems will be faster to enumerate than remote resources
func newSyncDownloadEnumerator(cca *cookedSyncCmdArgs) (enumerator *syncEnumerator, err error) {
	// the local destination is enumerated first, the remote blob source second
	localTraverser, err := newLocalTraverserForSync(cca, false)
	if err != nil {
		return nil, err
	}
	remoteTraverser, err := newBlobTraverserForSync(cca, true)
	if err != nil {
		return nil, err
	}

	// both ends must point at the same kind of resource (single object vs. directory)
	_, sourceIsSingleBlob := remoteTraverser.getPropertiesIfSingleBlob()
	_, destIsSingleFile, _ := localTraverser.getInfoIfSingleFile()
	if sourceIsSingleBlob != destIsSingleFile {
		return nil, errors.New("sync must happen between source and destination of the same type: either blob <-> file, or container/virtual directory <-> local directory")
	}

	copyScheduler := newSyncTransferProcessor(cca, NumOfFilesPerDispatchJobPart, sourceIsSingleBlob && destIsSingleFile)

	// include filters are applied before exclude filters
	filters := append(buildIncludeFilters(cca.include), buildExcludeFilters(cca.exclude)...)

	// the indexer records the destination so that the source can be compared against it
	indexer := newObjectIndexer()
	comparator := newSyncSourceComparator(indexer, copyScheduler.scheduleCopyTransfer)

	finalize := func() error {
		// whatever is still in the index was never seen at the remote source,
		// so those local files are extras that must be removed
		deleteScheduler := newSyncLocalDeleteProcessor(cca)
		if err := indexer.traverse(deleteScheduler.removeImmediately, nil); err != nil {
			return err
		}

		// deletions go first; dispatching the final part last avoids quitting
		// before the deletions have had a chance to finish
		jobInitiated, err := copyScheduler.dispatchFinalPart()
		if err != nil {
			return err
		}

		quitIfInSync(jobInitiated, cca.getDeletionCount() > 0, cca)
		cca.setScanningComplete()
		return nil
	}

	return newSyncEnumerator(localTraverser, remoteTraverser, indexer, filters,
		comparator.processIfNecessary, finalize), nil
}
// upload implies transferring from a local disk to a remote resource
// in this scenario, the local disk (source) is scanned/indexed first
// then the destination is scanned and filtered based on what the destination contains
// we do the local one first because it is assumed that local file systems will be faster to enumerate than remote resources
func newSyncUploadEnumerator(cca *cookedSyncCmdArgs) (enumerator *syncEnumerator, err error) {
	// the local source is enumerated first, the remote blob destination second
	localTraverser, err := newLocalTraverserForSync(cca, true)
	if err != nil {
		return nil, err
	}
	remoteTraverser, err := newBlobTraverserForSync(cca, false)
	if err != nil {
		return nil, err
	}

	// both ends must point at the same kind of resource (single object vs. directory)
	_, destIsSingleBlob := remoteTraverser.getPropertiesIfSingleBlob()
	_, sourceIsSingleFile, _ := localTraverser.getInfoIfSingleFile()
	if destIsSingleBlob != sourceIsSingleFile {
		return nil, errors.New("sync must happen between source and destination of the same type: either blob <-> file, or container/virtual directory <-> local directory")
	}

	copyScheduler := newSyncTransferProcessor(cca, NumOfFilesPerDispatchJobPart, destIsSingleBlob && sourceIsSingleFile)

	// include filters are applied before exclude filters
	filters := append(buildIncludeFilters(cca.include), buildExcludeFilters(cca.exclude)...)

	// the indexer records the local source so that the destination can be compared against it
	indexer := newObjectIndexer()

	destinationCleaner, err := newSyncBlobDeleteProcessor(cca)
	if err != nil {
		return nil, fmt.Errorf("unable to instantiate destination cleaner due to: %s", err.Error())
	}

	// remote objects can be deleted as soon as they are seen: by the time the
	// destination is traversed, the complete local map already exists, so each
	// remote object can immediately be checked against the local source
	comparator := newSyncDestinationComparator(indexer, copyScheduler.scheduleCopyTransfer, destinationCleaner.removeImmediately)

	finalize := func() error {
		// whatever remains in the index was never matched at the destination,
		// so every such local file still needs to be uploaded
		if err := indexer.traverse(copyScheduler.scheduleCopyTransfer, filters); err != nil {
			return err
		}

		jobInitiated, err := copyScheduler.dispatchFinalPart()
		if err != nil {
			return err
		}

		quitIfInSync(jobInitiated, cca.getDeletionCount() > 0, cca)
		cca.setScanningComplete()
		return nil
	}

	return newSyncEnumerator(localTraverser, remoteTraverser, indexer, filters,
		comparator.processIfNecessary, finalize), nil
}
// quitIfInSync exits the process with a success code when the sync turned out
// to be a no-op (no transfer job was started). The message distinguishes
// between "nothing changed at all" and "only deletions were needed".
func quitIfInSync(transferJobInitiated, anyDestinationFileDeleted bool, cca *cookedSyncCmdArgs) {
	// nothing to report here when an actual transfer job was kicked off
	if transferJobInitiated {
		return
	}

	cca.reportScanningProgress(glcm, 0)

	message := "The source and destination are already in sync."
	if anyDestinationFileDeleted {
		// some files were deleted but no transfer was scheduled
		message = "The source and destination are now in sync."
	}

	glcm.Exit(func(format common.OutputFormat) string {
		return message
	}, common.EExitCode.Success())
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"github.com/Azure/azure-storage-azcopy/common"
chk "gopkg.in/check.v1"
"strings"
)
// TestRemoveSingleFile verifies that removing a single Azure file schedules
// exactly one transfer whose relative path is empty (the root URL already
// points at the file). It repeats for names that need path handling,
// non-ASCII characters, and URL-special characters.
func (s *cmdIntegrationSuite) TestRemoveSingleFile(c *chk.C) {
	fsu := getFSU()
	shareURL, shareName := createNewAzureShare(c, fsu)
	defer deleteShare(c, shareURL)

	for _, fileName := range []string{"top/mid/low/singlefileisbest", "打麻将.txt", "%4509%4254$85140&"} {
		// set up the share with a single file
		fileList := []string{fileName}
		scenarioHelper{}.generateAzureFilesFromList(c, shareURL, fileList)
		c.Assert(shareURL, chk.NotNil)

		// set up interceptor so the RPC dispatch is captured instead of executed
		mockedRPC := interceptor{}
		Rpc = mockedRPC.intercept
		mockedRPC.init()

		// construct the raw input to simulate user input
		rawFileURLWithSAS := scenarioHelper{}.getRawFileURLWithSAS(c, shareName, fileList[0])
		raw := getDefaultRemoveRawInput(rawFileURLWithSAS.String(), true)

		runCopyAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)

			// note that when we are targeting single files, the relative path is empty ("") since the root path already points to the file
			validateRemoveTransfersAreScheduled(c, true, []string{""}, mockedRPC)
		})
	}
}
// TestRemoveFilesUnderShare verifies that deleting at the share root schedules
// one transfer per file when recursive, and only top-level files (no path
// separator in the source) when non-recursive.
func (s *cmdIntegrationSuite) TestRemoveFilesUnderShare(c *chk.C) {
	fsu := getFSU()

	// set up the share with numerous files
	shareURL, shareName := createNewAzureShare(c, fsu)
	defer deleteShare(c, shareURL)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForAzureFile(c, shareURL, "")
	c.Assert(shareURL, chk.NotNil)
	c.Assert(len(fileList), chk.Not(chk.Equals), 0)

	// set up interceptor so the RPC dispatch is captured instead of executed
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawShareURLWithSAS := scenarioHelper{}.getRawShareURLWithSAS(c, shareName)
	raw := getDefaultRemoveRawInput(rawShareURLWithSAS.String(), false)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(fileList))

		// validate that the right transfers were sent
		validateRemoveTransfersAreScheduled(c, true, fileList, mockedRPC)
	})

	// turn off recursive, this time only top files should be deleted
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		c.Assert(len(mockedRPC.transfers), chk.Not(chk.Equals), len(fileList))

		// none of the scheduled sources may live inside a subdirectory
		for _, transfer := range mockedRPC.transfers {
			c.Assert(strings.Contains(transfer.Source, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
		}
	})
}
// TestRemoveFilesUnderDirectory is the same scenario as TestRemoveFilesUnderShare,
// but rooted at a nested directory: the scheduled relative paths must have the
// directory prefix shaved off.
func (s *cmdIntegrationSuite) TestRemoveFilesUnderDirectory(c *chk.C) {
	fsu := getFSU()
	dirName := "dir1/dir2/dir3/"

	// set up the share with numerous files
	shareURL, shareName := createNewAzureShare(c, fsu)
	defer deleteShare(c, shareURL)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForAzureFile(c, shareURL, dirName)
	c.Assert(shareURL, chk.NotNil)
	c.Assert(len(fileList), chk.Not(chk.Equals), 0)

	// set up interceptor so the RPC dispatch is captured instead of executed
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawDirectoryURLWithSAS := scenarioHelper{}.getRawFileURLWithSAS(c, shareName, dirName)
	raw := getDefaultRemoveRawInput(rawDirectoryURLWithSAS.String(), false)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(fileList))

		// validate that the right transfers were sent
		expectedTransfers := scenarioHelper{}.shaveOffPrefix(fileList, dirName)
		validateRemoveTransfersAreScheduled(c, true, expectedTransfers, mockedRPC)
	})

	// turn off recursive, this time only top files should be deleted
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		c.Assert(len(mockedRPC.transfers), chk.Not(chk.Equals), len(fileList))

		// none of the scheduled sources may live inside a subdirectory
		for _, transfer := range mockedRPC.transfers {
			c.Assert(strings.Contains(transfer.Source, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
		}
	})
}
// include flag limits the scope of the delete
// TestRemoveFilesWithIncludeFlag checks that only files matching the include
// patterns (*.pdf, *.jpeg, and an exact name) are scheduled for deletion.
func (s *cmdIntegrationSuite) TestRemoveFilesWithIncludeFlag(c *chk.C) {
	fsu := getFSU()

	// set up the share with numerous files
	shareURL, shareName := createNewAzureShare(c, fsu)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForAzureFile(c, shareURL, "")
	defer deleteShare(c, shareURL)
	c.Assert(shareURL, chk.NotNil)
	c.Assert(len(fileList), chk.Not(chk.Equals), 0)

	// add special files that we wish to include
	filesToInclude := []string{"important.pdf", "includeSub/amazing.jpeg", "exactName"}
	scenarioHelper{}.generateAzureFilesFromList(c, shareURL, filesToInclude)
	includeString := "*.pdf;*.jpeg;exactName"

	// set up interceptor so the RPC dispatch is captured instead of executed
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawShareURLWithSAS := scenarioHelper{}.getRawShareURLWithSAS(c, shareName)
	raw := getDefaultRemoveRawInput(rawShareURLWithSAS.String(), false)
	raw.include = includeString
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// NOTE(review): this calls validateDownloadTransfersAreScheduled while the
		// sibling tests above use validateRemoveTransfersAreScheduled — confirm
		// this is intentional (e.g. the two validators share transfer-list checks).
		validateDownloadTransfersAreScheduled(c, "", "", filesToInclude, mockedRPC)
	})
}
// exclude flag limits the scope of the delete
// TestRemoveFilesWithExcludeFlag checks that files matching the exclude
// patterns are left alone: only the original fileList is scheduled.
func (s *cmdIntegrationSuite) TestRemoveFilesWithExcludeFlag(c *chk.C) {
	fsu := getFSU()

	// set up the share with numerous files
	shareURL, shareName := createNewAzureShare(c, fsu)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForAzureFile(c, shareURL, "")
	defer deleteShare(c, shareURL)
	c.Assert(shareURL, chk.NotNil)
	c.Assert(len(fileList), chk.Not(chk.Equals), 0)

	// add special files that we wish to exclude
	filesToExclude := []string{"notGood.pdf", "excludeSub/lame.jpeg", "exactName"}
	scenarioHelper{}.generateAzureFilesFromList(c, shareURL, filesToExclude)
	excludeString := "*.pdf;*.jpeg;exactName"

	// set up interceptor so the RPC dispatch is captured instead of executed
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawShareURLWithSAS := scenarioHelper{}.getRawShareURLWithSAS(c, shareName)
	raw := getDefaultRemoveRawInput(rawShareURLWithSAS.String(), false)
	raw.exclude = excludeString
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// NOTE(review): uses validateDownloadTransfersAreScheduled rather than
		// validateRemoveTransfersAreScheduled — confirm this is intentional.
		validateDownloadTransfersAreScheduled(c, "", "", fileList, mockedRPC)
	})
}
// include and exclude flag can work together to limit the scope of the delete
// TestRemoveFilesWithIncludeAndExcludeFlag checks precedence: files that match
// BOTH pattern sets must be excluded, leaving only filesToInclude scheduled.
func (s *cmdIntegrationSuite) TestRemoveFilesWithIncludeAndExcludeFlag(c *chk.C) {
	fsu := getFSU()

	// set up the share with numerous files
	shareURL, shareName := createNewAzureShare(c, fsu)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForAzureFile(c, shareURL, "")
	defer deleteShare(c, shareURL)
	c.Assert(shareURL, chk.NotNil)
	c.Assert(len(fileList), chk.Not(chk.Equals), 0)

	// add special files that we wish to include
	filesToInclude := []string{"important.pdf", "includeSub/amazing.jpeg"}
	scenarioHelper{}.generateAzureFilesFromList(c, shareURL, filesToInclude)
	includeString := "*.pdf;*.jpeg;exactName"

	// add special files that we wish to exclude
	// note that the excluded files also match the include string
	filesToExclude := []string{"sorry.pdf", "exclude/notGood.jpeg", "exactName", "sub/exactName"}
	scenarioHelper{}.generateAzureFilesFromList(c, shareURL, filesToExclude)
	excludeString := "so*;not*;exactName"

	// set up interceptor so the RPC dispatch is captured instead of executed
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawShareURLWithSAS := scenarioHelper{}.getRawShareURLWithSAS(c, shareName)
	raw := getDefaultRemoveRawInput(rawShareURLWithSAS.String(), false)
	raw.include = includeString
	raw.exclude = excludeString
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// NOTE(review): uses validateDownloadTransfersAreScheduled rather than
		// validateRemoveTransfersAreScheduled — confirm this is intentional.
		validateDownloadTransfersAreScheduled(c, "", "", filesToInclude, mockedRPC)
	})
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
)
// Global singleton for sending RPC requests from the frontend to the STE.
// This default implementation dispatches in-process (no network hop); tests
// swap it out for an interceptor. Any dispatch error is fatal: common.PanicIfErr
// aborts the process.
var Rpc = func(cmd common.RpcCmd, request interface{}, response interface{}) {
	err := inprocSend(cmd, request, response)
	common.PanicIfErr(err)
}
// inprocSend dispatches the given command directly to the in-process STE and
// writes the result through the responseData out-pointer.
//
// Each case type-asserts requestData/responseData to the concrete types used
// by that command; a mismatch panics, which is acceptable because callers are
// internal and the request/response pairing is fixed per RpcCmd.
func inprocSend(rpcCmd common.RpcCmd, requestData interface{}, responseData interface{}) error {
	switch rpcCmd {
	case common.ERpcCmd.CopyJobPartOrder():
		*(responseData.(*common.CopyJobPartOrderResponse)) = ste.ExecuteNewCopyJobPartOrder(*requestData.(*common.CopyJobPartOrderRequest))

	case common.ERpcCmd.ListJobs():
		*(responseData.(*common.ListJobsResponse)) = ste.ListJobs()

	case common.ERpcCmd.ListJobSummary():
		*(responseData.(*common.ListJobSummaryResponse)) = ste.GetJobSummary(*requestData.(*common.JobID))

	case common.ERpcCmd.ListSyncJobSummary():
		*(responseData.(*common.ListSyncJobSummaryResponse)) = ste.GetSyncJobSummary(*requestData.(*common.JobID))

	case common.ERpcCmd.ListJobTransfers():
		*(responseData.(*common.ListJobTransfersResponse)) = ste.ListJobTransfers(requestData.(common.ListJobTransfersRequest))

	case common.ERpcCmd.PauseJob():
		// BUGFIX: this case previously assigned to the local responseData
		// variable instead of writing through the pointer, so the pause
		// response was silently dropped; it now mirrors the CancelJob case.
		*(responseData.(*common.CancelPauseResumeResponse)) = ste.CancelPauseJobOrder(requestData.(common.JobID), common.EJobStatus.Paused())

	case common.ERpcCmd.CancelJob():
		*(responseData.(*common.CancelPauseResumeResponse)) = ste.CancelPauseJobOrder(requestData.(common.JobID), common.EJobStatus.Cancelled())

	case common.ERpcCmd.ResumeJob():
		*(responseData.(*common.CancelPauseResumeResponse)) = ste.ResumeJobOrder(*requestData.(*common.ResumeJobRequest))

	case common.ERpcCmd.GetJobFromTo():
		*(responseData.(*common.GetJobFromToResponse)) = ste.GetJobFromTo(*requestData.(*common.GetJobFromToRequest))

	default:
		panic(fmt.Errorf("Unrecognized RpcCmd: %q", rpcCmd.String()))
	}
	return nil
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// NewHttpClient constructs an HTTPClient that posts RPC payloads to the given url,
// reusing a single underlying http.Client across requests.
func NewHttpClient(url string) *HTTPClient {
	instance := HTTPClient{
		client: &http.Client{},
		url:    url,
	}
	return &instance
}
// todo : use url in case of string
// HTTPClient pairs a reusable http.Client with the RPC endpoint it targets.
type HTTPClient struct {
	client *http.Client // shared transport; safe to reuse across send calls
	url    string       // base endpoint that every RPC is POSTed to
}
// send serializes requestData as JSON and POSTs it to the client url, passing
// the command type as the "commandType" query parameter. The JSON response
// body is deserialized into responseData (an out parameter).
func (httpClient *HTTPClient) send(rpcCmd common.RpcCmd, requestData interface{}, responseData interface{}) error {
	// Create HTTP request with command in query parameter & request data as JSON payload
	requestJson, err := json.Marshal(requestData)
	if err != nil {
		return fmt.Errorf("error marshalling request payload for command type %q", rpcCmd.String())
	}
	request, err := http.NewRequest("POST", httpClient.url, bytes.NewReader(requestJson))
	if err != nil {
		// BUGFIX: this error was previously ignored, which could cause a nil
		// dereference below when the configured url is malformed.
		return fmt.Errorf("error creating request for command type %q: %v", rpcCmd.String(), err)
	}

	// adding the commandType as a query param
	q := request.URL.Query()
	q.Add("commandType", rpcCmd.String())
	request.URL.RawQuery = q.Encode()

	response, err := httpClient.client.Do(request)
	if err != nil {
		return err
	}
	// always release the connection, even on early error returns below
	defer response.Body.Close()

	// Read response data, deserialize it and return it (via out responseData parameter) & error
	responseJson, err := ioutil.ReadAll(response.Body)
	if err != nil {
		return fmt.Errorf("error reading response for the request")
	}
	err = json.Unmarshal(responseJson, responseData)
	common.PanicIfErr(err)
	return nil
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"bytes"
"context"
"errors"
"fmt"
"github.com/Azure/azure-storage-azcopy/ste"
"io/ioutil"
"net/url"
"os"
"runtime"
"strings"
"testing"
"time"
chk "gopkg.in/check.v1"
"math/rand"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
minio "github.com/minio/minio-go"
)
// Hookup to the testing framework
func Test(t *testing.T) { chk.TestingT(t) }

// cmdIntegrationSuite hosts the cmd-level integration tests run via gocheck.
type cmdIntegrationSuite struct{}

var _ = chk.Suite(&cmdIntegrationSuite{})

// ctx is the background context shared by all service calls in this file.
var ctx = context.Background()
// Naming prefixes and default payloads used when generating test resources.
const (
	// Azure Blob resources
	containerPrefix      = "container"
	blobPrefix           = "blob"
	blockBlobDefaultData = "AzCopy Random Test Data"

	// S3 resources
	bucketPrefix      = "s3bucket"
	objectPrefix      = "s3object"
	objectDefaultData = "AzCopy default data for S3 object"

	// Azure Files resources
	fileDefaultData             = "AzCopy Random Test Data"
	sharePrefix                 = "share"
	azureFilePrefix             = "azfile"
	defaultAzureFileSizeInBytes = 1000 // size preallocated for each generated Azure file
)
// This function generates an entity name by concatenating the passed prefix,
// the name of the test requesting the entity name, and the minute, second, and nanoseconds of the call.
// This should make it easy to associate the entities with their test, uniquely identify
// them, and determine the order in which they were created.
// Will truncate the end of the test name, if there is not enough room for it, followed by the time-based suffix,
// with a non-zero maxLen.
func generateName(prefix string, maxLen int) string {
// These next lines up through the for loop are obtaining and walking up the stack
// trace to extrat the test name, which is stored in name
pc := make([]uintptr, 10)
runtime.Callers(0, pc)
f := runtime.FuncForPC(pc[0])
name := f.Name()
for i := 0; !strings.Contains(name, "Suite"); i++ { // The tests are all scoped to the suite, so this ensures getting the actual test name
f = runtime.FuncForPC(pc[i])
name = f.Name()
}
funcNameStart := strings.Index(name, "Test")
name = name[funcNameStart+len("Test"):] // Just get the name of the test and not any of the garbage at the beginning
name = strings.ToLower(name) // Ensure it is a valid resource name
textualPortion := fmt.Sprintf("%s%s", prefix, strings.ToLower(name))
currentTime := time.Now()
numericSuffix := fmt.Sprintf("%02d%02d%d", currentTime.Minute(), currentTime.Second(), currentTime.Nanosecond())
if maxLen > 0 {
maxTextLen := maxLen - len(numericSuffix)
if maxTextLen < 1 {
panic("max len too short")
}
if len(textualPortion) > maxTextLen {
textualPortion = textualPortion[:maxTextLen]
}
}
name = textualPortion + numericSuffix
return name
}
// Each generateXxxName helper delegates to generateName with the matching
// prefix constant; the cap of 63 matches the naming limits for containers,
// buckets, and shares, while blob/object names are passed uncapped (0).
func generateContainerName() string {
	return generateName(containerPrefix, 63)
}

func generateBlobName() string {
	return generateName(blobPrefix, 0)
}

func generateBucketName() string {
	return generateName(bucketPrefix, 63)
}

func generateBucketNameWithCustomizedPrefix(customizedPrefix string) string {
	return generateName(customizedPrefix, 63)
}

func generateObjectName() string {
	return generateName(objectPrefix, 0)
}

func generateShareName() string {
	return generateName(sharePrefix, 63)
}
// getShareURL builds (but does not create) a ShareURL with a fresh test name.
func getShareURL(c *chk.C, fsu azfile.ServiceURL) (share azfile.ShareURL, name string) {
	name = generateShareName()
	return fsu.NewShareURL(name), name
}
// generateAzureFileName returns a fresh, uncapped Azure-file test name.
func generateAzureFileName() string {
	return generateName(azureFilePrefix, 0)
}
// getContainerURL builds (but does not create) a container URL with a fresh test name.
func getContainerURL(c *chk.C, bsu azblob.ServiceURL) (container azblob.ContainerURL, name string) {
	name = generateContainerName()
	container = bsu.NewContainerURL(name)
	return container, name
}

// getBlockBlobURL builds a block blob URL; the returned name INCLUDES the prefix.
func getBlockBlobURL(c *chk.C, container azblob.ContainerURL, prefix string) (blob azblob.BlockBlobURL, name string) {
	name = prefix + generateBlobName()
	blob = container.NewBlockBlobURL(name)
	return blob, name
}

// getAppendBlobURL builds an append blob URL.
// NOTE(review): unlike getBlockBlobURL, the returned name EXCLUDES the prefix
// even though the URL includes it — confirm callers rely on this asymmetry
// before unifying the two.
func getAppendBlobURL(c *chk.C, container azblob.ContainerURL, prefix string) (blob azblob.AppendBlobURL, name string) {
	name = generateBlobName()
	blob = container.NewAppendBlobURL(prefix + name)
	return blob, name
}

// getPageBlobURL builds a page blob URL; like getAppendBlobURL, the returned
// name excludes the prefix while the URL includes it.
func getPageBlobURL(c *chk.C, container azblob.ContainerURL, prefix string) (blob azblob.PageBlobURL, name string) {
	name = generateBlobName()
	blob = container.NewPageBlobURL(prefix + name)
	return
}

// getAzureFileURL builds a file URL under the share root; the returned name
// includes the prefix.
func getAzureFileURL(c *chk.C, shareURL azfile.ShareURL, prefix string) (fileURL azfile.FileURL, name string) {
	name = prefix + generateAzureFileName()
	fileURL = shareURL.NewRootDirectoryURL().NewFileURL(name)
	return
}
// getReaderToRandomBytes returns a reader over n freshly generated random
// bytes, discarding the backing slice.
func getReaderToRandomBytes(n int) *bytes.Reader {
	reader, _ := getRandomDataAndReader(n)
	return reader
}
// getRandomDataAndReader fills an n-byte slice with random data and returns
// both a reader over it and the slice itself.
func getRandomDataAndReader(n int) (*bytes.Reader, []byte) {
	buf := make([]byte, n)
	rand.Read(buf)
	return bytes.NewReader(buf), buf
}
func getAccountAndKey() (string, string) {
name := os.Getenv("ACCOUNT_NAME")
key := os.Getenv("ACCOUNT_KEY")
if name == "" || key == "" {
panic("ACCOUNT_NAME and ACCOUNT_KEY environment vars must be set before running tests")
}
return name, key
}
// getBSU builds a blob ServiceURL for the test account using shared-key auth.
func getBSU() azblob.ServiceURL {
	accountName, accountKey := getAccountAndKey()
	rawURL, _ := url.Parse(fmt.Sprintf("https://%s.blob.core.windows.net/", accountName))

	credential, err := azblob.NewSharedKeyCredential(accountName, accountKey)
	if err != nil {
		panic(err)
	}
	return azblob.NewServiceURL(*rawURL, azblob.NewPipeline(credential, azblob.PipelineOptions{}))
}
// getFSU builds a file-service ServiceURL for the test account using shared-key auth.
func getFSU() azfile.ServiceURL {
	accountName, accountKey := getAccountAndKey()
	rawURL, _ := url.Parse(fmt.Sprintf("https://%s.file.core.windows.net/", accountName))

	credential, err := azfile.NewSharedKeyCredential(accountName, accountKey)
	if err != nil {
		panic(err)
	}
	return azfile.NewServiceURL(*rawURL, azfile.NewPipeline(credential, azfile.PipelineOptions{}))
}
// createNewContainer creates a fresh private container and asserts success (201).
func createNewContainer(c *chk.C, bsu azblob.ServiceURL) (container azblob.ContainerURL, name string) {
	container, name = getContainerURL(c, bsu)

	resp, err := container.Create(ctx, nil, azblob.PublicAccessNone)
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 201)
	return container, name
}
// createNewBlockBlob uploads a block blob with the default payload and asserts success (201).
func createNewBlockBlob(c *chk.C, container azblob.ContainerURL, prefix string) (blob azblob.BlockBlobURL, name string) {
	blob, name = getBlockBlobURL(c, container, prefix)

	resp, err := blob.Upload(ctx, strings.NewReader(blockBlobDefaultData), azblob.BlobHTTPHeaders{},
		nil, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 201)
	return
}
// createNewAzureShare creates a fresh share with no quota and asserts success (201).
func createNewAzureShare(c *chk.C, fsu azfile.ServiceURL) (share azfile.ShareURL, name string) {
	share, name = getShareURL(c, fsu)

	resp, err := share.Create(ctx, nil, 0)
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 201)
	return share, name
}
// createNewAzureFile creates a file of the default size under the share,
// generating any missing parent directories first, and asserts success (201).
func createNewAzureFile(c *chk.C, share azfile.ShareURL, prefix string) (file azfile.FileURL, name string) {
	file, name = getAzureFileURL(c, share, prefix)

	// the parent directory chain must exist before the file can be created
	generateParentsForAzureFile(c, file)

	resp, err := file.Create(ctx, defaultAzureFileSizeInBytes, azfile.FileHTTPHeaders{}, azfile.Metadata{})
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 201)
	return
}
// generateParentsForAzureFile creates every missing directory between the
// share root and the given file URL.
func generateParentsForAzureFile(c *chk.C, fileURL azfile.FileURL) {
	accountName, accountKey := getAccountAndKey()
	credential, err := azfile.NewSharedKeyCredential(accountName, accountKey)
	// previously this error was silently discarded, which would surface later
	// as a confusing nil-credential failure inside the pipeline
	c.Assert(err, chk.IsNil)
	err = ste.CreateParentDirToRoot(ctx, fileURL, azfile.NewPipeline(credential, azfile.PipelineOptions{}))
	c.Assert(err, chk.IsNil)
}
// createNewAppendBlob creates an empty append blob and asserts success (201).
func createNewAppendBlob(c *chk.C, container azblob.ContainerURL, prefix string) (blob azblob.AppendBlobURL, name string) {
	blob, name = getAppendBlobURL(c, container, prefix)

	createResp, err := blob.Create(ctx, azblob.BlobHTTPHeaders{}, nil, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)
	c.Assert(createResp.StatusCode(), chk.Equals, 201)
	return
}
// createNewPageBlob creates a ten-page blob and asserts success (201).
func createNewPageBlob(c *chk.C, container azblob.ContainerURL, prefix string) (blob azblob.PageBlobURL, name string) {
	blob, name = getPageBlobURL(c, container, prefix)

	createResp, err := blob.Create(ctx, azblob.PageBlobPageBytes*10, 0, azblob.BlobHTTPHeaders{}, nil, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)
	c.Assert(createResp.StatusCode(), chk.Equals, 201)
	return
}
// deleteContainer deletes the given container and asserts a 202 Accepted response.
func deleteContainer(c *chk.C, container azblob.ContainerURL) {
	resp, err := container.Delete(ctx, azblob.ContainerAccessConditions{})
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 202)
}
// validateStorageError asserts that err is an azblob.StorageError whose service
// code matches the expected code.
func validateStorageError(c *chk.C, err error, code azblob.ServiceCodeType) {
	serr, ok := err.(azblob.StorageError)
	// Assert the type assertion succeeded: previously a failed assertion was
	// ignored, and calling ServiceCode() on the nil interface would panic with
	// an unhelpful nil-pointer error instead of a clear test failure.
	c.Assert(ok, chk.Equals, true)
	c.Assert(serr.ServiceCode(), chk.Equals, code)
}
func getRelativeTimeGMT(amount time.Duration) time.Time {
currentTime := time.Now().In(time.FixedZone("GMT", 0))
currentTime = currentTime.Add(amount * time.Second)
return currentTime
}
func generateCurrentTimeWithModerateResolution() time.Time {
highResolutionTime := time.Now().UTC()
return time.Date(highResolutionTime.Year(), highResolutionTime.Month(), highResolutionTime.Day(), highResolutionTime.Hour(), highResolutionTime.Minute(),
highResolutionTime.Second(), 0, highResolutionTime.Location())
}
// createS3ResOptions carries options for creating S3 test resources.
type createS3ResOptions struct {
	// Location is the S3 region used when creating buckets/clients.
	Location string
}
// createS3ClientWithMinio builds a minio S3 client for s3.amazonaws.com (HTTPS)
// using AWS credentials taken from the AWS_ACCESS_KEY_ID and
// AWS_SECRET_ACCESS_KEY environment variables, in the region given by o.Location.
// Returns an error if either environment variable is unset.
func createS3ClientWithMinio(o createS3ResOptions) (*minio.Client, error) {
	keyID, secret := os.Getenv("AWS_ACCESS_KEY_ID"), os.Getenv("AWS_SECRET_ACCESS_KEY")
	if keyID == "" || secret == "" {
		return nil, fmt.Errorf("AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY should be set before creating the S3 client")
	}
	return minio.NewWithRegion("s3.amazonaws.com", keyID, secret, true, o.Location)
}
// createNewBucket creates an S3 bucket with a generated name in o.Location and
// returns that name; asserts the creation succeeded.
func createNewBucket(c *chk.C, client *minio.Client, o createS3ResOptions) string {
	bucketName := generateBucketName()
	err := client.MakeBucket(bucketName, o.Location)
	c.Assert(err, chk.IsNil)
	return bucketName
}
// createNewBucketWithName creates an S3 bucket with the caller-supplied name in
// o.Location; asserts the creation succeeded.
func createNewBucketWithName(c *chk.C, client *minio.Client, bucketName string, o createS3ResOptions) {
	err := client.MakeBucket(bucketName, o.Location)
	c.Assert(err, chk.IsNil)
}
// createNewObject uploads the default test data as a new object whose key is
// prefix + a generated name; asserts the full payload was written and returns
// the object key.
func createNewObject(c *chk.C, client *minio.Client, bucketName string, prefix string) (objectKey string) {
	objectKey = prefix + generateObjectName()
	size := int64(len(objectDefaultData))
	n, err := client.PutObject(bucketName, objectKey, strings.NewReader(objectDefaultData), size, minio.PutObjectOptions{})
	c.Assert(err, chk.IsNil)
	c.Assert(n, chk.Equals, size)
	return
}
// deleteBucket removes every object in the bucket and then the bucket itself.
// Object keys are streamed through a channel from a listing goroutine into
// RemoveObjects, so deletion proceeds concurrently with listing.
func deleteBucket(c *chk.C, client *minio.Client, bucketName string) {
	objectsCh := make(chan string)
	go func() {
		defer close(objectsCh) // closing the channel signals RemoveObjects that listing is done
		// List all objects from a bucket-name with a matching prefix.
		for object := range client.ListObjectsV2(bucketName, "", true, context.Background().Done()) {
			c.Assert(object.Err, chk.IsNil)
			objectsCh <- object.Key
		}
	}()
	// List bucket, and delete all the objects in the bucket
	errChn := client.RemoveObjects(bucketName, objectsCh)
	// Drain the error channel; any per-object deletion failure fails the test.
	for err := range errChn {
		c.Assert(err, chk.IsNil)
	}
	// Remove the bucket.
	err := client.RemoveBucket(bucketName)
	c.Assert(err, chk.IsNil)
}
// cleanS3Account deletes every bucket (and its contents) in the S3 account the
// client is authenticated against. Destructive: intended for test cleanup only.
func cleanS3Account(c *chk.C, client *minio.Client) {
	buckets, err := client.ListBuckets()
	c.Assert(err, chk.IsNil)
	for _, bucket := range buckets {
		deleteBucket(c, client, bucket.Name)
	}
}
// getGenericCredentialForFile builds an Azure File shared-key credential from
// the environment variables <accountType>ACCOUNT_NAME and <accountType>ACCOUNT_KEY
// (accountType is used verbatim as the variable-name prefix). Returns an error
// if either variable is unset.
func getGenericCredentialForFile(accountType string) (*azfile.SharedKeyCredential, error) {
	nameVar, keyVar := accountType+"ACCOUNT_NAME", accountType+"ACCOUNT_KEY"
	name, key := os.Getenv(nameVar), os.Getenv(keyVar)
	if name == "" || key == "" {
		return nil, errors.New(nameVar + " and/or " + keyVar + " environment variables not specified.")
	}
	return azfile.NewSharedKeyCredential(name, key)
}
// getAlternateFSU builds a File service URL for the secondary test account,
// using SECONDARY_ACCOUNT_NAME / SECONDARY_ACCOUNT_KEY from the environment.
// Returns an error if either variable is unset or the credential is invalid.
func getAlternateFSU() (azfile.ServiceURL, error) {
	secondaryAccountName, secondaryAccountKey := os.Getenv("SECONDARY_ACCOUNT_NAME"), os.Getenv("SECONDARY_ACCOUNT_KEY")
	if secondaryAccountName == "" || secondaryAccountKey == "" {
		return azfile.ServiceURL{}, errors.New("SECONDARY_ACCOUNT_NAME and/or SECONDARY_ACCOUNT_KEY environment variables not specified.")
	}
	// URL is built from a fixed template, so the parse error is ignored deliberately.
	fsURL, _ := url.Parse("https://" + secondaryAccountName + ".file.core.windows.net/")
	credential, err := azfile.NewSharedKeyCredential(secondaryAccountName, secondaryAccountKey)
	if err != nil {
		return azfile.ServiceURL{}, err
	}
	pipeline := azfile.NewPipeline(credential, azfile.PipelineOptions{ /*Log: pipeline.NewLogWrapper(pipeline.LogInfo, log.New(os.Stderr, "", log.LstdFlags))*/ })
	return azfile.NewServiceURL(*fsURL, pipeline), nil
}
// createNewShare creates a new Azure File share with a generated name and asserts
// a 201 Created response.
// NOTE(review): this duplicates createNewAzureShare above — consider consolidating.
func createNewShare(c *chk.C, fsu azfile.ServiceURL) (share azfile.ShareURL, name string) {
	share, name = getShareURL(c, fsu)
	cResp, err := share.Create(ctx, nil, 0)
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, 201)
	return share, name
}
// deleteShare deletes the share including any snapshots; asserts success.
func deleteShare(c *chk.C, share azfile.ShareURL) {
	_, err := share.Delete(ctx, azfile.DeleteSnapshotsOptionInclude)
	c.Assert(err, chk.IsNil)
}
// Some tests require setting service properties. It can take up to 30 seconds for the new properties to be reflected across all FEs.
// We will enable the necessary property and try to run the test implementation. If it fails with an error that should be due to
// those changes not being reflected yet, we will wait 30 seconds and try the test again. If it fails this time for any reason,
// we fail the test. It is the responsibility of the the testImplFunc to determine which error string indicates the test should be retried.
// There can only be one such string. All errors that cannot be due to this detail should be asserted and not returned as an error string.
// runTestRequiringServiceProperties enables a service property, runs the test
// implementation, and — if it fails with exactly the retry-indicating error
// string `code` (properties can take ~30s to propagate across FEs) — waits 30s
// and retries once, asserting success on the second attempt. The property is
// disabled again when the function returns.
func runTestRequiringServiceProperties(c *chk.C, bsu azblob.ServiceURL, code string,
	enableServicePropertyFunc func(*chk.C, azblob.ServiceURL),
	testImplFunc func(*chk.C, azblob.ServiceURL) error,
	disableServicePropertyFunc func(*chk.C, azblob.ServiceURL)) {
	enableServicePropertyFunc(c, bsu)
	defer disableServicePropertyFunc(c, bsu)
	err := testImplFunc(c, bsu)
	// We cannot assume that the error indicative of slow update will necessarily be a StorageError. As in ListBlobs.
	if err != nil && err.Error() == code {
		time.Sleep(time.Second * 30)
		err = testImplFunc(c, bsu)
		c.Assert(err, chk.IsNil)
	}
}
// enableSoftDelete turns on blob soft-delete with a 1-day retention policy.
func enableSoftDelete(c *chk.C, bsu azblob.ServiceURL) {
	days := int32(1)
	_, err := bsu.SetProperties(ctx, azblob.StorageServiceProperties{DeleteRetentionPolicy: &azblob.RetentionPolicy{Enabled: true, Days: &days}})
	c.Assert(err, chk.IsNil)
}
// disableSoftDelete turns off blob soft-delete on the service.
func disableSoftDelete(c *chk.C, bsu azblob.ServiceURL) {
	_, err := bsu.SetProperties(ctx, azblob.StorageServiceProperties{DeleteRetentionPolicy: &azblob.RetentionPolicy{Enabled: false}})
	c.Assert(err, chk.IsNil)
}
// validateUpload downloads the blob and asserts that its content is empty.
func validateUpload(c *chk.C, blobURL azblob.BlockBlobURL) {
	resp, err := blobURL.Download(ctx, 0, 0, azblob.BlobAccessConditions{}, false)
	c.Assert(err, chk.IsNil)
	body := resp.Response().Body
	defer body.Close() // previously leaked: the response body was never closed
	data, err := ioutil.ReadAll(body)
	c.Assert(err, chk.IsNil) // previously discarded; a read failure would masquerade as "empty"
	c.Assert(data, chk.HasLen, 0)
}
// getContainerURLWithSAS returns a ContainerURL authenticated via a 48-hour
// container-scoped SAS token (read/add/write/create/delete/list) signed with the
// given shared-key credential; the pipeline itself is anonymous since auth is in
// the query string.
func getContainerURLWithSAS(c *chk.C, credential azblob.SharedKeyCredential, containerName string) azblob.ContainerURL {
	sasQueryParams, err := azblob.BlobSASSignatureValues{
		Protocol:      azblob.SASProtocolHTTPS,
		ExpiryTime:    time.Now().UTC().Add(48 * time.Hour),
		ContainerName: containerName,
		Permissions:   azblob.ContainerSASPermissions{Read: true, Add: true, Write: true, Create: true, Delete: true, List: true}.String(),
	}.NewSASQueryParameters(&credential)
	c.Assert(err, chk.IsNil)
	// construct the url from scratch
	qp := sasQueryParams.Encode()
	rawURL := fmt.Sprintf("https://%s.blob.core.windows.net/%s?%s",
		credential.AccountName(), containerName, qp)
	// convert the raw url and validate it was parsed successfully
	fullURL, err := url.Parse(rawURL)
	c.Assert(err, chk.IsNil)
	// TODO perhaps we need a global default pipeline
	return azblob.NewContainerURL(*fullURL, azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{}))
}
// getServiceURLWithSAS returns a blob ServiceURL authenticated via a 48-hour
// account-level SAS token covering blob/file/queue services and all resource
// types, signed with the given shared-key credential.
// NOTE(review): the permission/service/resource strings are built with azfile
// SAS types inside azblob.AccountSASSignatureValues (works since they are plain
// strings), and unlike the sibling helpers ExpiryTime is not normalized with
// .UTC() — confirm both are intentional.
func getServiceURLWithSAS(c *chk.C, credential azblob.SharedKeyCredential) azblob.ServiceURL {
	sasQueryParams, err := azblob.AccountSASSignatureValues{
		Protocol:      azblob.SASProtocolHTTPS,
		ExpiryTime:    time.Now().Add(48 * time.Hour),
		Permissions:   azfile.AccountSASPermissions{Read: true, List: true, Write: true, Delete: true, Add: true, Create: true, Update: true, Process: true}.String(),
		Services:      azfile.AccountSASServices{File: true, Blob: true, Queue: true}.String(),
		ResourceTypes: azfile.AccountSASResourceTypes{Service: true, Container: true, Object: true}.String(),
	}.NewSASQueryParameters(&credential)
	c.Assert(err, chk.IsNil)
	// construct the url from scratch
	qp := sasQueryParams.Encode()
	rawURL := fmt.Sprintf("https://%s.blob.core.windows.net/?%s",
		credential.AccountName(), qp)
	// convert the raw url and validate it was parsed successfully
	fullURL, err := url.Parse(rawURL)
	c.Assert(err, chk.IsNil)
	return azblob.NewServiceURL(*fullURL, azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{}))
}
// getShareURLWithSAS returns a ShareURL authenticated via a 48-hour share-scoped
// SAS token (read/write/create/delete/list) signed with the given shared-key
// credential; the pipeline itself is anonymous since auth is in the query string.
func getShareURLWithSAS(c *chk.C, credential azfile.SharedKeyCredential, shareName string) azfile.ShareURL {
	sasQueryParams, err := azfile.FileSASSignatureValues{
		Protocol:    azfile.SASProtocolHTTPS,
		ExpiryTime:  time.Now().UTC().Add(48 * time.Hour),
		ShareName:   shareName,
		Permissions: azfile.ShareSASPermissions{Read: true, Write: true, Create: true, Delete: true, List: true}.String(),
	}.NewSASQueryParameters(&credential)
	c.Assert(err, chk.IsNil)
	// construct the url from scratch
	qp := sasQueryParams.Encode()
	rawURL := fmt.Sprintf("https://%s.file.core.windows.net/%s?%s",
		credential.AccountName(), shareName, qp)
	// convert the raw url and validate it was parsed successfully
	fullURL, err := url.Parse(rawURL)
	c.Assert(err, chk.IsNil)
	// TODO perhaps we need a global default pipeline
	return azfile.NewShareURL(*fullURL, azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{}))
}
<file_sep>package cmd
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-file-go/azfile"
)
// copyS2SMigrationFileEnumerator enumerates file source, and submit request for copy file to N,
// where N stands for blob/file/blobFS (Currently only blob is supported).
// The source could be single file/directory/share/file account
type copyS2SMigrationFileEnumerator struct {
	copyS2SMigrationEnumeratorBase
	// source Azure File resources
	// srcFilePipeline is the HTTP pipeline used for all source-side File requests.
	srcFilePipeline pipeline.Pipeline
	// srcFileURLPartExtension holds the parsed source URL parts with helper methods.
	srcFileURLPartExtension fileURLPartsExtension
}
// initEnumerator prepares the enumerator for a File->N copy: runs the common
// init (URL parsing, scheme validation, destination pipeline), appends SAS
// tokens to both URLs, creates the anonymous source File pipeline, parses the
// source URL parts, and decides whether properties can be fetched in the
// backend during transfer.
func (e *copyS2SMigrationFileEnumerator) initEnumerator(ctx context.Context, cca *cookedCopyCmdArgs) (err error) {
	if err = e.initEnumeratorCommon(ctx, cca); err != nil {
		return err
	}
	// append the sas at the end of query params.
	e.sourceURL = gCopyUtil.appendQueryParamToUrl(e.sourceURL, cca.sourceSAS)
	e.destURL = gCopyUtil.appendQueryParamToUrl(e.destURL, cca.destinationSAS)
	// Create pipeline for source Azure File service.
	// Note: only anonymous credential is supported for file source(i.e. SAS) now.
	// e.CredentialInfo is for destination
	srcCredInfo := common.CredentialInfo{CredentialType: common.ECredentialType.Anonymous()}
	e.srcFilePipeline, err = createFilePipeline(ctx, srcCredInfo)
	if err != nil {
		return err
	}
	e.srcFileURLPartExtension = fileURLPartsExtension{azfile.NewFileURLParts(*e.sourceURL)}
	// When need to do changing source validation, must get property(LMT) for Azure file during enumerating, in order to ensure that
	// files are not changed since enumerating.
	e.S2SGetPropertiesInBackend = cca.s2sPreserveProperties && cca.s2sGetPropertiesInBackend && !cca.s2sSourceChangeValidation
	return nil
}
// enumerate drives the File->N copy. It distinguishes three source shapes:
//   1. a single file (GetProperties succeeds on the source URL),
//   2. an entire file account (requires --recursive and a service-level destination),
//   3. a share or directory (requires --recursive unless a wildcard narrows the scope).
// In each case it creates the destination bucket(s), queues transfers, and
// finally dispatches the last job part.
func (e *copyS2SMigrationFileEnumerator) enumerate(cca *cookedCopyCmdArgs) error {
	ctx := context.TODO()
	if err := e.initEnumerator(ctx, cca); err != nil {
		return err
	}
	// Case-1: Source is single file
	srcFileURL := azfile.NewFileURL(*e.sourceURL, e.srcFilePipeline)
	// Verify if source is a single file
	if e.srcFileURLPartExtension.isFileSyntactically() {
		if fileProperties, err := srcFileURL.GetProperties(ctx); err == nil {
			// Single file cannot be copied to a whole service account.
			if e.isDestServiceSyntactically() {
				return errSingleToAccountCopy
			}
			// If destination looks like a directory/bucket, append the source file name.
			if endWithSlashOrBackSlash(e.destURL.Path) || e.isDestBucketSyntactically() {
				fileName := gCopyUtil.getFileNameFromPath(e.srcFileURLPartExtension.DirectoryOrFilePath)
				*e.destURL = urlExtension{*e.destURL}.generateObjectPath(fileName)
			}
			err := e.createDestBucket(ctx, *e.destURL, nil)
			if err != nil {
				return err
			}
			// Disable get properties in backend, as GetProperties already get full properties.
			e.S2SGetPropertiesInBackend = false
			// directly use destURL as destination
			if err := e.addFileToNTransfer(srcFileURL.URL(), *e.destURL, fileProperties, cca); err != nil {
				return err
			}
			return e.dispatchFinalPart(cca)
		} else {
			// GetProperties failed: decide whether the error is fatal or the URL
			// should fall through to directory/account enumeration below.
			if isFatal := handleSingleFileValidationErrorForAzureFile(err); isFatal {
				return err
			}
		}
	}
	// Case-2: Source is account, currently only support blob destination
	if isAccountLevel, sharePrefix := e.srcFileURLPartExtension.isFileAccountLevelSearch(); isAccountLevel {
		glcm.Info(infoCopyFromAccount)
		if !cca.recursive {
			return fmt.Errorf("cannot copy the entire account without recursive flag. Please use --recursive flag")
		}
		// Validate If destination is service level account.
		if err := e.validateDestIsService(ctx, *e.destURL); err != nil {
			return err
		}
		srcServiceURL := azfile.NewServiceURL(e.srcFileURLPartExtension.getServiceURL(), e.srcFilePipeline)
		fileOrDirectoryPrefix, fileNamePattern, _ := e.srcFileURLPartExtension.searchPrefixFromFileURL()
		// List shares and add transfers for these shares.
		if err := e.addTransferFromAccount(ctx, srcServiceURL, *e.destURL, sharePrefix, fileOrDirectoryPrefix,
			fileNamePattern, cca); err != nil {
			return err
		}
	} else { // Case-3: Source is a file share or directory
		glcm.Info(infoCopyFromDirectoryListOfFiles) // Share is mapped to root directory
		searchPrefix, fileNamePattern, isWildcardSearch := e.srcFileURLPartExtension.searchPrefixFromFileURL()
		if fileNamePattern == "*" && !cca.recursive && !isWildcardSearch {
			return fmt.Errorf("cannot copy the entire share or directory without recursive flag. Please use --recursive flag")
		}
		if err := e.createDestBucket(ctx, *e.destURL, nil); err != nil {
			return err
		}
		if err := e.addTransfersFromDirectory(ctx,
			azfile.NewShareURL(e.srcFileURLPartExtension.getShareURL(), e.srcFilePipeline).NewRootDirectoryURL(),
			*e.destURL,
			searchPrefix,
			fileNamePattern,
			e.srcFileURLPartExtension.getParentSourcePath(),
			false,
			isWildcardSearch,
			cca); err != nil {
			return err
		}
	}
	// If part number is 0 && number of transfer queued is 0
	// it means that no job part has been dispatched and there are no
	// transfer in Job to dispatch a JobPart.
	if e.PartNum == 0 && len(e.Transfers) == 0 {
		return fmt.Errorf("no transfer queued to copy. Please verify the source / destination")
	}
	// dispatch the JobPart as Final Part of the Job
	return e.dispatchFinalPart(cca)
}
// addTransferFromAccount enumerates shares in account, and adds matched file into transfer.
// For each share matching sharePrefix it creates a corresponding destination
// bucket and recursively enumerates the share's files.
func (e *copyS2SMigrationFileEnumerator) addTransferFromAccount(ctx context.Context,
	srcServiceURL azfile.ServiceURL, destBaseURL url.URL,
	sharePrefix, fileOrDirectoryPrefix, fileNamePattern string, cca *cookedCopyCmdArgs) error {
	return enumerateSharesInAccount(
		ctx,
		srcServiceURL,
		sharePrefix,
		func(shareItem azfile.ShareItem) error {
			// Whatever the destination type is, it should be equivalent to account level,
			// so directly append share name to it.
			tmpDestURL := urlExtension{URL: destBaseURL}.generateObjectPath(shareItem.Name)
			// create bucket for destination, in case bucket doesn't exist.
			if err := e.createDestBucket(ctx, tmpDestURL, nil); err != nil {
				return err
			}
			// Two cases for exclude/include which need to match share names in account:
			// a. https://<fileservice>/share*/file*.vhd
			// b. https://<fileservice>/ which equals to https://<fileservice>/*
			return e.addTransfersFromDirectory(
				ctx,
				srcServiceURL.NewShareURL(shareItem.Name).NewRootDirectoryURL(),
				tmpDestURL,
				fileOrDirectoryPrefix,
				fileNamePattern,
				"",
				true,
				true,
				cca)
		})
}
// addTransfersFromDirectory enumerates files in directory and sub directoreis,
// and adds matched file into transfer. A file is queued only if it matches
// fileNamePattern and passes the include/exclude filters; per-file properties
// are fetched eagerly when required (property preservation without backend
// fetch, or source-change validation), otherwise the listing's properties are
// used directly.
func (e *copyS2SMigrationFileEnumerator) addTransfersFromDirectory(ctx context.Context,
	srcDirectoryURL azfile.DirectoryURL, destBaseURL url.URL,
	fileOrDirNamePrefix, fileNamePattern, parentSourcePath string,
	includExcludeShare, isWildcardSearch bool, cca *cookedCopyCmdArgs) error {
	// fileFilter reports whether a listed file should be transferred.
	fileFilter := func(fileItem azfile.FileItem, fileURL azfile.FileURL) bool {
		fileURLPart := azfile.NewFileURLParts(fileURL.URL())
		// Check if file name matches pattern.
		if !gCopyUtil.matchBlobNameAgainstPattern(fileNamePattern, fileURLPart.DirectoryOrFilePath, cca.recursive) {
			return false
		}
		// When matching at account level, include/exclude patterns are matched
		// against "share/path"; otherwise against the path alone.
		includeExcludeMatchPath := common.IffString(includExcludeShare,
			fileURLPart.ShareName+"/"+fileURLPart.DirectoryOrFilePath,
			fileURLPart.DirectoryOrFilePath)
		// Check the file should be included or not.
		if !gCopyUtil.resourceShouldBeIncluded(parentSourcePath, e.Include, includeExcludeMatchPath) {
			return false
		}
		// Check the file should be excluded or not.
		if gCopyUtil.resourceShouldBeExcluded(parentSourcePath, e.Exclude, includeExcludeMatchPath) {
			return false
		}
		return true
	}
	// enumerate files and sub directories in directory, and add matched files into transfer.
	return enumerateDirectoriesAndFilesInShare(
		ctx,
		srcDirectoryURL,
		fileOrDirNamePrefix,
		cca.recursive,
		fileFilter,
		func(fileItem azfile.FileItem, fileURL azfile.FileURL) error {
			fileURLPart := azfile.NewFileURLParts(fileURL.URL())
			var fileRelativePath = ""
			// As downloading blob logic temporarily, refactor after scenario ensured.
			if isWildcardSearch {
				fileRelativePath = strings.Replace(fileURLPart.DirectoryOrFilePath,
					fileOrDirNamePrefix[:strings.LastIndex(fileOrDirNamePrefix, common.AZCOPY_PATH_SEPARATOR_STRING)+1], "", 1)
			} else {
				fileRelativePath = gCopyUtil.getRelativePath(fileOrDirNamePrefix, fileURLPart.DirectoryOrFilePath)
			}
			// TODO: Remove get attribute, when file's list method can return property and metadata directly.
			// As changing source validation need LMT which is not returned during list, enforce to get property if s2sSourceChangeValidation is enabled.
			if (cca.s2sPreserveProperties && !cca.s2sGetPropertiesInBackend) || cca.s2sSourceChangeValidation {
				p, err := fileURL.GetProperties(ctx)
				if err != nil {
					return err
				}
				return e.addFileToNTransfer(
					fileURL.URL(),
					urlExtension{URL: destBaseURL}.generateObjectPath(fileRelativePath),
					p,
					cca)
			} else {
				return e.addFileToNTransfer2(
					fileURL.URL(),
					urlExtension{URL: destBaseURL}.generateObjectPath(fileRelativePath),
					fileItem.Properties,
					cca)
			}
		})
}
// addFileToNTransfer queues one file copy carrying the full set of HTTP
// properties and metadata taken from a GetProperties response. SAS tokens are
// stripped from the recorded source/destination URLs.
func (e *copyS2SMigrationFileEnumerator) addFileToNTransfer(srcURL, destURL url.URL, properties *azfile.FileGetPropertiesResponse,
	cca *cookedCopyCmdArgs) error {
	return e.addTransfer(common.CopyTransfer{
		Source:             gCopyUtil.stripSASFromFileShareUrl(srcURL).String(),
		Destination:        gCopyUtil.stripSASFromBlobUrl(destURL).String(), // Optimize this if more target resource types need be supported.
		LastModifiedTime:   properties.LastModified(),
		SourceSize:         properties.ContentLength(),
		ContentType:        properties.ContentType(),
		ContentEncoding:    properties.ContentEncoding(),
		ContentDisposition: properties.ContentDisposition(),
		ContentLanguage:    properties.ContentLanguage(),
		CacheControl:       properties.CacheControl(),
		ContentMD5:         properties.ContentMD5(),
		Metadata:           common.FromAzFileMetadataToCommonMetadata(properties.NewMetadata())},
		cca)
}
// addFileToNTransfer2 queues one file copy carrying only the content length
// from the listing's FileProperty (used when full properties will be fetched
// in the backend, or are not preserved). SAS tokens are stripped from the URLs.
func (e *copyS2SMigrationFileEnumerator) addFileToNTransfer2(srcURL, destURL url.URL, properties *azfile.FileProperty,
	cca *cookedCopyCmdArgs) error {
	return e.addTransfer(common.CopyTransfer{
		Source:      gCopyUtil.stripSASFromFileShareUrl(srcURL).String(),
		Destination: gCopyUtil.stripSASFromBlobUrl(destURL).String(), // Optimize this if more target resource types need be supported.
		SourceSize:  properties.ContentLength},
		cca)
}
// addTransfer forwards the transfer to the shared package-level addTransfer,
// accumulating it into this enumerator's job part order request.
func (e *copyS2SMigrationFileEnumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	return addTransfer(&(e.CopyJobPartOrderRequest), transfer, cca)
}
// dispatchFinalPart sends the accumulated transfers as the job's final part.
func (e *copyS2SMigrationFileEnumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	return dispatchFinalPart(&(e.CopyJobPartOrderRequest), cca)
}
// partNum returns the current job part number.
func (e *copyS2SMigrationFileEnumerator) partNum() common.PartNumber {
	return e.PartNum
}
<file_sep>package cmd
import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// errSingleToAccountCopy is returned when the source is a single file but the
// destination is a whole service account, which is not a valid S2S combination.
var errSingleToAccountCopy = errors.New("invalid source and destination combination for service to service copy: " +
	"destination cannot be a service URL, when source is a single file")
// copyS2SMigrationEnumeratorBase is the base of other service to service copy enumerators,
// which contains common functions and properties.
type copyS2SMigrationEnumeratorBase struct {
	common.CopyJobPartOrderRequest
	// object used for destination pre-operations: e.g. create container/share/bucket and etc.
	destBlobPipeline pipeline.Pipeline
	// copy source
	sourceURL *url.URL
	// copy destination
	destURL *url.URL
}
// initEnumeratorCommon inits common properties for enumerator: parses source and
// destination URLs (normalizing backslashes), enforces HTTPS on both, initializes
// the destination pipeline, and copies the source-change-validation / invalid-
// metadata options from the cooked args into the job order request.
func (e *copyS2SMigrationEnumeratorBase) initEnumeratorCommon(ctx context.Context, cca *cookedCopyCmdArgs) (err error) {
	// attempt to parse the source and destination url
	if e.sourceURL, err = url.Parse(gCopyUtil.replaceBackSlashWithSlash(cca.source)); err != nil {
		return errors.New("cannot parse source URL")
	}
	if e.destURL, err = url.Parse(gCopyUtil.replaceBackSlashWithSlash(cca.destination)); err != nil {
		return errors.New("cannot parse destination URL")
	}
	if e.sourceURL.Scheme != "https" || e.destURL.Scheme != "https" {
		return errors.New("S2S copy requires source and destination URLs using the https protocol scheme")
	}
	if err := e.initDestPipeline(ctx); err != nil {
		return err
	}
	e.S2SSourceChangeValidation = cca.s2sSourceChangeValidation
	e.S2SInvalidMetadataHandleOption = cca.s2sInvalidMetadataHandleOption
	return nil
}
// initDestPipeline inits destination pipelines shared for destination operations.
// Only blob destinations are supported; any other FromTo pair is a programming
// error and panics.
func (e *copyS2SMigrationEnumeratorBase) initDestPipeline(ctx context.Context) error {
	switch e.FromTo {
	// Currently, e.CredentialInfo is always for the target needs to trigger copy API.
	// In this case, blob destination will use it which needs to call StageBlockFromURL later.
	case common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		p, err := createBlobPipeline(ctx, e.CredentialInfo)
		if err != nil {
			return err
		}
		e.destBlobPipeline = p
	default:
		panic(fmt.Errorf("invalid from-to pair, %v", e.FromTo))
	}
	return nil
}
// createDestBucket creates bucket level resource for destination, e.g. container for blob, share for file, and etc.
// The "already exists" error is deliberately swallowed; a 403 on create triggers
// a GetProperties probe so that WRITE-only SAS destinations can still be used.
// TODO: Ensure if metadata in bucket level need be copied, currently not copy metadata in bucket level as azcopy-v1.
func (e *copyS2SMigrationEnumeratorBase) createDestBucket(ctx context.Context, destURL url.URL, metadata common.Metadata) error {
	// TODO: For dry run, createDestBucket should do nothing and directly return.
	switch e.FromTo {
	case common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		if e.destBlobPipeline == nil {
			panic(errors.New("invalid state, blob type destination's pipeline is not initialized"))
		}
		tmpContainerURL := blobURLPartsExtension{azblob.NewBlobURLParts(destURL)}.getContainerURL()
		containerURL := azblob.NewContainerURL(tmpContainerURL, e.destBlobPipeline)
		// Create the container, in case of it doesn't exist.
		_, err := containerURL.Create(ctx, metadata.ToAzBlobMetadata(), azblob.PublicAccessNone)
		if err != nil {
			// Skip the error, when container already exists.
			stgErr, isStgErr := err.(azblob.StorageError)
			if isStgErr && stgErr.ServiceCode() == azblob.ServiceCodeContainerAlreadyExists {
				return nil
			}
			// Skip the error, when azcopy doesn't have permission to create container, and fail to get the info whether container exists.
			// As when it's destination with WRITE only permission, azcopy should try to suppose container already exists and continue transfer.
			if isStgErr && stgErr.Response().StatusCode == http.StatusForbidden { // In this case, we don't know if the container already exists.
				if _, getErr := containerURL.GetProperties(ctx, azblob.LeaseAccessConditions{}); getErr == nil {
					// The container already exists, ignore the create error
					return nil
				} else {
					// Cannot get the info whether container exists.
					// NOTE(review): any probe failure other than a 404 is treated as
					// "assume the container exists and continue" (returns nil); only
					// a definite 404 falls through to report the create failure.
					stgErr, isStgErr := getErr.(azblob.StorageError)
					if !isStgErr || stgErr.Response().StatusCode != http.StatusNotFound {
						return nil
					}
				}
			}
			return fmt.Errorf("fail to create container, %v", err)
		}
	default:
		panic(fmt.Errorf("invalid from-to pair, %v", e.FromTo))
	}
	return nil
}
// validateDestIsService check if destination is a service level URL.
// It probes the destination with a service-level GetProperties call; a failure
// means the URL does not point at a service account (or is unreachable).
func (e *copyS2SMigrationEnumeratorBase) validateDestIsService(ctx context.Context, destURL url.URL) error {
	switch e.FromTo {
	case common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		if e.destBlobPipeline == nil {
			panic(errors.New("invalid state, blob type destination's pipeline is not initialized"))
		}
		destServiceURL := azblob.NewServiceURL(destURL, e.destBlobPipeline)
		if _, err := destServiceURL.GetProperties(ctx); err != nil {
			return fmt.Errorf("invalid source and destination combination for service to service copy: "+
				"destination must point to service account in current scenario, error when checking destination properties, %v", err)
		}
	default:
		panic(fmt.Errorf("invalid from-to pair, %v", e.FromTo))
	}
	return nil
}
// isDestServiceSyntactically check if destination could be a service level URL through URL parsing.
// Unlike validateDestIsService, no network request is made.
func (e *copyS2SMigrationEnumeratorBase) isDestServiceSyntactically() bool {
	switch e.FromTo {
	case common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		dsue := blobURLPartsExtension{BlobURLParts: azblob.NewBlobURLParts(*e.destURL)}
		return dsue.isServiceSyntactically()
	default:
		panic(fmt.Errorf("invalid from-to pair, %v", e.FromTo))
	}
}
// isDestBucketSyntactically check if destination could be a bucket/container/share level URL through URL parsing.
// (Doc name fixed: the original comment was copy-pasted from isDestServiceSyntactically.)
func (e *copyS2SMigrationEnumeratorBase) isDestBucketSyntactically() bool {
	switch e.FromTo {
	case common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		dsue := blobURLPartsExtension{BlobURLParts: azblob.NewBlobURLParts(*e.destURL)}
		return dsue.isContainerSyntactically()
	default:
		panic(fmt.Errorf("invalid from-to pair, %v", e.FromTo))
	}
}
<file_sep>package cmd
import (
"context"
"errors"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/minio/minio-go"
"github.com/minio/minio-go/pkg/s3utils"
)
// copyS2SMigrationS3Enumerator enumerates S3 source, and submits request for copy S3 to Blob and etc.
// The source could be point to S3 object, bucket or service.
type copyS2SMigrationS3Enumerator struct {
	copyS2SMigrationEnumeratorBase
	// source S3 resources
	// s3Client is the minio client used for all source-side S3 requests.
	s3Client *minio.Client
	// s3URLParts holds the parsed source URL parts with helper methods.
	s3URLParts s3URLPartsExtension
}
// initEnumerator prepares the enumerator for an S3->Blob copy: runs the common
// init, appends the destination SAS, parses and validates the S3 source URL,
// creates the S3 client (access-key credentials only), and decides whether
// properties can be fetched in the backend during transfer.
func (e *copyS2SMigrationS3Enumerator) initEnumerator(ctx context.Context, cca *cookedCopyCmdArgs) (err error) {
	// TODO: Remove the message after S3 to blob feature GA.
	glcm.Info("AWS S3 to Azure Blob copy is currently in preview. Validate the copy operation carefully before removing your data at source.")
	if err = e.initEnumeratorCommon(ctx, cca); err != nil {
		return err
	}
	e.destURL = gCopyUtil.appendQueryParamToUrl(e.destURL, cca.destinationSAS)
	// Check whether the source URL is a valid S3 URL, and parse URL parts.
	if s3URLParts, err := common.NewS3URLParts(*e.sourceURL); err != nil {
		return err
	} else {
		e.s3URLParts = s3URLPartsExtension{s3URLParts}
	}
	if e.s3Client, err = common.CreateS3Client(
		ctx,
		common.CredentialInfo{
			CredentialType: common.ECredentialType.S3AccessKey(), // Currently only support access key
			S3CredentialInfo: common.S3CredentialInfo{
				Endpoint: e.s3URLParts.Endpoint,
				Region:   e.s3URLParts.Region,
			},
		},
		common.CredentialOpOptions{
			LogError: glcm.Error,
			// LogInfo: glcm.Info, // Uncomment for debugging purpose
		},
	); err != nil {
		return err
	}
	e.S2SGetPropertiesInBackend = cca.s2sPreserveProperties && cca.s2sGetPropertiesInBackend
	return
}
// enumerate drives the S3->Blob copy. It distinguishes three source shapes:
//   1. a single object (StatObject succeeds and the URL is not directory-like),
//   2. a whole S3 service endpoint (requires --recursive and a service-level destination),
//   3. a bucket or virtual directory (requires --recursive unless a wildcard narrows the scope).
// Bucket names are resolved to Azure-valid container names where needed.
// Finally the accumulated transfers are dispatched as the job's last part.
func (e *copyS2SMigrationS3Enumerator) enumerate(cca *cookedCopyCmdArgs) error {
	ctx := context.TODO() // This would better be singleton in cmd module, and passed from caller.
	if err := e.initEnumerator(ctx, cca); err != nil {
		return err
	}
	// Start enumerating.
	// Case-1: Source is a single object.
	// Verify if source is a single object, note that s3URLParts only verifies resource type through parsing URL from syntax aspect.
	if e.s3URLParts.IsObjectSyntactically() && !e.s3URLParts.IsDirectorySyntactically() {
		if objectInfo, err := e.s3Client.StatObject(e.s3URLParts.BucketName, e.s3URLParts.ObjectKey, minio.StatObjectOptions{}); err == nil {
			// The source is a single object.
			if e.isDestServiceSyntactically() {
				return errSingleToAccountCopy
			}
			if endWithSlashOrBackSlash(e.destURL.Path) || e.isDestBucketSyntactically() {
				fileName := gCopyUtil.getFileNameFromPath(e.s3URLParts.ObjectKey)
				*e.destURL = urlExtension{*e.destURL}.generateObjectPath(fileName)
			}
			if err := e.createDestBucket(ctx, *e.destURL, nil); err != nil {
				return err
			}
			// Disable get properties in backend, as StatObject already get full properties.
			e.S2SGetPropertiesInBackend = false
			if err := e.addObjectToNTransfer(*e.sourceURL, *e.destURL, &objectInfo, cca); err != nil {
				return err
			}
			return e.dispatchFinalPart(cca)
		} else {
			// NOTE(review): unlike the Azure File enumerator, the fatal-error result
			// is discarded here, so the code always falls through to directory/service
			// enumeration — confirm this asymmetry is intentional.
			handleSingleFileValidationErrorForS3(err)
		}
	}
	// Case-2: Source is a service endpoint.
	if isServiceLevel, bucketPrefix := e.s3URLParts.isServiceLevelSearch(); isServiceLevel {
		glcm.Info(infoCopyFromAccount)
		if !cca.recursive {
			return fmt.Errorf("cannot copy the entire S3 service without recursive flag. Please use --recursive flag")
		}
		// Validate if destination is service level account.
		if err := e.validateDestIsService(ctx, *e.destURL); err != nil {
			return err
		}
		objectPrefix, objectPattern, _ := e.s3URLParts.searchObjectPrefixAndPatternFromS3URL()
		// List buckets and add transfers for these buckets.
		if err := e.addTransferFromService(ctx, e.s3Client, *e.destURL,
			bucketPrefix, objectPrefix, objectPattern, cca); err != nil {
			return err
		}
	} else { // Case-3: Source is a bucket or virutal directory.
		glcm.Info(infoCopyFromBucketDirectoryListOfFiles)
		// Ensure there is a valid bucket name in this case.
		if err := s3utils.CheckValidBucketNameStrict(e.s3URLParts.BucketName); err != nil {
			return err
		}
		objectPrefix, objectPattern, isWildcardSearch := e.s3URLParts.searchObjectPrefixAndPatternFromS3URL()
		if objectPattern == "*" && !cca.recursive && !isWildcardSearch {
			return fmt.Errorf("cannot copy the entire bucket or directory without recursive flag. Please use --recursive flag")
		}
		// Check if destination is point to an Azure service.
		// If destination is an Azure service, azcopy tries to create a bucket(container, share or etc) with source's bucket name,
		// and then copy from source bucket to created destination bucket(container, share or etc).
		// Otherwise, if destination is not service level resource, AzCopy will keep it as a bucket level resource or directory level resource,
		// and directly copy from source to destination.
		if err := e.validateDestIsService(ctx, *e.destURL); err == nil {
			// name resolver is used only when the target URL is inferred from source URL.
			s3BucketNameResolver := NewS3BucketNameToAzureResourcesResolver([]string{e.s3URLParts.BucketName})
			resolvedBucketName, err := s3BucketNameResolver.ResolveName(e.s3URLParts.BucketName)
			if err != nil {
				glcm.Error(err.Error())
				return errors.New("fail to add transfer, the source bucket has invalid name for Azure. " +
					"Please include the destination container/share/filesystem name in the destination URL.")
			}
			*e.destURL = urlExtension{*e.destURL}.generateObjectPath(resolvedBucketName)
			glcm.Info(fmt.Sprintf("source is bucket and destination is an Azure service endpoint, "+
				"bucket with name %q will be created in destination to store data", resolvedBucketName))
		}
		// create bucket for destination, in case bucket doesn't exist.
		if err := e.createDestBucket(ctx, *e.destURL, nil); err != nil {
			return err
		}
		if err := e.addTransfersFromBucket(ctx, e.s3Client, *e.destURL,
			e.s3URLParts.BucketName,
			objectPrefix,
			objectPattern,
			e.s3URLParts.getParentSourcePath(),
			false,
			isWildcardSearch,
			cca); err != nil {
			return err
		}
	}
	// If part number is 0 && number of transfer queued is 0
	// it means that no job part has been dispatched and there are no
	// transfer in Job to dispatch a JobPart.
	if e.PartNum == 0 && len(e.Transfers) == 0 {
		return fmt.Errorf("no transfer queued to copy. Please verify the source / destination")
	}
	// dispatch the JobPart as Final Part of the Job
	return e.dispatchFinalPart(cca)
}
// addTransferFromService enumerates all buckets in the S3 service endpoint,
// resolves their names into Azure-compatible container/share/filesystem names,
// and adds every matched object into the transfer list.
// bucketPrefix narrows which buckets are considered; objectPrefix and
// objectPattern narrow which objects inside each bucket are matched.
func (e *copyS2SMigrationS3Enumerator) addTransferFromService(ctx context.Context,
	s3Client *minio.Client, destBaseURL url.URL,
	bucketPrefix, objectPrefix, objectPattern string, cca *cookedCopyCmdArgs) error {
	// List buckets.
	bucketInfos, err := s3Client.ListBuckets()
	if err != nil {
		return fmt.Errorf("cannot list buckets, %v", err)
	}
	// Create name resolver over every bucket name in the account.
	var bucketNames []string
	for _, bucketInfo := range bucketInfos {
		bucketNames = append(bucketNames, bucketInfo.Name)
	}
	r := NewS3BucketNameToAzureResourcesResolver(bucketNames)
	// Validate name resolving up front so the copy fails fast before any transfer
	// is queued. If any name was changed during resolution, inform the user.
	resolveErr := false
	for _, bucketInfo := range bucketInfos {
		if resolvedName, err := r.ResolveName(bucketInfo.Name); err != nil {
			// For resolving failure, print to user, but keep scanning so every
			// invalid bucket is reported in one pass.
			glcm.Error(err.Error())
			resolveErr = true
		} else {
			if resolvedName != bucketInfo.Name {
				glcm.Info(fmt.Sprintf("s3 bucket name %q is invalid for Azure container/share/filesystem, and has been renamed to %q", bucketInfo.Name, resolvedName))
			}
		}
	}
	if resolveErr {
		return errors.New("fail to add transfers from service, some of the buckets have invalid names for Azure. " +
			"Please exclude the invalid buckets in service to service copy, and copy them use bucket to container/share/filesystem copy " +
			"with customized destination name after the service to service copy finished")
	}
	// bucket filter selects buckets that need to be involved in the transfer.
	bucketFilter := func(bucketInfo minio.BucketInfo) bool {
		// Check if bucket name has given prefix.
		if strings.HasPrefix(bucketInfo.Name, bucketPrefix) {
			return true
		}
		return false
	}
	// bucketAction enumerates a single accepted bucket and queues its objects.
	bucketAction := func(bucketInfo minio.BucketInfo) error {
		// Note: Name resolving is only for destination, source's bucket name should be kept for include/exclude/wildcard.
		resolvedBucketName, _ := r.ResolveName(bucketInfo.Name) // No error here, as already validated.
		// Whatever the destination type is, it should be equivalent to account level,
		// so directly append bucket name to it.
		tmpDestURL := urlExtension{URL: destBaseURL}.generateObjectPath(resolvedBucketName)
		// create bucket for destination, in case bucket doesn't exist.
		if err := e.createDestBucket(ctx, tmpDestURL, nil); err != nil {
			return err
		}
		// Two cases for exclude/include which need to match bucket names in account:
		// a. https://<service>/bucket*/obj*
		// b. https://<service>/ which equals to https://<service>/*
		return e.addTransfersFromBucket(ctx, s3Client, tmpDestURL, bucketInfo.Name, objectPrefix, objectPattern, "", true, true, cca)
	}
	return enumerateBucketsInServiceWithMinio(bucketInfos, bucketFilter, bucketAction)
}
// addTransfersFromBucket enumerates objects in the given bucket and adds each
// matched object into the transfer list.
// objectNamePrefix narrows the server-side listing; objectNamePattern is matched
// client-side and may contain wildcards. parentSourcePath is the base used for
// include/exclude matching. includExcludeBucket selects whether include/exclude
// patterns match against "bucket/objectKey" instead of just the object key.
// isWildcardSearch changes how the destination-relative path is derived.
func (e *copyS2SMigrationS3Enumerator) addTransfersFromBucket(ctx context.Context,
	s3Client *minio.Client, destBaseURL url.URL,
	bucketName, objectNamePrefix, objectNamePattern, parentSourcePath string,
	includExcludeBucket, isWildcardSearch bool, cca *cookedCopyCmdArgs) error {
	// object filter selects objects that need to be transferred.
	objectFilter := func(objectInfo minio.ObjectInfo) bool {
		// As design discussion, skip the object with suffix "/", which indicates the object represents a directory in S3 management console,
		// considering there is no directory in Azure blob.
		if strings.HasSuffix(objectInfo.Key, "/") {
			return false
		}
		// Check if object name matches pattern.
		if !gCopyUtil.matchBlobNameAgainstPattern(objectNamePattern, objectInfo.Key, cca.recursive) {
			return false
		}
		includeExcludeMatchPath := common.IffString(includExcludeBucket,
			bucketName+"/"+objectInfo.Key,
			objectInfo.Key)
		// Check the object should be included or not.
		if !gCopyUtil.resourceShouldBeIncluded(parentSourcePath, e.Include, includeExcludeMatchPath) {
			return false
		}
		// Check the object should be excluded or not.
		if gCopyUtil.resourceShouldBeExcluded(parentSourcePath, e.Exclude, includeExcludeMatchPath) {
			return false
		}
		return true
	}
	// objectAction queues a single selected object as a transfer.
	objectAction := func(objectInfo minio.ObjectInfo) error {
		var objectRelativePath = ""
		if isWildcardSearch {
			// For wildcard searches, strip everything up to and including the last
			// path separator of the prefix from the object key.
			objectRelativePath = strings.Replace(objectInfo.Key,
				objectNamePrefix[:strings.LastIndex(objectNamePrefix, common.AZCOPY_PATH_SEPARATOR_STRING)+1], "", 1)
		} else {
			objectRelativePath = gCopyUtil.getRelativePath(objectNamePrefix, objectInfo.Key)
		}
		// S3's list operations don't return all of an object's properties, such as content-encoding.
		// So azcopy needs an additional get request to collect these properties.
		// When get S2S properties in backend is not enabled, get properties during enumerating.
		if cca.s2sPreserveProperties && !cca.s2sGetPropertiesInBackend {
			var err error
			objectInfo, err = s3Client.StatObject(bucketName, objectInfo.Key, minio.StatObjectOptions{})
			if err != nil {
				return err
			}
		}
		// Compose the source S3 object URL.
		tmpS3URLPart := e.s3URLParts
		tmpS3URLPart.BucketName = bucketName
		tmpS3URLPart.ObjectKey = objectInfo.Key
		return e.addObjectToNTransfer(
			tmpS3URLPart.URL(),
			urlExtension{URL: destBaseURL}.generateObjectPath(objectRelativePath),
			&objectInfo,
			cca)
	}
	// enumerate objects in bucket, and add matched objects into transfer.
	err := enumerateObjectsInBucketWithMinio(ctx, s3Client, bucketName, objectNamePrefix, objectFilter, objectAction)
	if err != nil {
		// Handle the error that fail to list objects in bucket due to Location mismatch, which is caused by source endpoint doesn't match S3 buckets' regions
		if strings.Contains(err.Error(), "301 response missing Location header") {
			glcm.Info(fmt.Sprintf("skip enumerating the bucket %q, as it's not in the region specified by source URL", bucketName))
		} else {
			return err
		}
	}
	return nil
}
// addObjectToNTransfer composes a CopyTransfer for a single S3 object and queues it.
// The destination URL is recorded with its SAS stripped.
func (e *copyS2SMigrationS3Enumerator) addObjectToNTransfer(srcURL, destURL url.URL, objectInfo *minio.ObjectInfo,
	cca *cookedCopyCmdArgs) error {
	// The extension type exposes the S3 properties in the form CopyTransfer expects.
	propExt := common.ObjectInfoExtension{ObjectInfo: *objectInfo}
	transfer := common.CopyTransfer{
		Source:             srcURL.String(),
		Destination:        gCopyUtil.stripSASFromBlobUrl(destURL).String(),
		LastModifiedTime:   objectInfo.LastModified,
		SourceSize:         objectInfo.Size,
		ContentType:        objectInfo.ContentType,
		ContentEncoding:    propExt.ContentEncoding(),
		ContentDisposition: propExt.ContentDisposition(),
		ContentLanguage:    propExt.ContentLanguage(),
		CacheControl:       propExt.CacheControl(),
		ContentMD5:         propExt.ContentMD5(),
		Metadata:           propExt.NewCommonMetadata(),
	}
	return e.addTransfer(transfer, cca)
}
// addTransfer forwards the composed transfer to the shared package-level helper,
// which batches transfers into job parts.
func (e *copyS2SMigrationS3Enumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	return addTransfer(&(e.CopyJobPartOrderRequest), transfer, cca)
}

// dispatchFinalPart sends the remaining transfers as the final job part.
func (e *copyS2SMigrationS3Enumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	return dispatchFinalPart(&(e.CopyJobPartOrderRequest), cca)
}

// partNum reports the current job part number of this enumerator.
func (e *copyS2SMigrationS3Enumerator) partNum() common.PartNumber {
	return e.PartNum
}
<file_sep>package cmd
import (
	"context"
	"fmt"
	"math/rand"
	"strings"
	"sync"
	"time"

	"github.com/Azure/azure-storage-azcopy/azbfs"
	"github.com/Azure/azure-storage-azcopy/common"
	"github.com/Azure/azure-storage-blob-go/azblob"
	"github.com/Azure/azure-storage-file-go/azfile"
	minio "github.com/minio/minio-go"
)
// addTransfer accepts a new transfer; once the batch threshold is reached, it
// dispatches the accumulated transfers as a job part order to the transfer engine.
func addTransfer(e *common.CopyJobPartOrderRequest, transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	// Remove the source and destination roots from the path to save space in the plan files
	transfer.Source = strings.TrimPrefix(transfer.Source, e.SourceRoot)
	transfer.Destination = strings.TrimPrefix(transfer.Destination, e.DestinationRoot)
	// dispatch the transfers once the number reaches NumOfFilesPerDispatchJobPart
	// we do this so that in the case of large transfer, the transfer engine can get started
	// while the frontend is still gathering more transfers
	if len(e.Transfers) == NumOfFilesPerDispatchJobPart {
		shuffleTransfers(e.Transfers)
		resp := common.CopyJobPartOrderResponse{}
		Rpc(common.ERpcCmd.CopyJobPartOrder(), (*common.CopyJobPartOrderRequest)(e), &resp)
		if !resp.JobStarted {
			return fmt.Errorf("copy job part order with JobId %s and part number %d failed because %s", e.JobID, e.PartNum, resp.ErrorMsg)
		}
		// if the current part order sent to engine is 0, then start fetching the Job Progress summary.
		if e.PartNum == 0 {
			cca.waitUntilJobCompletion(false)
		}
		// reset the accumulator and advance to the next part number
		e.Transfers = []common.CopyTransfer{}
		e.PartNum++
	}
	// only append the transfer after we've checked and dispatched a part
	// so that there is at least one transfer for the final part
	e.Transfers = append(e.Transfers, transfer)
	return nil
}
// shuffleOnce guards the one-time seeding of math/rand's global source.
var shuffleOnce sync.Once

// shuffleTransfers randomizes the order of the transfers before they are dispatched.
// This is done to avoid hitting the same partition continuously in an append only pattern.
// TODO this should probably be removed after the high throughput block blob feature is implemented on the service side
func shuffleTransfers(transfers []common.CopyTransfer) {
	// Seed exactly once. The previous code re-seeded on every call with
	// time.Now().UnixNano(), which both weakens the random sequence and can
	// produce identical shuffles for calls landing on the same clock tick.
	shuffleOnce.Do(func() { rand.Seed(time.Now().UnixNano()) })
	rand.Shuffle(len(transfers), func(i, j int) { transfers[i], transfers[j] = transfers[j], transfers[i] })
}
// dispatchFinalPart sends a last part with isFinalPart set to true, along with
// whatever transfers still haven't been sent, and marks enumeration complete.
func dispatchFinalPart(e *common.CopyJobPartOrderRequest, cca *cookedCopyCmdArgs) error {
	shuffleTransfers(e.Transfers)
	e.IsFinalPart = true
	var resp common.CopyJobPartOrderResponse
	Rpc(common.ERpcCmd.CopyJobPartOrder(), (*common.CopyJobPartOrderRequest)(e), &resp)
	if !resp.JobStarted {
		return fmt.Errorf("copy job part order with JobId %s and part number %d failed because %s", e.JobID, e.PartNum, resp.ErrorMsg)
	}
	// set the flag on cca, to indicate the enumeration is done
	cca.isEnumerationComplete = true
	// if the current part order sent to engine is 0, then start fetching the Job Progress summary.
	if e.PartNum == 0 {
		cca.waitUntilJobCompletion(false)
	}
	return nil
}
// handleSingleFileValidationErrStr is the format used when single-file validation
// fails and the copy falls back to treating the source as a container/directory.
var handleSingleFileValidationErrStr = "source is not validated as a single file: %v"

// User-facing info messages announcing which fallback enumeration mode is used.
var infoCopyFromContainerDirectoryListOfFiles = "trying to copy the source as container/directory/list of files"
var infoCopyFromBucketDirectoryListOfFiles = "trying to copy the source as bucket/folder/list of files"
var infoCopyFromDirectoryListOfFiles = "trying to copy the source as directory/list of files"
var infoCopyFromAccount = "trying to copy the source account"
// handleSingleFileValidationErrorForBlob reports why the source could not be
// validated as a single blob. It returns stop=true when the failure is an
// authentication problem that falling back to container/directory mode cannot fix.
func handleSingleFileValidationErrorForBlob(err error) (stop bool) {
	errRootCause := err.Error()
	if stgErr, ok := err.(azblob.StorageError); ok {
		switch stgErr.ServiceCode() {
		case azblob.ServiceCodeInvalidAuthenticationInfo:
			errRootCause = fmt.Sprintf("%s, Please ensure all login details are correct (or that you are signed into the correct tenant for this storage account)", stgErr.ServiceCode())
			stop = true
		case azblob.ServiceCodeAuthenticationFailed:
			errRootCause = fmt.Sprintf("%s, please check if SAS or OAuth is used properly, or source is a public blob", stgErr.ServiceCode())
			stop = true
		default:
			errRootCause = string(stgErr.ServiceCode())
		}
	}
	// Fatal causes are printed bare; otherwise announce the fallback attempt.
	if stop {
		glcm.Info(errRootCause)
	} else {
		glcm.Info(fmt.Sprintf(handleSingleFileValidationErrStr, errRootCause))
	}
	return
}
// handleSingleFileValidationErrorForAzureFile reports why the source could not be
// validated as a single Azure file. It returns stop=true when the failure is an
// authentication problem that falling back to directory mode cannot fix.
func handleSingleFileValidationErrorForAzureFile(err error) (stop bool) {
	errRootCause := err.Error()
	if stgErr, ok := err.(azfile.StorageError); ok {
		switch stgErr.ServiceCode() {
		case azfile.ServiceCodeInvalidAuthenticationInfo:
			errRootCause = fmt.Sprintf("%s, Please ensure all login details are correct (or that you are signed into the correct tenant for this storage account)", stgErr.ServiceCode())
			stop = true
		case azfile.ServiceCodeAuthenticationFailed:
			errRootCause = fmt.Sprintf("%s, please check if SAS is set properly", stgErr.ServiceCode())
			stop = true
		default:
			errRootCause = string(stgErr.ServiceCode())
		}
	}
	// Fatal causes are printed bare; otherwise announce the fallback attempt.
	if stop {
		glcm.Info(errRootCause)
	} else {
		glcm.Info(fmt.Sprintf(handleSingleFileValidationErrStr, errRootCause))
	}
	return
}
// handleSingleFileValidationErrorForADLSGen2 reports why the source could not be
// validated as a single ADLS Gen2 file. It returns stop=true when the failure is an
// authentication problem that falling back to directory mode cannot fix.
func handleSingleFileValidationErrorForADLSGen2(err error) (stop bool) {
	errRootCause := err.Error()
	if stgErr, ok := err.(azbfs.StorageError); ok {
		switch stgErr.ServiceCode() {
		case azbfs.ServiceCodeInvalidAuthenticationInfo:
			errRootCause = fmt.Sprintf("%s, Please ensure all login details are correct (or that you are signed into the correct tenant for this storage account)", stgErr.ServiceCode())
			stop = true
		case azbfs.ServiceCodeAuthenticationFailed:
			errRootCause = fmt.Sprintf("%s, please check if AccessKey or OAuth is used properly", stgErr.ServiceCode())
			stop = true
		default:
			errRootCause = string(stgErr.ServiceCode())
		}
	}
	// Fatal causes are printed bare; otherwise announce the fallback attempt.
	if stop {
		glcm.Info(errRootCause)
	} else {
		glcm.Info(fmt.Sprintf(handleSingleFileValidationErrStr, errRootCause))
	}
	return
}
// handleSingleFileValidationErrorForS3 logs the single-object validation failure;
// for S3 there is no fatal case, so the caller always falls back to bucket mode.
func handleSingleFileValidationErrorForS3(err error) {
	glcm.Info(fmt.Sprintf(handleSingleFileValidationErrStr, err.Error()))
}
//////////////////////////////////////////////////////////////////////////////////////////
// Blob service enumerators.
//////////////////////////////////////////////////////////////////////////////////////////
// enumerateBlobsInContainer enumerates blobs in a container via paged flat listing,
// invoking callback for each blob accepted by filter. Blobs representing folders
// are skipped. The first callback error aborts the enumeration.
func enumerateBlobsInContainer(ctx context.Context, containerURL azblob.ContainerURL,
	blobPrefix string, filter func(blobItem azblob.BlobItem) bool,
	callback func(blobItem azblob.BlobItem) error) error {
	for marker := (azblob.Marker{}); marker.NotDone(); {
		listContainerResp, err := containerURL.ListBlobsFlatSegment(
			ctx, marker,
			azblob.ListBlobsSegmentOptions{
				// Metadata is requested so folder-marker blobs can be detected below.
				Details: azblob.BlobListingDetails{Metadata: true},
				Prefix:  blobPrefix})
		if err != nil {
			return fmt.Errorf("cannot list blobs, %v", err)
		}
		// Process the blobs returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, blobItem := range listContainerResp.Segment.BlobItems {
			// If the blob represents a folder as per the conditions mentioned in the
			// api doesBlobRepresentAFolder, then skip the blob.
			if gCopyUtil.doesBlobRepresentAFolder(blobItem.Metadata) {
				continue
			}
			if !filter(blobItem) {
				continue
			}
			if err := callback(blobItem); err != nil {
				return err
			}
		}
		marker = listContainerResp.NextMarker
	}
	return nil
}
// enumerateContainersInAccount lists every container in the blob service account
// whose name starts with containerPrefix, invoking callback for each one. The
// first callback error aborts the enumeration and is returned.
func enumerateContainersInAccount(ctx context.Context, srcServiceURL azblob.ServiceURL,
	containerPrefix string, callback func(containerItem azblob.ContainerItem) error) error {
	marker := azblob.Marker{}
	for marker.NotDone() {
		resp, err := srcServiceURL.ListContainersSegment(ctx, marker,
			azblob.ListContainersSegmentOptions{Prefix: containerPrefix})
		if err != nil {
			return fmt.Errorf("cannot list containers, %v", err)
		}
		// An empty segment simply yields zero iterations here.
		for _, containerItem := range resp.ContainerItems {
			if err := callback(containerItem); err != nil {
				return err
			}
		}
		marker = resp.NextMarker
	}
	return nil
}
//////////////////////////////////////////////////////////////////////////////////////////
// File service enumerators.
//////////////////////////////////////////////////////////////////////////////////////////
// enumerateSharesInAccount lists every share in the file service account whose
// name starts with sharePrefix, invoking callback for each one. The first
// callback error aborts the enumeration and is returned.
func enumerateSharesInAccount(ctx context.Context, srcServiceURL azfile.ServiceURL,
	sharePrefix string, callback func(shareItem azfile.ShareItem) error) error {
	marker := azfile.Marker{}
	for marker.NotDone() {
		resp, err := srcServiceURL.ListSharesSegment(ctx, marker,
			azfile.ListSharesOptions{Prefix: sharePrefix})
		if err != nil {
			return fmt.Errorf("cannot list shares, %v", err)
		}
		// An empty segment simply yields zero iterations here.
		for _, shareItem := range resp.ShareItems {
			if err := callback(shareItem); err != nil {
				return err
			}
		}
		marker = resp.NextMarker
	}
	return nil
}
// enumerateDirectoriesAndFilesInShare enumerates files in share.
// fileOrDirPrefix could be:
// a. File with parent directories and file prefix: /d1/d2/fileprefix
// b. File with pure file prefix: fileprefix
// c. File with pure parent directories: /d1/d2/
// When recursive is true, sub-directories are enumerated too (with an empty prefix,
// since the prefix only constrains the top level).
func enumerateDirectoriesAndFilesInShare(ctx context.Context, srcDirURL azfile.DirectoryURL,
	fileOrDirPrefix string, recursive bool,
	filter func(fileItem azfile.FileItem, fileURL azfile.FileURL) bool,
	callback func(fileItem azfile.FileItem, fileURL azfile.FileURL) error) error {
	// Process the filePrefix: if the file prefix starts with parent directories,
	// the caller wishes to enumerate a specific sub-directory, so append the
	// sub-directory to the src directory URL.
	// e.g.: searching https://<azfile>/share/basedir, and prefix is /d1/d2/file
	// the new source directory URL will be https://<azfile>/share/basedir/d1/d2
	if len(fileOrDirPrefix) > 0 {
		if fileOrDirPrefix[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
			fileOrDirPrefix = fileOrDirPrefix[1:]
		}
		if lastSepIndex := strings.LastIndex(fileOrDirPrefix, common.AZCOPY_PATH_SEPARATOR_STRING); lastSepIndex > 0 {
			subDirStr := fileOrDirPrefix[:lastSepIndex]
			srcDirURL = srcDirURL.NewDirectoryURL(subDirStr)
			fileOrDirPrefix = fileOrDirPrefix[lastSepIndex+1:]
		}
	}
	// After preprocessing, the file prefix no longer contains '/'. It is the prefix
	// of files or dirs at the current directory level.
	for marker := (azfile.Marker{}); marker.NotDone(); {
		listDirResp, err := srcDirURL.ListFilesAndDirectoriesSegment(ctx, marker,
			azfile.ListFilesAndDirectoriesOptions{Prefix: fileOrDirPrefix})
		if err != nil {
			return fmt.Errorf("cannot list files and directories, %v", err)
		}
		// Process the files returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, fileItem := range listDirResp.FileItems {
			tmpFileURL := srcDirURL.NewFileURL(fileItem.Name)
			if !filter(fileItem, tmpFileURL) {
				continue
			}
			if err := callback(fileItem, tmpFileURL); err != nil {
				return err
			}
		}
		// Process the directories if the recursive mode is on
		if recursive {
			for _, dirItem := range listDirResp.DirectoryItems {
				// Recurse with prefix set to "".
				// Bug fix: the recursive call's error was previously discarded,
				// silently swallowing listing/callback failures in sub-directories.
				if err := enumerateDirectoriesAndFilesInShare(
					ctx,
					srcDirURL.NewDirectoryURL(dirItem.Name),
					"",
					recursive,
					filter,
					callback); err != nil {
					return err
				}
			}
		}
		marker = listDirResp.NextMarker
	}
	return nil
}
//////////////////////////////////////////////////////////////////////////////////////////
// ADLS Gen2 service enumerators.
//////////////////////////////////////////////////////////////////////////////////////////
// enumerateFilesInADLSGen2Directory recursively enumerates files in an ADLS Gen2
// directory, invoking callback for each file accepted by filter. Paging is driven
// by the x-ms-continuation token; an empty token terminates the enumeration.
func enumerateFilesInADLSGen2Directory(ctx context.Context, directoryURL azbfs.DirectoryURL,
	filter func(fileItem azbfs.Path) bool,
	callback func(fileItem azbfs.Path) error) error {
	for marker := ""; ; {
		resp, err := directoryURL.ListDirectorySegment(ctx, &marker, true)
		if err != nil {
			return fmt.Errorf("cannot list files, %v", err)
		}
		// Process the files returned in this result segment.
		for _, filePath := range resp.Files() {
			if !filter(filePath) {
				continue
			}
			if err := callback(filePath); err != nil {
				return err
			}
		}
		// An empty continuation token means there are no further segments.
		if marker = resp.XMsContinuation(); marker == "" {
			return nil
		}
	}
}
//////////////////////////////////////////////////////////////////////////////////////////
// S3 service enumerators.
//////////////////////////////////////////////////////////////////////////////////////////
// enumerateBucketsInServiceWithMinio walks the given S3 buckets, invoking callback
// for every bucket accepted by filter. The first callback error aborts the
// enumeration and is returned.
func enumerateBucketsInServiceWithMinio(bucketInfos []minio.BucketInfo,
	filter func(bucketInfo minio.BucketInfo) bool,
	callback func(bucketInfo minio.BucketInfo) error) error {
	for _, info := range bucketInfos {
		if filter(info) {
			if err := callback(info); err != nil {
				return err
			}
		}
	}
	return nil
}
// enumerateObjectsInBucketWithMinio recursively enumerates objects in the bucket
// whose keys start with objectNamePrefix, invoking callback for every object
// accepted by filter. Listing errors and the first callback error abort the
// enumeration.
func enumerateObjectsInBucketWithMinio(ctx context.Context, s3Client *minio.Client, bucketName, objectNamePrefix string,
	filter func(objectInfo minio.ObjectInfo) bool,
	callback func(objectInfo minio.ObjectInfo) error) error {
	// ListObjectsV2 streams results over a channel; closing ctx.Done() stops the listing.
	objectCh := s3Client.ListObjectsV2(bucketName, objectNamePrefix, true, ctx.Done())
	for info := range objectCh {
		// A listing failure is reported inline as an entry with Err set.
		if info.Err != nil {
			return fmt.Errorf("cannot list objects, %v", info.Err)
		}
		if !filter(info) {
			continue
		}
		if err := callback(info); err != nil {
			return err
		}
	}
	return nil
}
<file_sep>package cmd
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// copyS2SMigrationBlobEnumerator enumerates blob source, and submit request for copy blob to N,
// where N stands for blob/file/blobFS (Currently only blob is supported).
// The source could be a single blob/container/blob account.
type copyS2SMigrationBlobEnumerator struct {
	copyS2SMigrationEnumeratorBase
	// srcBlobPipeline is the HTTP pipeline used for source Blob service requests.
	srcBlobPipeline pipeline.Pipeline
	// srcBlobURLPartExtension holds the parsed parts of the source blob URL.
	srcBlobURLPartExtension blobURLPartsExtension
}
// initEnumerator performs the common enumerator initialization, then prepares the
// source-side blob pipeline and parses the source URL parts.
func (e *copyS2SMigrationBlobEnumerator) initEnumerator(ctx context.Context, cca *cookedCopyCmdArgs) (err error) {
	if err = e.initEnumeratorCommon(ctx, cca); err != nil {
		return err
	}
	// append the sas at the end of query params.
	e.sourceURL = gCopyUtil.appendQueryParamToUrl(e.sourceURL, cca.sourceSAS)
	e.destURL = gCopyUtil.appendQueryParamToUrl(e.destURL, cca.destinationSAS)
	// Create pipeline for source Blob service.
	// For copy source with blob type, only anonymous credential is supported now(i.e. SAS or public).
	// So directly create anonymous credential for source.
	// Note: If traditional copy(download first, then upload need be supported), more logic should be added to parse and validate
	// credential for both source and destination.
	e.srcBlobPipeline, err = createBlobPipeline(ctx,
		common.CredentialInfo{CredentialType: common.ECredentialType.Anonymous()})
	if err != nil {
		return err
	}
	e.srcBlobURLPartExtension = blobURLPartsExtension{azblob.NewBlobURLParts(*e.sourceURL)}
	return nil
}
// enumerate is the entry point for blob-to-N S2S copy. It classifies the source as
// a single blob (Case-1), a whole account (Case-2), or a container/virtual
// directory (Case-3), queues the corresponding transfers, and dispatches the
// final job part.
func (e *copyS2SMigrationBlobEnumerator) enumerate(cca *cookedCopyCmdArgs) error {
	ctx := context.TODO()
	if err := e.initEnumerator(ctx, cca); err != nil {
		return err
	}
	// Case-1: Source is a single blob
	// Verify if source is a single blob
	srcBlobURL := azblob.NewBlobURL(*e.sourceURL, e.srcBlobPipeline)
	if e.srcBlobURLPartExtension.isBlobSyntactically() {
		if blobProperties, err := srcBlobURL.GetProperties(ctx, azblob.BlobAccessConditions{}); err == nil {
			if e.isDestServiceSyntactically() {
				return errSingleToAccountCopy
			}
			// If the destination is a bucket (or ends with a path separator), append
			// the source file name so the blob keeps its name at the destination.
			if endWithSlashOrBackSlash(e.destURL.Path) || e.isDestBucketSyntactically() {
				fileName := gCopyUtil.getFileNameFromPath(e.srcBlobURLPartExtension.BlobName)
				*e.destURL = urlExtension{*e.destURL}.generateObjectPath(fileName)
			}
			err := e.createDestBucket(ctx, *e.destURL, nil)
			if err != nil {
				return err
			}
			// directly use destURL as destination
			if err := e.addBlobToNTransfer2(srcBlobURL.URL(), *e.destURL, blobProperties, cca); err != nil {
				return err
			}
			return e.dispatchFinalPart(cca)
		} else {
			// Single-blob validation failed; decide whether to abort or fall
			// through to the account/container cases below.
			if isFatal := handleSingleFileValidationErrorForBlob(err); isFatal {
				return err
			}
		}
	}
	// Case-2: Source is account level, e.g.:
	// a: https://<blob-service>/
	// b: https://<blob-service>/containerprefix*/vd/blob
	if isAccountLevel, containerPrefix := e.srcBlobURLPartExtension.isBlobAccountLevelSearch(); isAccountLevel {
		glcm.Info(infoCopyFromAccount)
		if !cca.recursive {
			return fmt.Errorf("cannot copy the entire account without recursive flag. Please use --recursive flag")
		}
		// Validate If destination is service level account.
		if err := e.validateDestIsService(ctx, *e.destURL); err != nil {
			return err
		}
		srcServiceURL := azblob.NewServiceURL(e.srcBlobURLPartExtension.getServiceURL(), e.srcBlobPipeline)
		blobPrefix, blobNamePattern, _ := e.srcBlobURLPartExtension.searchPrefixFromBlobURL()
		// List containers and add transfers for these containers.
		if err := e.addTransferFromAccount(ctx, srcServiceURL, *e.destURL,
			containerPrefix, blobPrefix, blobNamePattern, cca); err != nil {
			return err
		}
	} else { // Case-3: Source is a blob container or directory
		glcm.Info(infoCopyFromContainerDirectoryListOfFiles)
		blobPrefix, blobNamePattern, isWildcardSearch := e.srcBlobURLPartExtension.searchPrefixFromBlobURL()
		if blobNamePattern == "*" && !cca.recursive && !isWildcardSearch {
			return fmt.Errorf("cannot copy the entire container or directory without recursive flag. Please use --recursive flag")
		}
		// create bucket for destination, in case bucket doesn't exist.
		if err := e.createDestBucket(ctx, *e.destURL, nil); err != nil {
			return err
		}
		if err := e.addTransfersFromContainer(ctx,
			azblob.NewContainerURL(e.srcBlobURLPartExtension.getContainerURL(), e.srcBlobPipeline),
			*e.destURL,
			blobPrefix,
			blobNamePattern,
			e.srcBlobURLPartExtension.getParentSourcePath(),
			false,
			isWildcardSearch,
			cca); err != nil {
			return err
		}
	}
	// If part number is 0 && number of transfer queued is 0
	// it means that no job part has been dispatched and there are no
	// transfer in Job to dispatch a JobPart.
	if e.PartNum == 0 && len(e.Transfers) == 0 {
		return fmt.Errorf("no transfer queued to copy. Please verify the source / destination")
	}
	// dispatch the JobPart as Final Part of the Job
	return e.dispatchFinalPart(cca)
}
// addTransferFromAccount enumerates containers with the given prefix, creates the
// matching destination bucket for each, and adds matched blobs into the transfer list.
func (e *copyS2SMigrationBlobEnumerator) addTransferFromAccount(ctx context.Context,
	srcServiceURL azblob.ServiceURL, destBaseURL url.URL,
	containerPrefix, blobPrefix, blobNamePattern string, cca *cookedCopyCmdArgs) error {
	return enumerateContainersInAccount(
		ctx,
		srcServiceURL,
		containerPrefix,
		func(containerItem azblob.ContainerItem) error {
			// Whatever the destination type is, it should be equivalent to account level,
			// so directly append container name to it.
			tmpDestURL := urlExtension{URL: destBaseURL}.generateObjectPath(containerItem.Name)
			// create bucket for destination, in case bucket doesn't exist.
			if err := e.createDestBucket(ctx, tmpDestURL, nil); err != nil {
				return err
			}
			// Two cases for exclude/include which need to match container names in account:
			// a. https://<blobservice>/container*/blob*.vhd
			// b. https://<blobservice>/ which equals to https://<blobservice>/*
			return e.addTransfersFromContainer(
				ctx,
				srcServiceURL.NewContainerURL(containerItem.Name),
				tmpDestURL,
				blobPrefix,
				blobNamePattern,
				"",
				true,
				true,
				cca)
		})
}
// addTransfersFromContainer enumerates blobs in the container and adds each
// matched blob into the transfer list.
// blobNamePrefix narrows the server-side listing; blobNamePattern is matched
// client-side and may contain wildcards. includExcludeContainer selects whether
// include/exclude patterns match against "container/blobName" rather than just
// the blob name. isWildcardSearch changes how the destination-relative path is derived.
func (e *copyS2SMigrationBlobEnumerator) addTransfersFromContainer(ctx context.Context, srcContainerURL azblob.ContainerURL, destBaseURL url.URL,
	blobNamePrefix, blobNamePattern, parentSourcePath string, includExcludeContainer, isWildcardSearch bool, cca *cookedCopyCmdArgs) error {
	blobFilter := func(blobItem azblob.BlobItem) bool {
		// If the blobName doesn't matches the blob name pattern, then blob is not included
		// queued for transfer.
		if !gCopyUtil.matchBlobNameAgainstPattern(blobNamePattern, blobItem.Name, cca.recursive) {
			return false
		}
		includeExcludeMatchPath := common.IffString(includExcludeContainer,
			azblob.NewBlobURLParts(srcContainerURL.URL()).ContainerName+"/"+blobItem.Name,
			blobItem.Name)
		// Check the blob should be included or not.
		if !gCopyUtil.resourceShouldBeIncluded(parentSourcePath, e.Include, includeExcludeMatchPath) {
			return false
		}
		// Check the blob should be excluded or not.
		if gCopyUtil.resourceShouldBeExcluded(parentSourcePath, e.Exclude, includeExcludeMatchPath) {
			return false
		}
		// check if blobType of the current blob is present in the list of blob type to exclude.
		for _, blobType := range e.ExcludeBlobType {
			if blobItem.Properties.BlobType == blobType {
				return false
			}
		}
		return true
	}
	// enumerate blob in containers, and add matched blob into transfer.
	return enumerateBlobsInContainer(
		ctx,
		srcContainerURL,
		blobNamePrefix,
		blobFilter,
		func(blobItem azblob.BlobItem) error {
			var blobRelativePath = ""
			// As downloading logic temporarily, refactor after scenario ensured.
			if isWildcardSearch {
				// Strip everything up to and including the last path separator of the prefix.
				blobRelativePath = strings.Replace(blobItem.Name, blobNamePrefix[:strings.LastIndex(blobNamePrefix, common.AZCOPY_PATH_SEPARATOR_STRING)+1], "", 1)
			} else {
				blobRelativePath = gCopyUtil.getRelativePath(blobNamePrefix, blobItem.Name)
			}
			return e.addBlobToNTransfer(
				srcContainerURL.NewBlobURL(blobItem.Name).URL(),
				urlExtension{URL: destBaseURL}.generateObjectPath(blobRelativePath),
				&blobItem.Properties,
				blobItem.Metadata,
				cca)
		})
}
// addBlobToNTransfer queues a transfer for a blob discovered via container listing
// (azblob.BlobProperties). Both URLs are stored with their SAS stripped.
// NOTE(review): the Content* pointer fields below are dereferenced without nil
// checks — this assumes the listing always populates them; confirm against the SDK.
func (e *copyS2SMigrationBlobEnumerator) addBlobToNTransfer(srcURL, destURL url.URL, properties *azblob.BlobProperties, metadata azblob.Metadata,
	cca *cookedCopyCmdArgs) error {
	return e.addTransfer(
		common.CopyTransfer{
			Source:             gCopyUtil.stripSASFromBlobUrl(srcURL).String(),
			Destination:        gCopyUtil.stripSASFromBlobUrl(destURL).String(),
			LastModifiedTime:   properties.LastModified,
			SourceSize:         *properties.ContentLength,
			ContentType:        *properties.ContentType,
			ContentEncoding:    *properties.ContentEncoding,
			ContentDisposition: *properties.ContentDisposition,
			ContentLanguage:    *properties.ContentLanguage,
			CacheControl:       *properties.CacheControl,
			ContentMD5:         properties.ContentMD5,
			Metadata:           common.FromAzBlobMetadataToCommonMetadata(metadata),
			BlobType:           properties.BlobType,
			BlobTier:           e.getAccessTier(properties.AccessTier, cca.s2sPreserveAccessTier),
		},
		cca)
}
// addBlobToNTransfer2 queues a transfer for a single blob whose properties came
// from a GetProperties response (the single-blob Case-1 path), as opposed to
// addBlobToNTransfer which works from listing results.
func (e *copyS2SMigrationBlobEnumerator) addBlobToNTransfer2(srcURL, destURL url.URL, properties *azblob.BlobGetPropertiesResponse,
	cca *cookedCopyCmdArgs) error {
	return e.addTransfer(
		common.CopyTransfer{
			Source:             gCopyUtil.stripSASFromBlobUrl(srcURL).String(),
			Destination:        gCopyUtil.stripSASFromBlobUrl(destURL).String(),
			LastModifiedTime:   properties.LastModified(),
			SourceSize:         properties.ContentLength(),
			ContentType:        properties.ContentType(),
			ContentEncoding:    properties.ContentEncoding(),
			ContentDisposition: properties.ContentDisposition(),
			ContentLanguage:    properties.ContentLanguage(),
			CacheControl:       properties.CacheControl(),
			ContentMD5:         properties.ContentMD5(),
			Metadata:           common.FromAzBlobMetadataToCommonMetadata(properties.NewMetadata()),
			BlobType:           properties.BlobType(),
			BlobTier:           e.getAccessTier(azblob.AccessTierType(properties.AccessTier()), cca.s2sPreserveAccessTier),
		},
		cca)
}
// getAccessTier returns the tier to record on the transfer: the source's tier when
// s2sPreserveAccessTier is set, otherwise AccessTierNone.
func (e *copyS2SMigrationBlobEnumerator) getAccessTier(accessTier azblob.AccessTierType, s2sPreserveAccessTier bool) azblob.AccessTierType {
	if s2sPreserveAccessTier {
		return accessTier
	}
	return azblob.AccessTierNone
}
// addTransfer forwards the composed transfer to the shared package-level helper,
// which batches transfers into job parts.
func (e *copyS2SMigrationBlobEnumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	return addTransfer(&(e.CopyJobPartOrderRequest), transfer, cca)
}

// dispatchFinalPart sends the remaining transfers as the final job part.
func (e *copyS2SMigrationBlobEnumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	return dispatchFinalPart(&(e.CopyJobPartOrderRequest), cca)
}

// partNum reports the current job part number of this enumerator.
func (e *copyS2SMigrationBlobEnumerator) partNum() common.PartNumber {
	return e.PartNum
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"fmt"
"net/url"
"os"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
)
// blobFSUploader uploads a single local file to an Azure Blob FS (ADLS Gen2)
// destination. One instance exists per transfer; fields are set at
// construction (creationTimeHeaders is set later, in Prologue) and then read
// by the generated chunk functions.
type blobFSUploader struct {
	jptm      IJobPartTransferMgr // transfer manager coordinating this transfer
	fileURL   azbfs.FileURL       // destination file
	chunkSize uint32              // size of each uploaded range
	numChunks uint32              // total number of chunks for the source
	pipeline  pipeline.Pipeline
	pacer     *pacer      // paces request bodies to respect throughput caps
	md5Channel chan []byte // receives the whole-file MD5 once computed (consumed in Epilogue)
	creationTimeHeaders *azbfs.BlobFSHTTPHeaders // headers captured in Prologue, replayed at flush time
}
// newBlobFSUploader constructs the sender used to upload a local file to a
// Blob FS (ADLS Gen2) endpoint. It validates that the destination URL parses,
// stats the source to reject directories (not yet supported — see panic
// below), and precomputes the chunk size and count.
// Note: the sip parameter is currently unused by this constructor.
func newBlobFSUploader(jptm IJobPartTransferMgr, destination string, p pipeline.Pipeline, pacer *pacer, sip ISourceInfoProvider) (ISenderBase, error) {
	info := jptm.Info()

	// make sure URL is parsable
	destURL, err := url.Parse(destination)
	if err != nil {
		return nil, err
	}

	// Get the file/dir Info to determine whether source is a file or directory
	// since url to upload files and directories is different
	fInfo, err := os.Stat(info.Source)
	if err != nil {
		return nil, err
	}
	if fInfo.IsDir() {
		panic("directory transfers not yet supported")
		// TODO perhaps implement this by returning a different uploader type...
		// Note that when doing so, remember our rule that all uploaders process 1 chunk
		// The returned type will just do one pseudo chunk, in which it creates the directory
		/* for the record, here is what the chunkFunc used to do, in the directory case - even though that code was never actually called in the current release,
		   because, as at 1 Jan 2019, we don't actually pass in directories here.  But if we do, this code below could be repacked into an uploader

		if fInfo.IsDir() {
			dirUrl := azbfs.NewDirectoryURL(*dUrl, p)
			_, err := dirUrl.Create(jptm.Context())
			if err != nil {
				// Note: As description in document https://docs.microsoft.com/en-us/rest/api/storageservices/datalakestoragegen2/path/create,
				// the default behavior of creating directory is overwrite, unless there is lease, or destination exists, and there is If-None-Match:"*".
				// Check for overwrite flag correspondingly, if overwrite is true, and fail to recreate directory, report error.
				// If overwrite is false, and fail to recreate directoroy, report directory already exists.
				if !jptm.IsForceWriteTrue() {
					if stgErr, ok := err.(azbfs.StorageError); ok && stgErr.Response().StatusCode == http.StatusConflict {
						jptm.LogUploadError(info.Source, info.Destination, "Directory already exists ", 0)
						// Mark the transfer as failed with ADLSGen2PathAlreadyExistsFailure
						jptm.SetStatus(common.ETransferStatus.ADLSGen2PathAlreadyExistsFailure())
						jptm.ReportTransferDone()
						return
					}
				}

				status, msg := ErrorEx{err}.ErrorCodeAndString()
				jptm.LogUploadError(info.Source, info.Destination, "Directory creation error "+msg, status)
				if jptm.WasCanceled() {
					transferDone(jptm.TransferStatus())
				} else {
					transferDone(common.ETransferStatus.Failed())
				}
				return
			}
			if jptm.ShouldLog(pipeline.LogInfo) {
				jptm.Log(pipeline.LogInfo, "UPLOAD SUCCESSFUL")
			}
			transferDone(common.ETransferStatus.Success())
			return
		}
		*/
	}

	// compute chunk size and number of chunks
	chunkSize := info.BlockSize
	numChunks := getNumChunks(info.SourceSize, chunkSize)

	return &blobFSUploader{
		jptm:       jptm,
		fileURL:    azbfs.NewFileURL(*destURL, p),
		chunkSize:  chunkSize,
		numChunks:  numChunks,
		pipeline:   p,
		pacer:      pacer,
		md5Channel: newMd5Channel(),
	}, nil
}
// ChunkSize returns the size of each uploaded range, fixed at construction.
func (u *blobFSUploader) ChunkSize() uint32 {
	return u.chunkSize
}
// NumChunks returns the total chunk count computed for the source file.
func (u *blobFSUploader) NumChunks() uint32 {
	return u.numChunks
}
// Md5Channel returns the channel on which the whole-file MD5 is delivered;
// Epilogue consumes it when flushing the file.
func (u *blobFSUploader) Md5Channel() chan<- []byte {
	// TODO: can we support this? And when? Right now, we are returning it, but never using it ourselves
	return u.md5Channel
}
// RemoteFileExists reports whether the destination file already exists, by
// issuing a GetProperties call and interpreting its result/error.
func (u *blobFSUploader) RemoteFileExists() (bool, error) {
	return remoteObjectExists(u.fileURL.GetProperties(u.jptm.Context()))
}
// Prologue creates the destination file before any chunk is uploaded. It also
// captures the BlobFS HTTP headers (derived from the leading bytes of the
// source) so Epilogue can replay them when flushing the file.
func (u *blobFSUploader) Prologue(state common.PrologueState) {
	jptm := u.jptm

	h := jptm.BfsDstData(state.LeadingBytes)
	u.creationTimeHeaders = &h

	// Create file with the source size
	_, err := u.fileURL.Create(u.jptm.Context(), h) // note that "create" actually calls "create path"
	if err != nil {
		u.jptm.FailActiveUpload("Creating file", err)
		return
	}
}
// GenerateUploadFunc returns the chunkFunc that uploads one byte range of the
// file. Zero-size files are a no-op here because Prologue already created the
// (empty) file. blockIndex and chunkIsWholeFile are not used by this uploader.
func (u *blobFSUploader) GenerateUploadFunc(id common.ChunkID, blockIndex int32, reader common.SingleChunkReader, chunkIsWholeFile bool) chunkFunc {
	return createSendToRemoteChunkFunc(u.jptm, id, func() {
		jptm := u.jptm

		if jptm.Info().SourceSize == 0 {
			// nothing to do, since this is a dummy chunk in a zero-size file, and the prologue will have done all the real work
			return
		}

		// upload the byte range represented by this chunk
		jptm.LogChunkStatus(id, common.EWaitReason.Body())
		body := newLiteRequestBodyPacer(reader, u.pacer) // wrap the reader so the pacer can throttle it
		_, err := u.fileURL.AppendData(jptm.Context(), id.OffsetInFile, body) // note: AppendData is really UpdatePath with "append" action
		if err != nil {
			jptm.FailActiveUpload("Uploading range", err)
			return
		}
	})
}
// Epilogue finalizes the transfer. On success (TransferStatus > 0) it reads
// the whole-file MD5 from md5Channel and flushes the appended data, committing
// the file with the headers captured in Prologue. If the transfer failed or
// was cancelled (including a failure inside the flush above), it deletes the
// partially-written destination file.
func (u *blobFSUploader) Epilogue() {
	jptm := u.jptm

	// flush
	if jptm.TransferStatus() > 0 {
		md5Hash, ok := <-u.md5Channel
		if ok {
			//Type assertions aren't my favorite thing to do but it does work when you need it to.
			_, err := u.fileURL.FlushData(jptm.Context(), jptm.Info().SourceSize, md5Hash, *u.creationTimeHeaders)
			if err != nil {
				jptm.FailActiveUpload("Flushing data", err)
				// don't return, since need cleanup below
			}
		} else {
			// channel was closed without delivering a hash
			jptm.FailActiveUpload("Getting hash", errNoHash)
			// don't return, since need cleanup below
		}
	}

	// Cleanup if status is now failed
	// (re-checked here because the flush above may have just failed the transfer)
	if jptm.TransferStatus() <= 0 {
		// If the transfer status is less than or equal to 0
		// then transfer was either failed or cancelled
		// the file created in share needs to be deleted, since it's
		// contents will be at an unknown stage of partial completeness
		deletionContext, cancelFn := context.WithTimeout(context.Background(), 2*time.Minute)
		defer cancelFn()
		_, err := u.fileURL.Delete(deletionContext)
		if err != nil {
			jptm.Log(pipeline.LogError, fmt.Sprintf("error deleting the (incomplete) file %s. Failed with error %s", u.fileURL.String(), err.Error()))
		}
	}
}
<file_sep>package cmd
import (
"os"
"path"
)
// GetAzCopyAppPath returns the path of Azcopy in local appdata.
// It ensures the ".azcopy" folder exists under the user's profile directory
// and returns "" when the folder cannot be created.
// NOTE(review): reads USERPROFILE, which is only populated on Windows —
// presumably this file is Windows-specific (or HOME handling lives in a
// sibling file); confirm before reusing cross-platform.
func GetAzCopyAppPath() string {
	userProfile := os.Getenv("USERPROFILE")
	azcopyAppDataFolder := path.Join(userProfile, ".azcopy")
	// An already-existing folder is fine (os.IsExist); any other Mkdir error
	// means there is no usable app-data location.
	if err := os.Mkdir(azcopyAppDataFolder, os.ModeDir); err != nil && !os.IsExist(err) {
		return ""
	}
	return azcopyAppDataFolder
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common_test
import (
"github.com/Azure/azure-storage-azcopy/common"
chk "gopkg.in/check.v1"
)
// feSteModelsTestSuite holds the tests for the shared front-end/STE models.
type feSteModelsTestSuite struct{}

// Register the suite with the gocheck framework.
var _ = chk.Suite(&feSteModelsTestSuite{})
// TestEnhanceJobStatusInfo checks every combination of the three boolean
// inputs to EnhanceJobStatusInfo. From the expectations below the arguments
// are presumably (somethingSkipped, somethingFailed, somethingSucceeded) —
// TODO confirm against the EnhanceJobStatusInfo signature.
func (s *feSteModelsTestSuite) TestEnhanceJobStatusInfo(c *chk.C) {
	status := common.EJobStatus

	// skipped + failed dominate, regardless of successes
	status = status.EnhanceJobStatusInfo(true, true, true)
	c.Assert(status, chk.Equals, common.EJobStatus.CompletedWithErrorsAndSkipped())

	status = status.EnhanceJobStatusInfo(true, true, false)
	c.Assert(status, chk.Equals, common.EJobStatus.CompletedWithErrorsAndSkipped())

	// skipped only
	status = status.EnhanceJobStatusInfo(true, false, true)
	c.Assert(status, chk.Equals, common.EJobStatus.CompletedWithSkipped())

	status = status.EnhanceJobStatusInfo(true, false, false)
	c.Assert(status, chk.Equals, common.EJobStatus.CompletedWithSkipped())

	// failed only: with successes -> errors; without -> outright failure
	status = status.EnhanceJobStatusInfo(false, true, true)
	c.Assert(status, chk.Equals, common.EJobStatus.CompletedWithErrors())

	status = status.EnhanceJobStatusInfo(false, true, false)
	c.Assert(status, chk.Equals, common.EJobStatus.Failed())

	status = status.EnhanceJobStatusInfo(false, false, true)
	c.Assert(status, chk.Equals, common.EJobStatus.Completed())

	// No-op if all are false
	status = status.EnhanceJobStatusInfo(false, false, false)
	c.Assert(status, chk.Equals, common.EJobStatus.Completed())
}
// TestIsJobDone verifies that IsJobDone reports false for every in-flight
// status and true for every terminal status.
// Fix: the original asserted CompletedWithErrors twice in a row (a
// copy-paste duplicate); the redundant repetition has been removed — every
// distinct status is still covered exactly once.
func (s *feSteModelsTestSuite) TestIsJobDone(c *chk.C) {
	// in-flight statuses: not done
	status := common.EJobStatus.InProgress()
	c.Assert(status.IsJobDone(), chk.Equals, false)

	status = status.Paused()
	c.Assert(status.IsJobDone(), chk.Equals, false)

	status = status.Cancelling()
	c.Assert(status.IsJobDone(), chk.Equals, false)

	// terminal statuses: done
	status = status.Cancelled()
	c.Assert(status.IsJobDone(), chk.Equals, true)

	status = status.Completed()
	c.Assert(status.IsJobDone(), chk.Equals, true)

	status = status.CompletedWithErrors()
	c.Assert(status.IsJobDone(), chk.Equals, true)

	status = status.CompletedWithSkipped()
	c.Assert(status.IsJobDone(), chk.Equals, true)

	status = status.CompletedWithErrorsAndSkipped()
	c.Assert(status.IsJobDone(), chk.Equals, true)

	status = status.Failed()
	c.Assert(status.IsJobDone(), chk.Equals, true)
}
// getInvalidMetadataSample builds a metadata map mixing keys that are invalid
// for Azure (leading digit, special characters, dashes) with keys that are
// fully valid.
func getInvalidMetadataSample() common.Metadata {
	return common.Metadata{
		// number could not be first char for azure metadata key.
		"1abc": "v:1abc",
		// special char
		"a!@#": "v:a!@#",
		// dashes are invalid too
		"a-metadata-samplE": "v:a-metadata-samplE",
		// valid metadata
		"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRUSTUVWXYZ1234567890_": "v:abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRUSTUVWXYZ1234567890_",
		"Am":   "v:Am",
		"_123": "v:_123",
	}
}
// getValidMetadataSample returns a metadata map with a single, fully valid
// Azure metadata key.
func getValidMetadataSample() common.Metadata {
	return common.Metadata{"Key": "value"}
}
// validateMapEqual asserts that the two maps hold exactly the same
// key/value pairs (same size, and every entry of m1 present in m2).
func validateMapEqual(c *chk.C, m1 map[string]string, m2 map[string]string) {
	c.Assert(len(m1), chk.Equals, len(m2))
	for key, expectedValue := range m1 {
		c.Assert(m2[key], chk.Equals, expectedValue)
	}
}
// TestMetadataExcludeInvalidKey verifies that ExcludeInvalidKey splits a
// metadata map into retained (valid-key) and excluded (invalid-key) parts,
// and correctly reports whether any invalid key existed.
func (s *feSteModelsTestSuite) TestMetadataExcludeInvalidKey(c *chk.C) {
	mInvalid := getInvalidMetadataSample()
	mValid := getValidMetadataSample()

	// mixed sample: invalid keys must be excluded, valid ones retained
	retainedMetadata, excludedMetadata, invalidKeyExists := mInvalid.ExcludeInvalidKey()
	c.Assert(invalidKeyExists, chk.Equals, true)
	validateMapEqual(c, retainedMetadata,
		map[string]string{"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRUSTUVWXYZ1234567890_": "v:abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRUSTUVWXYZ1234567890_",
			"Am": "v:Am", "_123": "v:_123"})
	validateMapEqual(c, excludedMetadata,
		map[string]string{"1abc": "v:1abc", "a!@#": "v:a!@#", "a-metadata-samplE": "v:a-metadata-samplE"})

	// fully valid sample: nothing excluded
	retainedMetadata, excludedMetadata, invalidKeyExists = mValid.ExcludeInvalidKey()
	c.Assert(invalidKeyExists, chk.Equals, false)
	validateMapEqual(c, retainedMetadata, map[string]string{"Key": "value"})
	c.Assert(len(excludedMetadata), chk.Equals, 0)
	c.Assert(retainedMetadata.ConcatenatedKeys(), chk.Equals, "'Key' ")
}
// TestMetadataResolveInvalidKey verifies that ResolveInvalidKey renames
// invalid keys (prefixing "rename_" for the value and "rename_key_" to record
// the original key) while leaving valid keys untouched.
func (s *feSteModelsTestSuite) TestMetadataResolveInvalidKey(c *chk.C) {
	mInvalid := getInvalidMetadataSample()
	mValid := getValidMetadataSample()

	// invalid keys get a rename_* entry plus a rename_key_* entry that
	// preserves the original key text
	resolvedMetadata, err := mInvalid.ResolveInvalidKey()
	c.Assert(err, chk.IsNil)
	validateMapEqual(c, resolvedMetadata,
		map[string]string{"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRUSTUVWXYZ1234567890_": "v:abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRUSTUVWXYZ1234567890_",
			"Am": "v:Am", "_123": "v:_123", "rename_1abc": "v:1abc", "rename_key_1abc": "1abc", "rename_a___": "v:a!@#", "rename_key_a___": "a!@#",
			"rename_a_metadata_samplE": "v:a-metadata-samplE", "rename_key_a_metadata_samplE": "a-metadata-samplE"})

	// fully valid metadata passes through unchanged
	resolvedMetadata, err = mValid.ResolveInvalidKey()
	c.Assert(err, chk.IsNil)
	validateMapEqual(c, resolvedMetadata, map[string]string{"Key": "value"})
}
// In this phase we keep the resolve logic easy, and whenever there is key resolving collision found, error reported.
func (s *feSteModelsTestSuite) TestMetadataResolveInvalidKeyNegative(c *chk.C) {
	// Each sample contains two keys whose resolved names collide, so
	// ResolveInvalidKey must report an error.
	collidingSamples := []common.Metadata{
		{"!": "!", "*": "*"},
		{"!": "!", "rename__": "rename__"},
		{"!": "!", "rename_key__": "rename_key__"},
	}

	for _, sample := range collidingSamples {
		_, err := sample.ResolveInvalidKey()
		c.Assert(err, chk.NotNil)
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common
import (
"net/url"
"strings"
chk "gopkg.in/check.v1"
)
// Hookup to the testing framework
// s3URLPartsTestSuite holds the tests for parsing S3 URLs into their parts.
type s3URLPartsTestSuite struct{}

var _ = chk.Suite(&s3URLPartsTestSuite{})
// TestS3URLParse exercises NewS3URLParts across the S3 URL flavors it must
// handle: virtual-hosted-style (bucket in the host), path-style (bucket in
// the path), regional endpoints, dual-stack endpoints, and versionId query
// strings — checking the decomposed parts and the round-tripped String().
func (s *s3URLPartsTestSuite) TestS3URLParse(c *chk.C) {
	// virtual-hosted style, no region, no key
	u, _ := url.Parse("http://bucket.s3.amazonaws.com")
	p, err := NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Host, chk.Equals, "bucket.s3.amazonaws.com")
	c.Assert(p.Endpoint, chk.Equals, "s3.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "bucket")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "http://bucket.s3.amazonaws.com")

	// trailing slash is normalized away from String()
	u, _ = url.Parse("http://bucket.s3.amazonaws.com/")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.BucketName, chk.Equals, "bucket")
	c.Assert(p.Endpoint, chk.Equals, "s3.amazonaws.com")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "http://bucket.s3.amazonaws.com")

	// regional endpoint with a nested object key
	u, _ = url.Parse("http://bucket.s3-aws-region.amazonaws.com/keydir/keysubdir/keyname")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-aws-region.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "bucket")
	c.Assert(p.ObjectKey, chk.Equals, "keydir/keysubdir/keyname")
	c.Assert(p.Region, chk.Equals, "aws-region")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "http://bucket.s3-aws-region.amazonaws.com/keydir/keysubdir/keyname")

	// regional endpoint, single-segment key
	u, _ = url.Parse("http://bucket.s3-aws-region.amazonaws.com/keyname")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-aws-region.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "bucket")
	c.Assert(p.ObjectKey, chk.Equals, "keyname")
	c.Assert(p.Region, chk.Equals, "aws-region")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "http://bucket.s3-aws-region.amazonaws.com/keyname")

	// a key with a trailing slash (directory-like) keeps the slash
	u, _ = url.Parse("http://bucket.s3-aws-region.amazonaws.com/keyname/")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-aws-region.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "bucket")
	c.Assert(p.ObjectKey, chk.Equals, "keyname/")
	c.Assert(p.Region, chk.Equals, "aws-region")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "http://bucket.s3-aws-region.amazonaws.com/keyname/")

	// dual stack
	u, _ = url.Parse("http://bucket.s3.dualstack.aws-region.amazonaws.com/keyname/")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3.dualstack.aws-region.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "bucket")
	c.Assert(p.ObjectKey, chk.Equals, "keyname/")
	c.Assert(p.Region, chk.Equals, "aws-region")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "http://bucket.s3.dualstack.aws-region.amazonaws.com/keyname/")

	// bare service endpoint: no bucket, no key, no region
	u, _ = url.Parse("https://s3.amazonaws.com")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3.amazonaws.com")

	u, _ = url.Parse("https://s3.amazonaws.com/")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3.amazonaws.com")

	// regional service endpoint, still no bucket
	u, _ = url.Parse("https://s3-ap-southeast-1.amazonaws.com/")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-ap-southeast-1.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "ap-southeast-1")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3-ap-southeast-1.amazonaws.com")

	// path-style: bucket appears as the first path segment
	u, _ = url.Parse("https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-ap-southeast-1.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "jiac-art-awsbucket01")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "ap-southeast-1")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01")

	u, _ = url.Parse("https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01/")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-ap-southeast-1.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "jiac-art-awsbucket01")
	c.Assert(p.ObjectKey, chk.Equals, "")
	c.Assert(p.Region, chk.Equals, "ap-southeast-1")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01")

	// path-style with an object key
	u, _ = url.Parse("https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01/Test.pdf")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-ap-southeast-1.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "jiac-art-awsbucket01")
	c.Assert(p.ObjectKey, chk.Equals, "Test.pdf")
	c.Assert(p.Region, chk.Equals, "ap-southeast-1")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01/Test.pdf")

	// '+' in the key is preserved, not decoded to a space
	u, _ = url.Parse("https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01/space+folder/Test.pdf")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3-ap-southeast-1.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "jiac-art-awsbucket01")
	c.Assert(p.ObjectKey, chk.Equals, "space+folder/Test.pdf")
	c.Assert(p.Region, chk.Equals, "ap-southeast-1")
	c.Assert(p.Version, chk.Equals, "")
	c.Assert(p.String(), chk.Equals, "https://s3-ap-southeast-1.amazonaws.com/jiac-art-awsbucket01/space+folder/Test.pdf")

	// Version testing
	u, _ = url.Parse("https://s3.ap-northeast-2.amazonaws.com/jiac-art-awsbucket02-versionenabled/Test.pdf?versionId=Cy0pgpqHDTR7RlMEwU_BxDVER2QN5lJJ")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3.ap-northeast-2.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "jiac-art-awsbucket02-versionenabled")
	c.Assert(p.ObjectKey, chk.Equals, "Test.pdf")
	c.Assert(p.Region, chk.Equals, "ap-northeast-2")
	c.Assert(p.Version, chk.Equals, "Cy0pgpqHDTR7RlMEwU_BxDVER2QN5lJJ")
	c.Assert(p.String(), chk.Equals, "https://s3.ap-northeast-2.amazonaws.com/jiac-art-awsbucket02-versionenabled/Test.pdf?versionId=Cy0pgpqHDTR7RlMEwU_BxDVER2QN5lJJ")

	// Version and dualstack testing
	u, _ = url.Parse("https://s3.dualstack.ap-northeast-2.amazonaws.com/jiac-art-awsbucket02-versionenabled/Test.pdf?versionId=Cy0pgpqHDTR7RlMEwU_BxDVER2QN5lJJ")
	p, err = NewS3URLParts(*u)
	c.Assert(err, chk.IsNil)
	c.Assert(p.Endpoint, chk.Equals, "s3.dualstack.ap-northeast-2.amazonaws.com")
	c.Assert(p.BucketName, chk.Equals, "jiac-art-awsbucket02-versionenabled")
	c.Assert(p.ObjectKey, chk.Equals, "Test.pdf")
	c.Assert(p.Region, chk.Equals, "ap-northeast-2")
	c.Assert(p.Version, chk.Equals, "Cy0pgpqHDTR7RlMEwU_BxDVER2QN5lJJ")
	c.Assert(p.String(), chk.Equals, "https://s3.dualstack.ap-northeast-2.amazonaws.com/jiac-art-awsbucket02-versionenabled/Test.pdf?versionId=Cy0pgpqHDTR7RlMEwU_BxDVER2QN5lJJ")
}
// TestS3URLParseNegative verifies that non-S3 hosts are rejected with the
// invalid-S3-URL error message.
func (s *s3URLPartsTestSuite) TestS3URLParseNegative(c *chk.C) {
	for _, rawURL := range []string{
		"http://bucket.amazonawstypo.com",
		"http://bucket.s3.amazonawstypo.com",
		"http://s3-test.blob.core.windows.net",
	} {
		u, _ := url.Parse(rawURL)
		_, err := NewS3URLParts(*u)
		c.Assert(err, chk.NotNil)
		c.Assert(strings.Contains(err.Error(), invalidS3URLErrorMessage), chk.Equals, true)
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"github.com/Azure/azure-storage-azcopy/common"
chk "gopkg.in/check.v1"
"strings"
)
// TestRemoveSingleBlob verifies that removing a single blob schedules exactly
// one transfer, including for names with path separators, non-ASCII
// characters, and percent/special characters.
func (s *cmdIntegrationSuite) TestRemoveSingleBlob(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	for _, blobName := range []string{"top/mid/low/singleblobisbest", "打麻将.txt", "%4509%4254$85140&"} {
		// set up the container with a single blob
		blobList := []string{blobName}
		scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
		c.Assert(containerURL, chk.NotNil)

		// set up interceptor
		mockedRPC := interceptor{}
		Rpc = mockedRPC.intercept
		mockedRPC.init()

		// construct the raw input to simulate user input
		rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, blobList[0])
		raw := getDefaultRemoveRawInput(rawBlobURLWithSAS.String(), true)

		runCopyAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)

			// note that when we are targeting single blobs, the relative path is empty ("") since the root path already points to the blob
			validateRemoveTransfersAreScheduled(c, true, []string{""}, mockedRPC)
		})
	}
}
// TestRemoveBlobsUnderContainer verifies removal at container scope: with
// --recursive every blob is scheduled for deletion; without it, only blobs at
// the top level (no path separator in the source) are scheduled.
func (s *cmdIntegrationSuite) TestRemoveBlobsUnderContainer(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultRemoveRawInput(rawContainerURLWithSAS.String(), true)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobList))

		// validate that the right transfers were sent
		validateRemoveTransfersAreScheduled(c, true, blobList, mockedRPC)
	})

	// turn off recursive, this time only top blobs should be deleted
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		c.Assert(len(mockedRPC.transfers), chk.Not(chk.Equals), len(blobList))

		// every scheduled source must be a top-level blob (no separator)
		for _, transfer := range mockedRPC.transfers {
			c.Assert(strings.Contains(transfer.Source, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
		}
	})
}
// TestRemoveBlobsUnderVirtualDir verifies removal rooted at a virtual
// directory: with --recursive everything under the prefix is scheduled (with
// the prefix shaved off the relative paths); without it, only the directory's
// immediate children are.
func (s *cmdIntegrationSuite) TestRemoveBlobsUnderVirtualDir(c *chk.C) {
	bsu := getBSU()
	vdirName := "vdir1/vdir2/vdir3/"

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, vdirName)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawVirtualDirectoryURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, vdirName)
	raw := getDefaultRemoveRawInput(rawVirtualDirectoryURLWithSAS.String(), true)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobList))

		// validate that the right transfers were sent
		expectedTransfers := scenarioHelper{}.shaveOffPrefix(blobList, vdirName)
		validateRemoveTransfersAreScheduled(c, true, expectedTransfers, mockedRPC)
	})

	// turn off recursive, this time only top blobs should be deleted
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		c.Assert(len(mockedRPC.transfers), chk.Not(chk.Equals), len(blobList))

		// every scheduled relative source must be directly under the vdir
		for _, transfer := range mockedRPC.transfers {
			c.Assert(strings.Contains(transfer.Source, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
		}
	})
}
// include flag limits the scope of the delete
// Only blobs matching the include patterns should be scheduled.
// NOTE(review): this (and the other include/exclude tests below) validates
// via validateDownloadTransfersAreScheduled, while the remove tests above use
// validateRemoveTransfersAreScheduled — confirm this is intentional.
func (s *cmdIntegrationSuite) TestRemoveWithIncludeFlag(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to include
	blobsToInclude := []string{"important.pdf", "includeSub/amazing.jpeg", "exactName"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToInclude)
	includeString := "*.pdf;*.jpeg;exactName"

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultRemoveRawInput(rawContainerURLWithSAS.String(), true)
	raw.include = includeString
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// only the blobs matching the include patterns should be scheduled
		validateDownloadTransfersAreScheduled(c, "", "", blobsToInclude, mockedRPC)
	})
}
// exclude flag limits the scope of the delete
// Blobs matching the exclude patterns must NOT be scheduled; everything else
// (the original blobList) must be.
func (s *cmdIntegrationSuite) TestRemoveWithExcludeFlag(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to exclude
	blobsToExclude := []string{"notGood.pdf", "excludeSub/lame.jpeg", "exactName"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToExclude)
	excludeString := "*.pdf;*.jpeg;exactName"

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultRemoveRawInput(rawContainerURLWithSAS.String(), true)
	raw.exclude = excludeString
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// only the original (non-excluded) blobs should be scheduled
		validateDownloadTransfersAreScheduled(c, "", "", blobList, mockedRPC)
	})
}
// include and exclude flag can work together to limit the scope of the delete
// Exclusions win over inclusions: blobs matching both pattern sets are NOT
// scheduled; only blobs matching include but not exclude are.
func (s *cmdIntegrationSuite) TestRemoveWithIncludeAndExcludeFlag(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to include
	blobsToInclude := []string{"important.pdf", "includeSub/amazing.jpeg"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToInclude)
	includeString := "*.pdf;*.jpeg;exactName"

	// add special blobs that we wish to exclude
	// note that the excluded files also match the include string
	blobsToExclude := []string{"sorry.pdf", "exclude/notGood.jpeg", "exactName", "sub/exactName"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToExclude)
	excludeString := "so*;not*;exactName"

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultRemoveRawInput(rawContainerURLWithSAS.String(), true)
	raw.include = includeString
	raw.exclude = excludeString
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// only included-and-not-excluded blobs should be scheduled
		validateDownloadTransfersAreScheduled(c, "", "", blobsToInclude, mockedRPC)
	})
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/Azure/azure-pipeline-go/pipeline"
"io"
"net/url"
"os"
"strings"
"time"
"io/ioutil"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
"github.com/spf13/cobra"
)
// upload related retry tuning used by the redirection (piping) pipelines
const uploadMaxTries = 5
const uploadTryTimeout = time.Minute * 10
const uploadRetryDelay = time.Second * 1
const uploadMaxRetryDelay = time.Second * 3

// download related retry tuning used by the redirection (piping) pipelines
const downloadMaxTries = 5
const downloadTryTimeout = time.Minute * 10
const downloadRetryDelay = time.Second * 1
const downloadMaxRetryDelay = time.Second * 3

// number of parallel buffers used when uploading from stdin
const pipingUploadParallelism = 5

// default block size (8 MiB) when piping and no block size was specified
const pipingDefaultBlockSize = 8 * 1024 * 1024

// sentinel location string for piped transfers
// NOTE(review): meaning inferred from the name — confirm against usages elsewhere in the package
const pipeLocation = "~pipe~"
// represents the raw copy command input from the user,
// exactly as it arrives from the command-line flags; cook() validates
// and converts it into the type-safe cookedCopyCmdArgs.
type rawCopyCmdArgs struct {
	// from arguments
	src    string // source, exactly as supplied on the command line
	dst    string // destination, exactly as supplied on the command line
	fromTo string // optional explicit from/to location pair; validated by validateFromTo in cook()
	//blobUrlForRedirection string

	// TODO remove after refactoring
	legacyInclude string // ';'-separated include entries, cooked into a map
	legacyExclude string // ';'-separated exclude entries, cooked into a map

	// new include/exclude only apply to file names
	// implemented for remove (and sync) only
	include string
	exclude string

	// filters from flags
	listOfFilesToCopy string // path to a json file listing the explicit files to copy
	recursive         bool
	followSymlinks    bool
	withSnapshots     bool
	// forceWrite flag is used to define the User behavior
	// to overwrite the existing blobs or not.
	forceWrite bool

	// options from flags
	blockSizeMB              uint32 // user-facing MB value; converted to bytes via blockSizeInBytes()
	metadata                 string
	contentType              string
	contentEncoding          string
	contentDisposition       string
	contentLanguage          string
	cacheControl             string
	noGuessMimeType          bool
	preserveLastModifiedTime bool
	putMd5                   bool
	md5ValidationOption      string // parsed into a common.HashValidationOption by cook()
	// defines the type of the blob at the destination in case of upload / account to account copy
	blobType        string
	blockBlobTier   string
	pageBlobTier    string
	background      bool
	output          string
	acl             string
	logVerbosity    string // parsed into a common.LogLevel by cook()
	cancelFromStdin bool
	// list of blobTypes to exclude while enumerating the transfer
	excludeBlobType string
	// whether user wants to preserve full properties during service to service copy, the default value is true.
	// For S3 and Azure File non-single file source, as list operation doesn't return full properties of objects/files,
	// to preserve full properties AzCopy needs to send one additional request per object/file.
	s2sPreserveProperties bool
	// useful when preserveS3Properties set to true, enables get S3 objects' or Azure files' properties during s2s copy in backend, the default value is true
	s2sGetPropertiesInBackend bool
	// whether user wants to preserve access tier during service to service copy, the default value is true.
	// In some case, e.g. target is a GPv1 storage account, access tier cannot be set properly.
	// In such cases, use s2sPreserveAccessTier=false to bypass the access tier copy.
	// For more details, please refer to https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
	s2sPreserveAccessTier bool
	// whether user wants to check if source has changed after enumerating, the default value is true.
	// For S2S copy, as source is a remote resource, validating whether source has changed need additional request costs.
	s2sSourceChangeValidation bool
	// specify how user wants to handle invalid metadata.
	s2sInvalidMetadataHandleOption string // parsed into a common.InvalidMetadataHandleOption by cook()
}
// parsePatterns splits a ';'-separated pattern string into a slice,
// dropping any empty entries produced by the split.
func (raw *rawCopyCmdArgs) parsePatterns(pattern string) (cookedPatterns []string) {
	cookedPatterns = make([]string, 0)
	for _, p := range strings.Split(pattern, ";") {
		// skip the empty patterns
		if p != "" {
			cookedPatterns = append(cookedPatterns, p)
		}
	}
	return
}
// blockSizeInBytes converts the user-facing MB value to bytes;
// internally we use bytes, but for users' convenience the command line uses MB.
func (raw rawCopyCmdArgs) blockSizeInBytes() uint32 {
	const bytesPerMB = 1024 * 1024
	return raw.blockSizeMB * bytesPerMB
}
// validates and transforms raw input into cooked input.
// Fixes vs. previous version: the list-of-files handle is now closed
// (was leaked), duplicated legacy include/exclude parsing is factored into
// parseLegacyPatternMap, and typos in two error messages are corrected.
func (raw rawCopyCmdArgs) cook() (cookedCopyCmdArgs, error) {
	cooked := cookedCopyCmdArgs{}
	fromTo, err := validateFromTo(raw.src, raw.dst, raw.fromTo) // TODO: src/dst
	if err != nil {
		return cooked, err
	}
	cooked.source = raw.src
	cooked.destination = raw.dst
	cooked.fromTo = fromTo

	// copy & transform flags to their type-safe equivalents
	cooked.recursive = raw.recursive
	cooked.followSymlinks = raw.followSymlinks
	cooked.withSnapshots = raw.withSnapshots
	cooked.forceWrite = raw.forceWrite
	cooked.blockSize = raw.blockSizeInBytes()

	// parse the given blob type.
	err = cooked.blobType.Parse(raw.blobType)
	if err != nil {
		return cooked, err
	}
	// If the given blobType is AppendBlob, block-size-mb should not be greater than 4MB.
	if cooked.blobType == common.EBlobType.AppendBlob() &&
		raw.blockSizeInBytes() > common.MaxAppendBlobBlockSize {
		return cooked, fmt.Errorf("block size cannot be greater than 4MB for AppendBlob blob type")
	}

	err = cooked.blockBlobTier.Parse(raw.blockBlobTier)
	if err != nil {
		return cooked, err
	}
	err = cooked.pageBlobTier.Parse(raw.pageBlobTier)
	if err != nil {
		return cooked, err
	}
	err = cooked.logVerbosity.Parse(raw.logVerbosity)
	if err != nil {
		return cooked, err
	}

	// User can provide either listOfFilesToCopy or include since listOfFiles mentions
	// file names to include explicitly and the include flag may mention a pattern.
	// This could conflict enumerating the files to queue up for transfer.
	if len(raw.listOfFilesToCopy) > 0 && len(raw.legacyInclude) > 0 {
		return cooked, fmt.Errorf("user provided argument with both listOfFilesToCopy and include flag. Only one should be provided")
	}

	// If the user provided the list of files explicitly to be copied, then parse the argument.
	// The user passes the location of a json file which will have the list of files to be copied.
	// The "json file" is chosen as input because there is a limit on the number of characters that
	// can be supplied with the argument, but Storage Explorer's requirement was not to impose
	// any limit on the number of files that can be copied.
	if len(raw.listOfFilesToCopy) > 0 {
		jsonFile, err := os.Open(raw.listOfFilesToCopy)
		if err != nil {
			return cooked, fmt.Errorf("cannot open %s file passed with the list-of-file flag", raw.listOfFilesToCopy)
		}
		// close the handle once cook() returns (was previously leaked)
		defer jsonFile.Close()
		// read opened json file as a byte array.
		jsonBytes, err := ioutil.ReadAll(jsonFile)
		if err != nil {
			return cooked, fmt.Errorf("error %s read %s file passed with the list-of-file flag", err.Error(), raw.listOfFilesToCopy)
		}
		var files common.ListOfFiles
		err = json.Unmarshal(jsonBytes, &files)
		if err != nil {
			return cooked, fmt.Errorf("error %s unmarshalling the contents of %s file passed with the list-of-file flag", err.Error(), raw.listOfFilesToCopy)
		}
		for _, file := range files.Files {
			// skip empty entries
			if len(file) == 0 {
				continue
			}
			// replace the OS path separator with AZCOPY_PATH_SEPARATOR
			// to handle windows file paths where the separator is "\\"
			filePath := strings.Replace(file, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
			cooked.listOfFilesToCopy = append(cooked.listOfFilesToCopy, filePath)
		}
	}

	// initialize the include/exclude maps which contain the files to be included/excluded;
	// more than one pattern is expected to be separated by ';'
	cooked.legacyInclude = parseLegacyPatternMap(raw.legacyInclude)
	cooked.legacyExclude = parseLegacyPatternMap(raw.legacyExclude)

	cooked.metadata = raw.metadata
	cooked.contentType = raw.contentType
	cooked.contentEncoding = raw.contentEncoding
	cooked.contentLanguage = raw.contentLanguage
	cooked.contentDisposition = raw.contentDisposition
	cooked.cacheControl = raw.cacheControl
	cooked.noGuessMimeType = raw.noGuessMimeType
	cooked.preserveLastModifiedTime = raw.preserveLastModifiedTime
	cooked.putMd5 = raw.putMd5
	err = cooked.md5ValidationOption.Parse(raw.md5ValidationOption)
	if err != nil {
		return cooked, err
	}
	cooked.background = raw.background
	cooked.acl = raw.acl
	cooked.cancelFromStdin = raw.cancelFromStdin

	// if redirection is triggered, avoid printing any output
	if cooked.isRedirection() {
		glcm.SetOutputFormat(common.EOutputFormat.None())
	}

	// generate a unique job ID
	cooked.jobID = common.NewJobID()

	// check for the flag value relative to fromTo location type
	// Example1: for Local to Blob, preserve-last-modified-time flag should not be set to true
	// Example2: for Blob to Local, follow-symlinks, blob-tier flags should not be provided with values.
	// NOTE(review): the cooked.s2s* checks below run before those fields are populated
	// from raw (that assignment happens after this switch), so they always see the
	// zero value — verify this ordering is intended before changing it.
	switch cooked.fromTo {
	case common.EFromTo.LocalBlobFS():
		if cooked.blobType != common.EBlobType.None() {
			return cooked, fmt.Errorf("blob-type is not supported on ADLS Gen 2")
		}
	case common.EFromTo.LocalBlob():
		if cooked.preserveLastModifiedTime {
			return cooked, fmt.Errorf("preserve-last-modified-time is not supported while uploading")
		}
		if cooked.s2sPreserveProperties {
			return cooked, fmt.Errorf("s2s-preserve-properties is not supported while uploading")
		}
		if cooked.s2sPreserveAccessTier {
			return cooked, fmt.Errorf("s2s-preserve-access-tier is not supported while uploading")
		}
		if cooked.s2sInvalidMetadataHandleOption != common.DefaultInvalidMetadataHandleOption {
			return cooked, fmt.Errorf("s2s-handle-invalid-metadata is not supported while uploading")
		}
		if cooked.s2sSourceChangeValidation {
			return cooked, fmt.Errorf("s2s-detect-source-changed is not supported while uploading")
		}
	case common.EFromTo.LocalFile():
		if cooked.preserveLastModifiedTime {
			return cooked, fmt.Errorf("preserve-last-modified-time is not supported while uploading")
		}
		if cooked.blockBlobTier != common.EBlockBlobTier.None() ||
			cooked.pageBlobTier != common.EPageBlobTier.None() {
			return cooked, fmt.Errorf("blob-tier is not supported while uploading to Azure File")
		}
		if cooked.s2sPreserveProperties {
			return cooked, fmt.Errorf("s2s-preserve-properties is not supported while uploading")
		}
		if cooked.s2sPreserveAccessTier {
			return cooked, fmt.Errorf("s2s-preserve-access-tier is not supported while uploading")
		}
		if cooked.s2sInvalidMetadataHandleOption != common.DefaultInvalidMetadataHandleOption {
			return cooked, fmt.Errorf("s2s-handle-invalid-metadata is not supported while uploading")
		}
		if cooked.s2sSourceChangeValidation {
			return cooked, fmt.Errorf("s2s-detect-source-changed is not supported while uploading")
		}
	case common.EFromTo.BlobLocal(),
		common.EFromTo.FileLocal():
		if cooked.followSymlinks {
			return cooked, fmt.Errorf("follow-symlinks flag is not supported while downloading")
		}
		if cooked.blockBlobTier != common.EBlockBlobTier.None() ||
			cooked.pageBlobTier != common.EPageBlobTier.None() {
			return cooked, fmt.Errorf("blob-tier is not supported while downloading")
		}
		if cooked.noGuessMimeType {
			return cooked, fmt.Errorf("no-guess-mime-type is not supported while downloading")
		}
		if len(cooked.contentType) > 0 || len(cooked.contentEncoding) > 0 || len(cooked.contentLanguage) > 0 || len(cooked.contentDisposition) > 0 || len(cooked.cacheControl) > 0 || len(cooked.metadata) > 0 {
			return cooked, fmt.Errorf("content-type, content-encoding, content-language, content-disposition, cache-control, or metadata is not supported while downloading")
		}
		if cooked.s2sPreserveProperties {
			return cooked, fmt.Errorf("s2s-preserve-properties is not supported while downloading")
		}
		if cooked.s2sPreserveAccessTier {
			return cooked, fmt.Errorf("s2s-preserve-access-tier is not supported while downloading")
		}
		if cooked.s2sInvalidMetadataHandleOption != common.DefaultInvalidMetadataHandleOption {
			return cooked, fmt.Errorf("s2s-handle-invalid-metadata is not supported while downloading")
		}
		if cooked.s2sSourceChangeValidation {
			return cooked, fmt.Errorf("s2s-detect-source-changed is not supported while downloading")
		}
	case common.EFromTo.BlobBlob(),
		common.EFromTo.FileBlob(),
		common.EFromTo.S3Blob():
		if cooked.preserveLastModifiedTime {
			return cooked, fmt.Errorf("preserve-last-modified-time is not supported while copying from service to service")
		}
		if cooked.followSymlinks {
			return cooked, fmt.Errorf("follow-symlinks flag is not supported while copying from service to service")
		}
		// Disabling blob tier override, when copying block -> block blob or page -> page blob, blob tier will be kept,
		// For s3 and file, only hot block blob tier is supported.
		if cooked.blockBlobTier != common.EBlockBlobTier.None() ||
			cooked.pageBlobTier != common.EPageBlobTier.None() {
			return cooked, fmt.Errorf("blob-tier is not supported while copying from service to service")
		}
		// Disabling blob type override.
		// i.e. not support block -> append/page, append -> block/page, page -> append/block,
		// and when file and s3 is source, only block blob destination is supported.
		if cooked.blobType != common.EBlobType.None() {
			return cooked, fmt.Errorf("blob-type is not supported while copying from service to service")
		}
		if cooked.noGuessMimeType {
			return cooked, fmt.Errorf("no-guess-mime-type is not supported while copying from service to service")
		}
		if len(cooked.contentType) > 0 || len(cooked.contentEncoding) > 0 || len(cooked.contentLanguage) > 0 || len(cooked.contentDisposition) > 0 || len(cooked.cacheControl) > 0 || len(cooked.metadata) > 0 {
			return cooked, fmt.Errorf("content-type, content-encoding, content-language, content-disposition, cache-control, or metadata is not supported while copying from service to service")
		}
	}

	if err = validatePutMd5(cooked.putMd5, cooked.fromTo); err != nil {
		return cooked, err
	}
	if err = validateMd5Option(cooked.md5ValidationOption, cooked.fromTo); err != nil {
		return cooked, err
	}

	// If the user has provided some input with excludeBlobType flag, parse the input.
	if len(raw.excludeBlobType) > 0 {
		// Split the string using delimiter ';' and parse the individual blobType
		blobTypes := strings.Split(raw.excludeBlobType, ";")
		for _, blobType := range blobTypes {
			var eBlobType common.BlobType
			err := eBlobType.Parse(blobType)
			if err != nil {
				return cooked, fmt.Errorf("error parsing the exclude-blob-type %s provided with exclude-blob-type flag ", blobType)
			}
			cooked.excludeBlobType = append(cooked.excludeBlobType, eBlobType.ToAzBlobType())
		}
	}

	cooked.s2sPreserveProperties = raw.s2sPreserveProperties
	cooked.s2sGetPropertiesInBackend = raw.s2sGetPropertiesInBackend
	cooked.s2sPreserveAccessTier = raw.s2sPreserveAccessTier
	cooked.s2sSourceChangeValidation = raw.s2sSourceChangeValidation
	err = cooked.s2sInvalidMetadataHandleOption.Parse(raw.s2sInvalidMetadataHandleOption)
	if err != nil {
		return cooked, err
	}

	// parse the filter patterns
	cooked.includePatterns = raw.parsePatterns(raw.include)
	cooked.excludePatterns = raw.parsePatterns(raw.exclude)

	return cooked, nil
}

// parseLegacyPatternMap splits a ';'-separated pattern list into a map of
// pattern -> original index, normalizing the OS path separator to the AzCopy
// path separator (handles windows file paths where the separator is "\\").
// An empty or absent list yields an empty, non-nil map.
func parseLegacyPatternMap(rawPatterns string) map[string]int {
	patterns := make(map[string]int)
	if len(rawPatterns) == 0 {
		return patterns
	}
	files := strings.Split(rawPatterns, ";")
	for index := range files {
		// skip empty entries produced by the split
		if len(files[index]) == 0 {
			continue
		}
		patternPath := strings.Replace(files[index], common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
		patterns[patternPath] = index
	}
	return patterns
}
// validatePutMd5 rejects the put-md5 flag for any job that is not an
// upload (local source, remote destination).
func validatePutMd5(putMd5 bool, fromTo common.FromTo) error {
	if !putMd5 {
		return nil
	}
	if fromTo.From() == common.ELocation.Local() && fromTo.To().IsRemote() {
		return nil
	}
	return fmt.Errorf("put-md5 is set but the job is not an upload")
}
// validateMd5Option rejects a non-default md5 validation setting for any
// job that is not a download (local destination).
func validateMd5Option(option common.HashValidationOption, fromTo common.FromTo) error {
	if option == common.DefaultHashValidationOption {
		return nil
	}
	if fromTo.To() == common.ELocation.Local() {
		return nil
	}
	return fmt.Errorf("check-md5 is set but the job is not a download")
}
// represents the processed copy command input from the user,
// produced by rawCopyCmdArgs.cook() and consumed by process().
type cookedCopyCmdArgs struct {
	// from arguments
	source         string // source with any SAS stripped (see processCopyJobPartOrders)
	sourceSAS      string // SAS extracted from the source URL, if any
	destination    string // destination with any SAS stripped
	destinationSAS string // SAS extracted from the destination URL, if any
	fromTo         common.FromTo

	// TODO remove after refactoring
	legacyInclude map[string]int // pattern -> original index in the include flag
	legacyExclude map[string]int // pattern -> original index in the exclude flag

	// new include/exclude only apply to file names
	// implemented for remove (and sync) only
	includePatterns []string
	excludePatterns []string

	// filters from flags
	listOfFilesToCopy []string
	recursive         bool
	followSymlinks    bool
	withSnapshots     bool
	forceWrite        bool

	// options from flags
	blockSize uint32 // in bytes (converted from the user-facing MB value)
	// list of blobTypes to exclude while enumerating the transfer
	excludeBlobType          []azblob.BlobType
	blobType                 common.BlobType
	blockBlobTier            common.BlockBlobTier
	pageBlobTier             common.PageBlobTier
	metadata                 string
	contentType              string
	contentEncoding          string
	contentLanguage          string
	contentDisposition       string
	cacheControl             string
	noGuessMimeType          bool
	preserveLastModifiedTime bool
	putMd5                   bool
	md5ValidationOption      common.HashValidationOption
	background               bool
	acl                      string
	logVerbosity             common.LogLevel
	cancelFromStdin          bool
	// commandString hold the user given command which is logged to the Job log file
	commandString string

	// generated
	jobID common.JobID

	// extracted from the input
	credentialInfo common.CredentialInfo

	// variables used to calculate progress
	// intervalStartTime holds the last time value when the progress summary was fetched
	// the value of this variable is used to calculate the throughput
	// it gets updated every time the progress summary is fetched
	intervalStartTime        time.Time
	intervalBytesTransferred uint64

	// used to calculate job summary
	jobStartTime time.Time

	// this flag is set by the enumerator
	// it is useful to indicate whether we are simply waiting for the purpose of cancelling
	isEnumerationComplete bool

	// whether user wants to preserve full properties during service to service copy, the default value is true.
	// For S3 and Azure File non-single file source, as list operation doesn't return full properties of objects/files,
	// to preserve full properties AzCopy needs to send one additional request per object/file.
	s2sPreserveProperties bool
	// useful when preserveS3Properties set to true, enables get S3 objects' or Azure files' properties during s2s copy in backend, the default value is true
	s2sGetPropertiesInBackend bool
	// whether user wants to preserve access tier during service to service copy, the default value is true.
	// In some case, e.g. target is a GPv1 storage account, access tier cannot be set properly.
	// In such cases, use s2sPreserveAccessTier=false to bypass the access tier copy.
	// For more details, please refer to https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
	s2sPreserveAccessTier bool
	// whether user wants to check if source has changed after enumerating, the default value is true.
	// For S2S copy, as source is a remote resource, validating whether source has changed need additional request costs.
	s2sSourceChangeValidation bool
	// specify how user wants to handle invalid metadata.
	s2sInvalidMetadataHandleOption common.InvalidMetadataHandleOption
}
// isRedirection reports whether this job pipes data through stdin/stdout
// (blob-to-pipe or pipe-to-blob) instead of running a normal transfer.
func (cca *cookedCopyCmdArgs) isRedirection() bool {
	return cca.fromTo == common.EFromTo.BlobPipe() ||
		cca.fromTo == common.EFromTo.PipeBlob()
}
// process dispatches the cooked arguments: redirection jobs are handled
// inline (and exit on success); everything else becomes job part orders.
func (cca *cookedCopyCmdArgs) process() error {
	if cca.isRedirection() {
		if err := cca.processRedirectionCopy(); err != nil {
			return err
		}
		// if no error, the operation is now complete
		glcm.Exit(nil, common.EExitCode.Success())
	}
	return cca.processCopyJobPartOrders()
}
// TODO discuss with Jeff what features should be supported by redirection, such as metadata, content-type, etc.
// processRedirectionCopy routes a piping job to the matching upload or
// download handler based on the transfer direction.
func (cca *cookedCopyCmdArgs) processRedirectionCopy() error {
	switch cca.fromTo {
	case common.EFromTo.PipeBlob():
		return cca.processRedirectionUpload(cca.destination, cca.blockSize)
	case common.EFromTo.BlobPipe():
		return cca.processRedirectionDownload(cca.source)
	default:
		return fmt.Errorf("unsupported redirection type: %s", cca.fromTo)
	}
}
// processRedirectionDownload streams a single blob straight to Stdout.
func (cca *cookedCopyCmdArgs) processRedirectionDownload(blobUrl string) error {
	// step 0: check the Stdout before downloading anything
	if _, err := os.Stdout.Stat(); err != nil {
		return fmt.Errorf("fatal: cannot write to Stdout due to error: %s", err.Error())
	}

	// step 1: initialize an anonymous pipeline with the standard download retry policy
	dlPipeline := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Retry: azblob.RetryOptions{
			Policy:        azblob.RetryPolicyExponential,
			MaxTries:      downloadMaxTries,
			TryTimeout:    downloadTryTimeout,
			RetryDelay:    downloadRetryDelay,
			MaxRetryDelay: downloadMaxRetryDelay,
		},
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	})

	// step 2: parse source url
	parsedURL, err := url.Parse(blobUrl)
	if err != nil {
		return fmt.Errorf("fatal: cannot parse source blob URL due to error: %s", err.Error())
	}

	// step 3: start download
	srcBlobURL := azblob.NewBlobURL(*parsedURL, dlPipeline)
	downloadResp, err := srcBlobURL.Download(context.TODO(), 0, azblob.CountToEnd, azblob.BlobAccessConditions{}, false)
	if err != nil {
		return fmt.Errorf("fatal: cannot download blob due to error: %s", err.Error())
	}
	bodyReader := downloadResp.Body(azblob.RetryReaderOptions{MaxRetryRequests: downloadMaxTries})
	defer bodyReader.Close()

	// step 4: pipe everything into Stdout
	if _, copyErr := io.Copy(os.Stdout, bodyReader); copyErr != nil {
		return fmt.Errorf("fatal: cannot download blob to Stdout due to error: %s", copyErr.Error())
	}
	return nil
}
// processRedirectionUpload streams Stdin into a block blob in parallel.
func (cca *cookedCopyCmdArgs) processRedirectionUpload(blobUrl string, blockSize uint32) error {
	// if no block size is set, then use the default value
	if blockSize == 0 {
		blockSize = pipingDefaultBlockSize
	}

	// step 0: initialize an anonymous pipeline with the standard upload retry policy
	ulPipeline := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Retry: azblob.RetryOptions{
			Policy:        azblob.RetryPolicyExponential,
			MaxTries:      uploadMaxTries,
			TryTimeout:    uploadTryTimeout,
			RetryDelay:    uploadRetryDelay,
			MaxRetryDelay: uploadMaxRetryDelay,
		},
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	})

	// step 1: parse destination url
	parsedURL, err := url.Parse(blobUrl)
	if err != nil {
		return fmt.Errorf("fatal: cannot parse destination blob URL due to error: %s", err.Error())
	}

	// step 2: leverage the high-level call in the Blob SDK to upload stdin in parallel
	destBlobURL := azblob.NewBlockBlobURL(*parsedURL, ulPipeline)
	_, err = azblob.UploadStreamToBlockBlob(context.TODO(), os.Stdin, destBlobURL, azblob.UploadStreamToBlockBlobOptions{
		BufferSize: int(blockSize),
		MaxBuffers: pipingUploadParallelism,
	})
	return err
}
// handles the copy command
// dispatches the job order (in parts) to the storage engine.
// Fix vs. previous version: url.Parse error paths formatted fromUrl/toUrl,
// which are nil when parsing fails, causing a panic instead of an error —
// they now format the raw string; error strings no longer end with "\n".
func (cca *cookedCopyCmdArgs) processCopyJobPartOrders() (err error) {
	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)

	// verifies credential type and initializes credential info.
	// Note: Currently, only one credential type is necessary for source and destination.
	// For upload&download, only one side needs a credential.
	// For S2S copy, as azcopy-v10 uses Put*FromUrl, only one credential is needed, for the destination.
	if cca.credentialInfo.CredentialType, err = getCredentialType(ctx, rawFromToInfo{
		fromTo:         cca.fromTo,
		source:         cca.source,
		destination:    cca.destination,
		sourceSAS:      cca.sourceSAS,
		destinationSAS: cca.destinationSAS,
	}); err != nil {
		return err
	}

	// For OAuthToken credential, assign OAuthTokenInfo to CopyJobPartOrderRequest properly,
	// the info will be transferred to STE.
	if cca.credentialInfo.CredentialType == common.ECredentialType.OAuthToken() {
		// Message user that they are using Oauth token for authentication,
		// in case of silently using a cached token without awareness.
		glcm.Info("Using OAuth token for authentication.")

		uotm := GetUserOAuthTokenManagerInstance()
		// Get token from env var or cache.
		if tokenInfo, err := uotm.GetTokenInfo(ctx); err != nil {
			return err
		} else {
			cca.credentialInfo.OAuthTokenInfo = *tokenInfo
		}
	}

	// initialize the fields that are constant across all job part orders
	jobPartOrder := common.CopyJobPartOrderRequest{
		JobID:           cca.jobID,
		FromTo:          cca.fromTo,
		ForceWrite:      cca.forceWrite,
		Priority:        common.EJobPriority.Normal(),
		LogLevel:        cca.logVerbosity,
		Include:         cca.legacyInclude,
		Exclude:         cca.legacyExclude,
		ExcludeBlobType: cca.excludeBlobType,
		BlobAttributes: common.BlobTransferAttributes{
			BlobType:                 cca.blobType,
			BlockSizeInBytes:         cca.blockSize,
			ContentType:              cca.contentType,
			ContentEncoding:          cca.contentEncoding,
			ContentLanguage:          cca.contentLanguage,
			ContentDisposition:       cca.contentDisposition,
			CacheControl:             cca.cacheControl,
			BlockBlobTier:            cca.blockBlobTier,
			PageBlobTier:             cca.pageBlobTier,
			Metadata:                 cca.metadata,
			NoGuessMimeType:          cca.noGuessMimeType,
			PreserveLastModifiedTime: cca.preserveLastModifiedTime,
			PutMd5:                   cca.putMd5,
			MD5ValidationOption:      cca.md5ValidationOption,
		},
		// source sas is stripped from the source given by the user and it will not be stored in the part plan file.
		SourceSAS: cca.sourceSAS,
		// destination sas is stripped from the destination given by the user and it will not be stored in the part plan file.
		DestinationSAS: cca.destinationSAS,
		CommandString:  cca.commandString,
		CredentialInfo: cca.credentialInfo,
	}

	// TODO remove this copy pasted code during refactoring
	from := cca.fromTo.From()
	to := cca.fromTo.To()
	// Strip the SAS from the source and destination whenever a SAS exists in the URL.
	// Note: SAS could exist in the source of an S2S copy, even if the credential type is OAuth for the destination.
	switch from {
	case common.ELocation.Blob():
		fromUrl, err := url.Parse(cca.source)
		if err != nil {
			// format the raw string: fromUrl is nil when parsing fails
			return fmt.Errorf("error parsing the source url %s. Failed with error %s", cca.source, err.Error())
		}
		blobParts := azblob.NewBlobURLParts(*fromUrl)
		cca.sourceSAS = blobParts.SAS.Encode()
		jobPartOrder.SourceSAS = cca.sourceSAS
		blobParts.SAS = azblob.SASQueryParameters{}
		bUrl := blobParts.URL()
		cca.source = bUrl.String()
		// set the clean source root
		bUrl.Path, _ = gCopyUtil.getRootPathWithoutWildCards(bUrl.Path)
		jobPartOrder.SourceRoot = bUrl.String()
	case common.ELocation.File():
		fromUrl, err := url.Parse(cca.source)
		if err != nil {
			return fmt.Errorf("error parsing the source url %s. Failed with error %s", cca.source, err.Error())
		}
		fileParts := azfile.NewFileURLParts(*fromUrl)
		cca.sourceSAS = fileParts.SAS.Encode()
		if cca.sourceSAS == "" {
			return fmt.Errorf("azure files only supports SAS token authentication")
		}
		jobPartOrder.SourceSAS = cca.sourceSAS
		fileParts.SAS = azfile.SASQueryParameters{}
		fUrl := fileParts.URL()
		cca.source = fUrl.String()
		// set the clean source root
		fUrl.Path, _ = gCopyUtil.getRootPathWithoutWildCards(fUrl.Path)
		jobPartOrder.SourceRoot = fUrl.String()
	case common.ELocation.BlobFS():
		// as at April 2019 we don't actually support SAS for BlobFS, but here we do similar processing as the others because
		// (a) it also escapes spaces in the source (and we need that done) and
		// (b) if we ever do start supporting SASs for BlobFS, we don't want to forget to add code here to correctly process them
		if redacted, _ := common.RedactSecretQueryParam(cca.source, "sig"); redacted {
			panic("SAS in BlobFS is not yet supported")
		}
		fromUrl, err := url.Parse(cca.source)
		if err != nil {
			return fmt.Errorf("error parsing the source url %s. Failed with error %s", cca.source, err.Error())
		}
		bfsParts := azbfs.NewBfsURLParts(*fromUrl)
		bfsUrl := bfsParts.URL()
		cca.source = bfsUrl.String() // this escapes spaces in the source
		// set the clean source root
		bfsUrl.Path, _ = gCopyUtil.getRootPathWithoutWildCards(bfsUrl.Path)
		jobPartOrder.SourceRoot = bfsUrl.String()
	case common.ELocation.Local():
		cca.source = cleanLocalPath(cca.source)
		jobPartOrder.SourceRoot, _ = gCopyUtil.getRootPathWithoutWildCards(cca.source)
	case common.ELocation.S3():
		fromURL, err := url.Parse(cca.source)
		if err != nil {
			return fmt.Errorf("error parsing the source url %s. Failed with error %s", cca.source, err.Error())
		}
		// S3 management console encodes ' '(space) as '+', which is not supported by Azure resources.
		// To support URLs from the S3 management console, azcopy decodes '+' as ' '(space).
		*fromURL = common.URLExtension{URL: *fromURL}.URLWithPlusDecodedInPath()
		cca.source = fromURL.String()
		// set the clean source root
		fromURL.Path, _ = gCopyUtil.getRootPathWithoutWildCards(fromURL.Path)
		jobPartOrder.SourceRoot = fromURL.String()
	default:
		jobPartOrder.SourceRoot, _ = gCopyUtil.getRootPathWithoutWildCards(cca.source)
	}

	switch to {
	case common.ELocation.Blob():
		toUrl, err := url.Parse(cca.destination)
		if err != nil {
			return fmt.Errorf("error parsing the destination url %s. Failed with error %s", cca.destination, err.Error())
		}
		blobParts := azblob.NewBlobURLParts(*toUrl)
		cca.destinationSAS = blobParts.SAS.Encode()
		jobPartOrder.DestinationSAS = cca.destinationSAS
		blobParts.SAS = azblob.SASQueryParameters{}
		bUrl := blobParts.URL()
		cca.destination = bUrl.String()
	case common.ELocation.File():
		toUrl, err := url.Parse(cca.destination)
		if err != nil {
			return fmt.Errorf("error parsing the destination url %s. Failed with error %s", cca.destination, err.Error())
		}
		fileParts := azfile.NewFileURLParts(*toUrl)
		cca.destinationSAS = fileParts.SAS.Encode()
		if cca.destinationSAS == "" {
			return fmt.Errorf("azure files only supports SAS token authentication")
		}
		jobPartOrder.DestinationSAS = cca.destinationSAS
		fileParts.SAS = azfile.SASQueryParameters{}
		fUrl := fileParts.URL()
		cca.destination = fUrl.String()
	case common.ELocation.BlobFS():
		// as at April 2019 we don't actually support SAS for BlobFS, but here we do similar processing as the others because
		// (a) it also escapes spaces in the destination (and we need that done) and
		// (b) if we ever do start supporting SASs for BlobFS, we don't want to forget to add code here to correctly process them
		if redacted, _ := common.RedactSecretQueryParam(cca.destination, "sig"); redacted {
			panic("SAS in BlobFS is not yet supported")
		}
		toUrl, err := url.Parse(cca.destination)
		if err != nil {
			return fmt.Errorf("error parsing the destination url %s. Failed with error %s", cca.destination, err.Error())
		}
		bfsParts := azbfs.NewBfsURLParts(*toUrl)
		bfsUrl := bfsParts.URL()
		cca.destination = bfsUrl.String() // this escapes spaces in the destination
	case common.ELocation.Local():
		cca.destination = cleanLocalPath(cca.destination)
	}

	// set the root destination after it's been cleaned
	jobPartOrder.DestinationRoot = cca.destination

	// depending on the source and destination type, we process the cp command differently
	// Create enumerator and do enumerating
	switch cca.fromTo {
	case common.EFromTo.LocalBlob():
		fallthrough
	case common.EFromTo.LocalBlobFS():
		fallthrough
	case common.EFromTo.LocalFile():
		e := copyUploadEnumerator(jobPartOrder)
		err = e.enumerate(cca)
	case common.EFromTo.BlobLocal():
		e := copyDownloadBlobEnumerator(jobPartOrder)
		err = e.enumerate(cca)
	case common.EFromTo.FileLocal():
		e := copyDownloadFileEnumerator(jobPartOrder)
		err = e.enumerate(cca)
	case common.EFromTo.BlobFSLocal():
		e := copyDownloadBlobFSEnumerator(jobPartOrder)
		err = e.enumerate(cca)
	case common.EFromTo.BlobTrash():
		e, createErr := newRemoveBlobEnumerator(cca)
		if createErr != nil {
			return createErr
		}
		err = e.enumerate()
	case common.EFromTo.FileTrash():
		e, createErr := newRemoveFileEnumerator(cca)
		if createErr != nil {
			return createErr
		}
		err = e.enumerate()
	case common.EFromTo.BlobBlob():
		e := copyS2SMigrationBlobEnumerator{
			copyS2SMigrationEnumeratorBase: copyS2SMigrationEnumeratorBase{
				CopyJobPartOrderRequest: jobPartOrder,
			},
		}
		err = e.enumerate(cca)
	case common.EFromTo.FileBlob():
		e := copyS2SMigrationFileEnumerator{
			copyS2SMigrationEnumeratorBase: copyS2SMigrationEnumeratorBase{
				CopyJobPartOrderRequest: jobPartOrder,
			},
		}
		err = e.enumerate(cca)
	case common.EFromTo.S3Blob():
		e := copyS2SMigrationS3Enumerator{ // S3 enumerator for S2S copy.
			copyS2SMigrationEnumeratorBase: copyS2SMigrationEnumeratorBase{
				CopyJobPartOrderRequest: jobPartOrder,
			},
		}
		err = e.enumerate(cca)
	// TODO: Hide the File to Blob direction temporarily, as service support on-going.
	// case common.EFromTo.FileBlob():
	// 	e := copyFileToNEnumerator(jobPartOrder)
	// 	err = e.enumerate(cca)
	default:
		// error strings should not end with a newline (go vet / ST1005)
		return fmt.Errorf("copy direction %v is not supported", cca.fromTo)
	}

	if err != nil {
		return fmt.Errorf("cannot start job due to error: %s", err)
	}

	return nil
}
// waitUntilJobCompletion hands progress tracking over to the lifecycle manager.
// With blocking == true this method never returns; with blocking == false a
// goroutine watches the job and this method returns immediately.
func (cca *cookedCopyCmdArgs) waitUntilJobCompletion(blocking bool) {
	// announce the job (and its log file location) to the user
	glcm.Init(common.GetStandardInitOutputBuilder(cca.jobID.String(), fmt.Sprintf("%s/%s.log", azcopyLogPathFolder, cca.jobID)))

	// start the clocks used for overall and per-interval progress tracking
	now := time.Now()
	cca.jobStartTime = now
	cca.intervalStartTime = now
	cca.intervalBytesTransferred = 0

	// progress reporting is started in both modes; blocking mode additionally
	// surrenders control to the lifecycle manager, which never returns
	glcm.InitiateProgressReporting(cca, !cca.cancelFromStdin)
	if blocking {
		glcm.SurrenderControl()
	}
}
// Cancel cancels the job, asking the user for confirmation first when that is
// both possible and necessary.
func (cca *cookedCopyCmdArgs) Cancel(lcm common.LifecycleMgr) {
	// Confirmation is required only when ALL of the following hold:
	//   1. output is plain text (i.e. likely an interactive user)
	//   2. azcopy was NOT spawned by another process (cancelFromStdin indicates spawning)
	//   3. enumeration is not yet complete (cancelling now makes the job unresumable)
	needConfirmation := azcopyOutputFormat == common.EOutputFormat.Text() &&
		!cca.cancelFromStdin &&
		!cca.isEnumerationComplete
	if needConfirmation {
		answer := lcm.Prompt("The source enumeration is not complete, cancelling the job at this point means it cannot be resumed. Please confirm with y/n: ")
		if !strings.EqualFold(answer, "y") {
			// user declined: abort the cancellation
			return
		}
	}

	if err := (cookedCancelCmdArgs{jobID: cca.jobID}).process(); err != nil {
		lcm.Error("error occurred while cancelling the job " + cca.jobID.String() + ": " + err.Error())
	}
}
// ReportProgressOrExit is invoked periodically by the lifecycle manager. It
// fetches the current job summary over RPC, then either prints a one-line
// progress update (job still running) or emits the final summary and exits
// with a success/error code (job done).
func (cca *cookedCopyCmdArgs) ReportProgressOrExit(lcm common.LifecycleMgr) {
	// fetch a job status
	var summary common.ListJobSummaryResponse
	Rpc(common.ERpcCmd.ListJobSummary(), &cca.jobID, &summary)
	jobDone := summary.JobStatus.IsJobDone()

	// if json is not desired, and job is done, then we generate a special end message to conclude the job
	duration := time.Now().Sub(cca.jobStartTime) // report the total run time of the job

	if jobDone {
		// a single failed transfer is enough to make the whole job's exit code an error
		exitCode := common.EExitCode.Success()
		if summary.TransfersFailed > 0 {
			exitCode = common.EExitCode.Error()
		}
		lcm.Exit(func(format common.OutputFormat) string {
			if format == common.EOutputFormat.Json() {
				jsonOutput, err := json.Marshal(summary)
				common.PanicIfErr(err)
				return string(jsonOutput)
			} else {
				output := fmt.Sprintf(
					"\n\nJob %s summary\nElapsed Time (Minutes): %v\nTotal Number Of Transfers: %v\nNumber of Transfers Completed: %v\nNumber of Transfers Failed: %v\nNumber of Transfers Skipped: %v\nTotalBytesTransferred: %v\nFinal Job Status: %v\n",
					summary.JobID.String(),
					ste.ToFixed(duration.Minutes(), 4),
					summary.TotalTransfers,
					summary.TransfersCompleted,
					summary.TransfersFailed,
					summary.TransfersSkipped,
					summary.TotalBytesTransferred,
					summary.JobStatus)

				// also record the final summary in the job's log, if the job manager still exists
				jobMan, exists := ste.JobsAdmin.JobMgr(summary.JobID)
				if exists {
					jobMan.Log(pipeline.LogInfo, output)
				}
				return output
			}
		}, exitCode)
	}

	// computeThroughput measures megabits/sec over the interval since the last
	// call. NOTE: it resets the interval timer and byte counter as a side
	// effect, so it must be called at most once per progress tick.
	var computeThroughput = func() float64 {
		// compute the average throughput for the last time interval
		bytesInMb := float64(float64(summary.BytesOverWire-cca.intervalBytesTransferred) / float64(1024*1024))
		timeElapsed := time.Since(cca.intervalStartTime).Seconds()

		// reset the interval timer and byte count
		cca.intervalStartTime = time.Now()
		cca.intervalBytesTransferred = summary.BytesOverWire

		// guard against division by zero; the *8 converts MB/s to Mb/s (bits)
		return common.Iffloat64(timeElapsed != 0, bytesInMb/timeElapsed, 0) * 8
	}

	glcm.Progress(func(format common.OutputFormat) string {
		if format == common.EOutputFormat.Json() {
			jsonOutput, err := json.Marshal(summary)
			common.PanicIfErr(err)
			return string(jsonOutput)
		} else {
			// if json is not needed, then we generate a message that goes nicely on the same line
			// display a scanning keyword if the job is not completely ordered
			var scanningString = " (scanning...)"
			if summary.CompleteJobOrdered {
				scanningString = ""
			}

			throughput := computeThroughput()
			throughputString := fmt.Sprintf("2-sec Throughput (Mb/s): %v", ste.ToFixed(throughput, 4))
			if throughput == 0 {
				// As there would be case when no bits sent from local, e.g. service side copy, when throughput = 0, hide it.
				throughputString = ""
			}

			// indicate whether constrained by disk or not
			perfString, diskString := getPerfDisplayText(summary.PerfStrings, summary.PerfConstraint, duration)

			return fmt.Sprintf("%v Done, %v Failed, %v Pending, %v Skipped, %v Total%s, %s%s%s",
				summary.TransfersCompleted,
				summary.TransfersFailed,
				summary.TotalTransfers-(summary.TransfersCompleted+summary.TransfersFailed+summary.TransfersSkipped),
				summary.TransfersSkipped, summary.TotalTransfers, scanningString, perfString, throughputString, diskString)
		}
	})
}
// getPerfDisplayText builds the optional perf-diagnostic and disk-constraint
// fragments of the progress line. The constraint message is suppressed during
// an initial warm-up window, to give throughput time to reach steady-state.
func getPerfDisplayText(perfDiagnosticStrings []string, constraint common.PerfConstraint, durationOfJob time.Duration) (perfString string, diskString string) {
	// named returns default to "", so we only assign when there is something to show
	if shouldDisplayPerfStates() {
		perfString = "[States: " + strings.Join(perfDiagnosticStrings, ", ") + "], "
	}

	const stabilizationSeconds = 30 // arbitrary guestimate of time needed to reach steady-state
	if durationOfJob.Seconds() > stabilizationSeconds && constraint != common.EPerfConstraint.Unknown() {
		diskString = fmt.Sprintf(" (%s may be limiting speed)", constraint)
	}
	return
}
// shouldDisplayPerfStates reports whether perf-diagnostic state strings should
// be included in progress output; it is opt-in via an environment variable.
func shouldDisplayPerfStates() bool {
	return glcm.GetEnvironmentVariable(common.EEnvironmentVariable.ShowPerfStates()) != ""
}
// isStdinPipeIn reports whether stdin is a named pipe, which is taken to mean
// data will arrive on stdin (we cannot know when — it could be immediate or
// minutes later), i.e. this is an upload-from-stdin invocation.
func isStdinPipeIn() (bool, error) {
	// inspect stdin's mode to decide between uploading and downloading
	info, err := os.Stdin.Stat()
	if err != nil {
		return false, fmt.Errorf("fatal: failed to read from Stdin due to error: %s", err)
	}

	isNamedPipe := info.Mode()&os.ModeNamedPipe != 0
	return isNamedPipe, nil
}
// TODO check file size, max is 4.75TB
func init() {
	raw := rawCopyCmdArgs{}

	// cpCmd represents the cp command
	cpCmd := &cobra.Command{
		Use:        "copy [source] [destination]",
		Aliases:    []string{"cp", "c"},
		SuggestFor: []string{"cpy", "cy", "mv"}, //TODO why does message appear twice on the console
		Short:      copyCmdShortDescription,
		Long:       copyCmdLongDescription,
		Example:    copyCmdExample,
		// Args validates and normalizes the positional arguments:
		//   1 arg  => redirection to/from stdin (pipe)
		//   2 args => normal source/destination copy
		Args: func(cmd *cobra.Command, args []string) error {
			switch len(args) {
			case 1: // redirection
				stdinPipeIn, err := isStdinPipeIn()
				if err != nil {
					return fmt.Errorf("fatal: failed to read from Stdin due to error: %s", err)
				}
				if stdinPipeIn {
					// data is being piped in: upload from stdin to the given destination
					raw.src = pipeLocation
					raw.dst = args[0]
				} else {
					// nothing piped in: download the given source to stdout
					raw.src = args[0]
					raw.dst = pipeLocation
				}
			case 2: // normal copy
				raw.src = args[0]
				raw.dst = args[1]
			default:
				return errors.New("wrong number of arguments, please refer to the help page on usage of this command")
			}
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			cooked, err := raw.cook()
			if err != nil {
				glcm.Error("failed to parse user input due to error: " + err.Error())
			}
			glcm.Info("Scanning...")
			cooked.commandString = copyHandlerUtil{}.ConstructCommandStringFromArgs()
			err = cooked.process()
			if err != nil {
				glcm.Error("failed to perform copy command due to error: " + err.Error())
			}
			glcm.SurrenderControl()
		},
	}
	rootCmd.AddCommand(cpCmd)

	// filters change which files get transferred
	cpCmd.PersistentFlags().BoolVar(&raw.followSymlinks, "follow-symlinks", false, "follow symbolic links when uploading from local file system.")
	cpCmd.PersistentFlags().BoolVar(&raw.withSnapshots, "with-snapshots", false, "include the snapshots. Only valid when the source is blobs.")
	cpCmd.PersistentFlags().StringVar(&raw.legacyInclude, "include", "", "only include these files when copying. "+
		"Support use of *. Files should be separated with ';'.")
	// This flag is implemented only for Storage Explorer.
	cpCmd.PersistentFlags().StringVar(&raw.listOfFilesToCopy, "list-of-files", "", "defines the location of json which has the list of only files to be copied")
	cpCmd.PersistentFlags().StringVar(&raw.legacyExclude, "exclude", "", "exclude these files when copying. Support use of *.")
	cpCmd.PersistentFlags().BoolVar(&raw.forceWrite, "overwrite", true, "overwrite the conflicting files/blobs at the destination if this flag is set to true.")
	cpCmd.PersistentFlags().BoolVar(&raw.recursive, "recursive", false, "look into sub-directories recursively when uploading from local file system.")
	cpCmd.PersistentFlags().StringVar(&raw.fromTo, "from-to", "", "optionally specifies the source destination combination. For Example: LocalBlob, BlobLocal, LocalBlobFS.")
	cpCmd.PersistentFlags().StringVar(&raw.excludeBlobType, "exclude-blob-type", "", "optionally specifies the type of blob (BlockBlob/ PageBlob/ AppendBlob) to exclude when copying blobs from Container / Account. Use of "+
		"this flag is not applicable for copying data from non azure-service to service. More than one blob should be separated by ';' ")

	// options change how the transfers are performed
	cpCmd.PersistentFlags().StringVar(&raw.logVerbosity, "log-level", "INFO", "define the log verbosity for the log file, available levels: INFO(all requests/responses), WARNING(slow responses), ERROR(only failed requests), and NONE(no output logs).")
	cpCmd.PersistentFlags().Uint32Var(&raw.blockSizeMB, "block-size-mb", 0, "use this block size (specified in MiB) when uploading to/downloading from Azure Storage. Default is automatically calculated based on file size.")
	cpCmd.PersistentFlags().StringVar(&raw.blobType, "blob-type", "None", "defines the type of blob at the destination. This is used in case of upload / account to account copy")
	cpCmd.PersistentFlags().StringVar(&raw.blockBlobTier, "block-blob-tier", "None", "upload block blob to Azure Storage using this blob tier.")
	cpCmd.PersistentFlags().StringVar(&raw.pageBlobTier, "page-blob-tier", "None", "upload page blob to Azure Storage using this blob tier.")
	cpCmd.PersistentFlags().StringVar(&raw.metadata, "metadata", "", "upload to Azure Storage with these key-value pairs as metadata.")
	cpCmd.PersistentFlags().StringVar(&raw.contentType, "content-type", "", "specifies content type of the file. Implies no-guess-mime-type. Returned on download.")
	cpCmd.PersistentFlags().StringVar(&raw.contentEncoding, "content-encoding", "", "set the content-encoding header. Returned on download.")
	cpCmd.PersistentFlags().StringVar(&raw.contentDisposition, "content-disposition", "", "set the content-disposition header. Returned on download.")
	cpCmd.PersistentFlags().StringVar(&raw.contentLanguage, "content-language", "", "set the content-language header. Returned on download.")
	cpCmd.PersistentFlags().StringVar(&raw.cacheControl, "cache-control", "", "set the cache-control header. Returned on download.")
	cpCmd.PersistentFlags().BoolVar(&raw.noGuessMimeType, "no-guess-mime-type", false, "prevents AzCopy from detecting the content-type based on the extension/content of the file.")
	cpCmd.PersistentFlags().BoolVar(&raw.preserveLastModifiedTime, "preserve-last-modified-time", false, "only available when destination is file system.")
	cpCmd.PersistentFlags().BoolVar(&raw.putMd5, "put-md5", false, "create an MD5 hash of each file, and save the hash as the Content-MD5 property of the destination blob/file. (By default the hash is NOT created.) Only available when uploading.")
	cpCmd.PersistentFlags().StringVar(&raw.md5ValidationOption, "check-md5", common.DefaultHashValidationOption.String(), "specifies how strictly MD5 hashes should be validated when downloading. Only available when downloading. Available options: NoCheck, LogOnly, FailIfDifferent, FailIfDifferentOrMissing.")
	cpCmd.PersistentFlags().BoolVar(&raw.cancelFromStdin, "cancel-from-stdin", false, "true if user wants to cancel the process by passing 'cancel' "+
		"to the standard input. This is mostly used when the application is spawned by another process.")
	cpCmd.PersistentFlags().BoolVar(&raw.background, "background-op", false, "true if user has to perform the operations as a background operation.")
	cpCmd.PersistentFlags().StringVar(&raw.acl, "acl", "", "Access conditions to be used when uploading/downloading from Azure Storage.")
	cpCmd.PersistentFlags().BoolVar(&raw.s2sPreserveProperties, "s2s-preserve-properties", true, "preserve full properties during service to service copy. "+
		"For S3 and Azure File non-single file source, as list operation doesn't return full properties of objects/files, to preserve full properties AzCopy needs to send one additional request per object/file.")
	cpCmd.PersistentFlags().BoolVar(&raw.s2sPreserveAccessTier, "s2s-preserve-access-tier", true, "preserve access tier during service to service copy. "+
		"please refer to https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers to ensure destination storage account supports setting access tier. "+
		"In the cases that setting access tier is not supported, please use s2sPreserveAccessTier=false to bypass copying access tier. ")
	cpCmd.PersistentFlags().BoolVar(&raw.s2sSourceChangeValidation, "s2s-detect-source-changed", false, "check if source has changed after enumerating. "+
		"For S2S copy, as source is a remote resource, validating whether source has changed need additional request costs. ")
	// NOTE: fixed user-facing typo in the help text below ("AvailabeOptions" -> "Available options"),
	// matching the phrasing used by the check-md5 flag.
	cpCmd.PersistentFlags().StringVar(&raw.s2sInvalidMetadataHandleOption, "s2s-handle-invalid-metadata", common.DefaultInvalidMetadataHandleOption.String(), "specifies how invalid metadata keys are handled. Available options: ExcludeIfInvalid, FailIfInvalid, RenameIfInvalid.")

	// s2sGetPropertiesInBackend is an optional flag controlling whether S3 objects' or Azure files' full
	// properties are fetched during enumeration in the frontend, or right before transferring in the ste (backend).
	// The traditional behavior of all existing enumerators is to fetch full properties during enumeration
	// (more specifically, listing), but this can cause a big performance issue for S3 and Azure Files, where
	// listing doesn't return full properties and the enumeration logic fetches properties sequentially.
	// To achieve better performance, while keeping good control over overall goroutine counts, fetching
	// properties in the ste was introduced: properties can be fetched in parallel without creating additional
	// goroutines for this specific job.
	// This hidden flag provides a fallback to the traditional behavior, for when the service supports
	// returning full properties during list.
	cpCmd.PersistentFlags().BoolVar(&raw.s2sGetPropertiesInBackend, "s2s-get-properties-in-backend", true, "get S3 objects' or Azure files' properties in backend. ")

	// not implemented
	cpCmd.PersistentFlags().MarkHidden("acl")

	// permanently hidden
	// Hide the list-of-files flag since it is implemented only for Storage Explorer.
	cpCmd.PersistentFlags().MarkHidden("list-of-files")
	cpCmd.PersistentFlags().MarkHidden("include")
	cpCmd.PersistentFlags().MarkHidden("background-op")
	cpCmd.PersistentFlags().MarkHidden("cancel-from-stdin")
	cpCmd.PersistentFlags().MarkHidden("s2s-get-properties-in-backend")
	cpCmd.PersistentFlags().MarkHidden("with-snapshots") // TODO this flag is not supported right now (was previously MarkHidden'd twice; duplicate call removed)
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"bytes"
"context"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/spf13/cobra"
"net/url"
"os"
"strings"
"time"
)
// paths to azcopy's app-data and log folders; assigned by Execute at startup
var azcopyAppPathFolder string
var azcopyLogPathFolder string

// raw value of the --output-type flag; parsed into azcopyOutputFormat by
// rootCmd's PersistentPreRunE before any subcommand runs
var outputFormatRaw string
var azcopyOutputFormat common.OutputFormat
// rootCmd represents the base command when called without any subcommands
var rootCmd = &cobra.Command{
	Version: common.AzcopyVersion, // will enable the user to see the version info in the standard posix way: --version
	Use:     "azcopy",
	Short:   rootCmdShortDescription,
	Long:    rootCmdLongDescription,
	// PersistentPreRunE runs before every non-help subcommand: it parses the
	// requested output format (propagating it to the lifecycle manager) and
	// kicks off an asynchronous new-version check.
	PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
		err := azcopyOutputFormat.Parse(outputFormatRaw)
		glcm.SetOutputFormat(azcopyOutputFormat)
		if err != nil {
			return err
		}

		// spawn a routine to fetch and compare the local application's version against the latest version available
		// if there's a newer version that can be used, then write the suggestion to stderr
		// however if this takes too long the message won't get printed
		// Note: this function is only triggered for non-help commands
		go detectNewVersion()

		return nil
	},
}
// glcm holds a pointer to the global lifecycle controller, through which
// commands output messages and exit properly.
var glcm = common.GetLifecycleMgr()
// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute(azsAppPathFolder, logPathFolder string) {
	azcopyAppPathFolder = azsAppPathFolder
	azcopyLogPathFolder = logPathFolder

	err := rootCmd.Execute()
	if err != nil {
		glcm.Error(err.Error())
		return
	}

	// our commands all control their own life explicitly with the lifecycle manager;
	// only help commands reach this point, so run the version check synchronously
	// before exiting
	detectNewVersion()
	glcm.Exit(nil, common.EExitCode.Success())
}
func init() {
	rootCmd.PersistentFlags().StringVar(&outputFormatRaw, "output-type", "text", "format of the command's output, the choices include: text, json.")

	// Special flag for generating test data
	// TODO: find a cleaner way to get the value into common, rather than just using it directly as a variable here
	// NOTE: fixed missing space between the concatenated help-string fragments
	// ("...a distinctive" + "extension..." previously rendered as "distinctiveextension").
	rootCmd.PersistentFlags().StringVar(&common.SendRandomDataExt, "send-random-data-ext", "",
		"Files with this extension will not have their actual content sent. Instead, random data will be generated "+
			"and sent. The number of random bytes sent will equal the file size. To be used in testing. To use, use command-line "+
			"tools to create a sparse file of any desired size (but zero bytes actually used on-disk). Choose a distinctive "+
			"extension for the file (e.g. 'azCopySparseFill'). Then set this parameter to that extension (without the dot).")
	// On Windows, to create a sparse file, do something like this from an admin prompt:
	//    fsutil file createnew testfile.AzSparseFill 0
	//    fsutil sparse setflag .\testfile.AzSparseFill
	//    fsutil file seteof .\testfile.AzSparseFill 536870912000
	// Use dd on Linux.
	// Not making this publicly documented yet
	// TODO: add API calls to check that the on-disk size really is zero for the affected files, then make this publicly exposed
	rootCmd.PersistentFlags().MarkHidden("send-random-data-ext")
}
// detectNewVersion downloads the latest published AzCopy version string from a
// well-known blob and, if it is newer than the running binary's version, prints
// an upgrade suggestion. Every failure returns silently: version detection is
// best-effort and must never break the actual command.
func detectNewVersion() {
	const versionMetadataUrl = "https://aka.ms/azcopyv10-version-metadata"

	// step 0: check the Stderr before checking version
	_, err := os.Stderr.Stat()
	if err != nil {
		return
	}

	// step 1: initialize pipeline (anonymous credential — the version blob is public)
	p := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Retry: azblob.RetryOptions{
			Policy:        azblob.RetryPolicyExponential,
			MaxTries:      1,               // try a single time, if network is not available, just fail fast
			TryTimeout:    time.Second * 3, // don't wait for too long
			RetryDelay:    downloadRetryDelay,
			MaxRetryDelay: downloadMaxRetryDelay,
		},
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	})

	// step 2: parse source url
	u, err := url.Parse(versionMetadataUrl)
	if err != nil {
		return
	}

	// step 3: start download
	blobURL := azblob.NewBlobURL(*u, p)
	blobStream, err := blobURL.Download(context.TODO(), 0, azblob.CountToEnd, azblob.BlobAccessConditions{}, false)
	if err != nil {
		return
	}
	blobBody := blobStream.Body(azblob.RetryReaderOptions{MaxRetryRequests: downloadMaxTries})
	defer blobBody.Close()

	// step 4: read newest version str
	buf := new(bytes.Buffer)
	n, err := buf.ReadFrom(blobBody)
	if n == 0 || err != nil {
		return
	}
	// only take the first line, in case the version metadata file is upgraded in the future
	remoteVersion := strings.Split(buf.String(), "\n")[0]

	// step 5: compare remote version to local version to see if there's a newer AzCopy
	v1, err := NewVersion(common.AzcopyVersion)
	if err != nil {
		return
	}
	v2, err := NewVersion(remoteVersion)
	if err != nil {
		return
	}

	if v1.OlderThan(*v2) {
		executablePathSegments := strings.Split(strings.Replace(os.Args[0], "\\", "/", -1), "/")
		executableName := executablePathSegments[len(executablePathSegments)-1]

		// output in info mode instead of stderr, as it was crashing CI jobs of some people
		glcm.Info(executableName + ": A newer version " + remoteVersion + " is available to download\n")
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"fmt"
"net/http"
"net/url"
"strings"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-file-go/azfile"
)
// azureFilesUploader sends a single file to Azure Files, range by range.
type azureFilesUploader struct {
	jptm       IJobPartTransferMgr // manager of the transfer this upload belongs to
	fileURL    azfile.FileURL      // destination file
	chunkSize  uint32              // size of each uploaded range
	numChunks  uint32              // total number of ranges for this file
	pipeline   pipeline.Pipeline   // HTTP pipeline used for service calls
	pacer      *pacer              // pacer applied to request bodies
	md5Channel chan []byte         // delivers the computed MD5 hash to the epilogue
	creationTimeHeaders *azfile.FileHTTPHeaders // pointer so default value, nil, is clearly "wrong" and can't be used by accident
}
// newAzureFilesUploader constructs a sender for an Azure Files destination.
// It clamps the chunk size to the Azure Files maximum (4 MB), derives the
// number of chunks from the source size, and validates the destination URL.
func newAzureFilesUploader(jptm IJobPartTransferMgr, destination string, p pipeline.Pipeline, pacer *pacer, sip ISourceInfoProvider) (ISenderBase, error) {
	info := jptm.Info()

	// compute chunk size
	// If the given chunk Size for the Job is greater than maximum file chunk size i.e 4 MB
	// then chunk size will be 4 MB.
	chunkSize := info.BlockSize
	if chunkSize > common.DefaultAzureFileChunkSize {
		chunkSize = common.DefaultAzureFileChunkSize
		if jptm.ShouldLog(pipeline.LogWarning) {
			jptm.Log(pipeline.LogWarning,
				fmt.Sprintf("Block size %d larger than maximum file chunk size, 4 MB chunk size used", info.BlockSize))
		}
	}

	// compute num chunks
	numChunks := getNumChunks(info.SourceSize, chunkSize)

	// make sure URL is parsable
	destURL, err := url.Parse(destination)
	if err != nil {
		return nil, err
	}

	return &azureFilesUploader{
		jptm:       jptm,
		fileURL:    azfile.NewFileURL(*destURL, p),
		chunkSize:  chunkSize,
		numChunks:  numChunks,
		pipeline:   p,
		pacer:      pacer,
		md5Channel: newMd5Channel(),
	}, nil
}
// ChunkSize returns the size of each range this sender uploads.
func (u *azureFilesUploader) ChunkSize() uint32 {
	return u.chunkSize
}
// NumChunks returns the total number of ranges that make up the file.
func (u *azureFilesUploader) NumChunks() uint32 {
	return u.numChunks
}
// Md5Channel exposes the send side of the channel that delivers the computed
// MD5 hash; the epilogue consumes it when setting Content-MD5.
func (u *azureFilesUploader) Md5Channel() chan<- []byte {
	return u.md5Channel
}
// RemoteFileExists reports whether the destination file already exists, by
// issuing a GetProperties call and interpreting its outcome.
func (u *azureFilesUploader) RemoteFileExists() (bool, error) {
	return remoteObjectExists(u.fileURL.GetProperties(u.jptm.Context()))
}
// Prologue runs once before any chunk is sent: it creates the destination's
// parent directory chain (if needed) and then the file itself, pre-sized to
// the source. The creation headers are saved for re-use in Epilogue.
func (u *azureFilesUploader) Prologue(state common.PrologueState) {
	jptm := u.jptm
	info := jptm.Info()

	// Create the parent directories of the file. Note: the share itself must already exist,
	// since the files are listed from a share or directory.
	err := CreateParentDirToRoot(jptm.Context(), u.fileURL, u.pipeline)
	if err != nil {
		jptm.FailActiveUpload("Creating parent directory", err)
		return
	}

	// Create Azure file with the source size
	fileHTTPHeaders, metaData := jptm.FileDstData(state.LeadingBytes)
	_, err = u.fileURL.Create(jptm.Context(), info.SourceSize, fileHTTPHeaders, metaData)
	if err != nil {
		jptm.FailActiveUpload("Creating file", err)
		return
	}

	// Save headers to re-use, with same values, in epilogue
	u.creationTimeHeaders = &fileHTTPHeaders
}
// GenerateUploadFunc returns the chunk func that uploads one byte range of the
// file. Dummy chunks of zero-size files and ranges that are entirely zeros are
// skipped (for this destination type there is nothing to write for them).
func (u *azureFilesUploader) GenerateUploadFunc(id common.ChunkID, blockIndex int32, reader common.SingleChunkReader, chunkIsWholeFile bool) chunkFunc {
	return createSendToRemoteChunkFunc(u.jptm, id, func() {
		jptm := u.jptm

		defer reader.Close() // In case of memory leak in sparse file case.

		if jptm.Info().SourceSize == 0 {
			// nothing to do, since this is a dummy chunk in a zero-size file, and the prologue will have done all the real work
			return
		}

		if reader.HasPrefetchedEntirelyZeros() {
			// for this destination type, there is no need to upload ranges than consist entirely of zeros
			jptm.Log(pipeline.LogDebug,
				fmt.Sprintf("Not uploading range from %d to %d, all bytes are zero",
					id.OffsetInFile, id.OffsetInFile+reader.Length()))
			return
		}

		// upload the byte range represented by this chunk
		jptm.LogChunkStatus(id, common.EWaitReason.Body())
		body := newLiteRequestBodyPacer(reader, u.pacer)
		_, err := u.fileURL.UploadRange(jptm.Context(), id.OffsetInFile, body, nil)
		if err != nil {
			jptm.FailActiveUpload("Uploading range", err)
			return
		}
	})
}
// Epilogue runs once after all chunks finish. On success it re-PUTs the
// creation-time headers with Content-MD5 added; on failure or cancellation it
// deletes the partially written destination file.
func (u *azureFilesUploader) Epilogue() {
	jptm := u.jptm

	// set content MD5 (only way to do this is to re-PUT all the headers, this time with the MD5 included)
	if jptm.TransferStatus() > 0 {
		tryPutMd5Hash(jptm, u.md5Channel, func(md5Hash []byte) error {
			epilogueHeaders := *u.creationTimeHeaders // saved by Prologue; status > 0 implies Prologue succeeded
			epilogueHeaders.ContentMD5 = md5Hash
			_, err := u.fileURL.SetHTTPHeaders(jptm.Context(), epilogueHeaders)
			return err
		})
	}

	// Cleanup
	if jptm.TransferStatus() <= 0 {
		// If the transfer status is less than or equal to 0
		// then transfer was either failed or cancelled
		// the file created in share needs to be deleted, since it's
		// contents will be at an unknown stage of partial completeness
		// a fresh, bounded context is used because the transfer's own context may already be cancelled
		deletionContext, cancelFn := context.WithTimeout(context.Background(), 2*time.Minute)
		defer cancelFn()
		_, err := u.fileURL.Delete(deletionContext)
		if err != nil {
			// TODO: this was LogInfo, but inside a ShouldLog(LogError) if statement. Should I put it back that way? It was not like that for blobFS
			jptm.Log(pipeline.LogError, fmt.Sprintf("error deleting the (incomplete) file %s. Failed with error %s", u.fileURL.String(), err.Error()))
		}
	}
}
// getParentDirectoryURL gets parent directory URL of an Azure FileURL, by
// stripping the last path segment from the URL.
// NOTE(review): assumes u.Path contains at least one "/" — if it doesn't,
// strings.LastIndex returns -1 and the slice expression panics. Confirm that
// callers always supply a file URL with a non-empty path.
func getParentDirectoryURL(fileURL azfile.FileURL, p pipeline.Pipeline) azfile.DirectoryURL {
	u := fileURL.URL()
	u.Path = u.Path[:strings.LastIndex(u.Path, "/")]
	return azfile.NewDirectoryURL(u, p)
}
// verifyAndHandleCreateErrors handles directory-create errors: StatusConflict
// is ignored, as the directory at that level may already exist. Other errors
// (e.g. http.StatusForbidden) are propagated, since the user should at least
// have read and write permission on the destination — there is no per-directory
// permission, i.e. creating directories is a general permission that applies to
// every level of the path for Azure Files.
//
// FIX: the original used an unchecked type assertion err.(azfile.StorageError),
// which panics whenever Create fails with anything that is not a StorageError
// (e.g. a network/context error). Use the comma-ok form and fall through to
// returning the error instead.
func verifyAndHandleCreateErrors(err error) error {
	if err == nil {
		return nil
	}

	// Note the ServiceCode may actually be AuthenticationFailure when the share failed
	// to be created, if we ever want to create the share as well.
	if sErr, ok := err.(azfile.StorageError); ok &&
		sErr.Response() != nil &&
		sErr.Response().StatusCode == http.StatusConflict {
		return nil // directory already exists at this level — not a failure
	}

	return err
}
// splitWithoutToken splits str on the given token rune and returns the pieces,
// with the token itself (and any resulting empty segments) omitted.
func splitWithoutToken(str string, token rune) []string {
	isSeparator := func(c rune) bool { return c == token }
	return strings.FieldsFunc(str, isSeparator)
}
// CreateParentDirToRoot creates the parent directories of the Azure file, from
// the share root downwards, if the file's immediate parent doesn't exist.
// The share itself must already exist — this function does not create shares.
//
// FIX: the original used unchecked type assertions err.(azfile.StorageError)
// (three of them), which panic whenever GetProperties fails with anything that
// is not a StorageError (e.g. a network/context error). Use the comma-ok form
// and return such errors instead.
func CreateParentDirToRoot(ctx context.Context, fileURL azfile.FileURL, p pipeline.Pipeline) error {
	dirURL := getParentDirectoryURL(fileURL, p)
	dirURLExtension := common.FileURLPartsExtension{FileURLParts: azfile.NewFileURLParts(dirURL.URL())}

	// Check whether the parent dir of the file exists.
	_, err := dirURL.GetProperties(ctx)
	if err == nil {
		// Directly return if parent directory exists.
		return nil
	}

	// Only a 404 means "parent missing, go create it"; anything else
	// (including auth failures — at least read and write permission is
	// needed for the destination) is a real error.
	sErr, ok := err.(azfile.StorageError)
	if !ok || sErr.Response() == nil || sErr.Response().StatusCode != http.StatusNotFound {
		return err
	}

	// File's parent directory doesn't exist: create the directory chain,
	// one segment at a time, starting from the share root.
	segments := splitWithoutToken(dirURLExtension.DirectoryOrFilePath, '/')
	shareURL := azfile.NewShareURL(dirURLExtension.GetShareURL(), p)
	curDirURL := shareURL.NewRootDirectoryURL() // Share should already exist; creating shares isn't supported here
	for _, segment := range segments {
		curDirURL = curDirURL.NewDirectoryURL(segment)
		_, err := curDirURL.Create(ctx, azfile.Metadata{})
		if verifiedErr := verifyAndHandleCreateErrors(err); verifiedErr != nil {
			return verifiedErr
		}
	}
	return nil
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"fmt"
"io/ioutil"
"net/url"
"os"
"path/filepath"
"strings"
"time"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
chk "gopkg.in/check.v1"
minio "github.com/minio/minio-go"
)
// defaultFileSize is the size, in bytes, of locally generated test files.
const defaultFileSize = 1024
// scenarioHelper groups helper methods that set up local/remote test scenarios.
type scenarioHelper struct{}
// specialNames holds file/blob names with non-ASCII characters, spaces and
// percent signs, used to exercise encoding/escaping code paths in the tests.
var specialNames = []string{
	"打麻将.txt",
	"wow such space so much space",
	"打%%#%@#%麻将.txt",
	//"saywut.pdf?yo=bla&WUWUWU=foo&sig=yyy", // TODO this breaks on windows, figure out a way to add it only for tests on Unix
	"coração",
	"আপনার নাম কি",
	"%4509%4254$85140&",
	"Donaudampfschifffahrtselektrizitätenhauptbetriebswerkbauunterbeamtengesellschaft",
	"お名前は何ですか",
	"<NAME>",
	"як вас звати",
}
// generateLocalDirectory creates a fresh temporary directory for a test and returns its path.
func (scenarioHelper) generateLocalDirectory(c *chk.C) (dstDirName string) {
	tempDir, err := ioutil.TempDir("", "AzCopyLocalTest")
	c.Assert(err, chk.IsNil)
	return tempDir
}
// generateLocalFile creates a file at filePath filled with fileSize random bytes,
// creating any missing parent directories, and returns the data that was written.
func (scenarioHelper) generateLocalFile(filePath string, fileSize int) ([]byte, error) {
	// generate the random payload first
	_, data := getRandomDataAndReader(fileSize)
	// make sure all parent directories exist
	if err := os.MkdirAll(filepath.Dir(filePath), os.ModePerm); err != nil {
		return nil, err
	}
	// write the payload out and hand it back for later comparison
	err := ioutil.WriteFile(filePath, data, common.DEFAULT_FILE_PERM)
	return data, err
}
// generateLocalFilesFromList creates one default-size file under dirPath for each name in fileList.
func (s scenarioHelper) generateLocalFilesFromList(c *chk.C, dirPath string, fileList []string) {
	for _, name := range fileList {
		_, err := s.generateLocalFile(filepath.Join(dirPath, name), defaultFileSize)
		c.Assert(err, chk.IsNil)
	}
	// sleep a bit so that the files' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
}
// generateCommonRemoteScenarioForLocal creates 50 local files under dirPath:
// 10 at the top level, 10 each in sub1, sub2 and sub1/sub3/sub5, and 10 with special names.
func (s scenarioHelper) generateCommonRemoteScenarioForLocal(c *chk.C, dirPath string, prefix string) (fileList []string) {
	fileList = make([]string, 0, 50)
	for i := 0; i < 10; i++ {
		for _, name := range []string{
			generateName(prefix+"top", 0),
			generateName(prefix+"sub1/", 0),
			generateName(prefix+"sub2/", 0),
			generateName(prefix+"sub1/sub3/sub5/", 0),
			generateName(prefix+specialNames[i], 0),
		} {
			fileList = append(fileList, name)
			_, err := s.generateLocalFile(filepath.Join(dirPath, name), defaultFileSize)
			c.Assert(err, chk.IsNil)
		}
	}
	// sleep a bit so that the files' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
	return
}
// generateCommonRemoteScenarioForBlob makes 50 blobs with random names:
// 10 at the top level, 10 each under "sub1", "sub2" and "sub1/sub3/sub5",
// and 10 with special (non-ASCII/escaping) characters.
func (scenarioHelper) generateCommonRemoteScenarioForBlob(c *chk.C, containerURL azblob.ContainerURL, prefix string) (blobList []string) {
	blobList = make([]string, 0, 50)
	virtualDirs := []string{"top", "sub1/", "sub2/", "sub1/sub3/sub5/"}
	for i := 0; i < 10; i++ {
		for _, dir := range virtualDirs {
			_, name := createNewBlockBlob(c, containerURL, prefix+dir)
			blobList = append(blobList, name)
		}
		_, specialName := createNewBlockBlob(c, containerURL, prefix+specialNames[i])
		blobList = append(blobList, specialName)
	}
	// sleep a bit so that the blobs' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
	return
}
// generateCommonRemoteScenarioForAzureFile makes 50 Azure files with random names:
// 10 at the top level, 10 each under "sub1", "sub2" and "sub1/sub3/sub5",
// and 10 with special (non-ASCII/escaping) characters.
func (scenarioHelper) generateCommonRemoteScenarioForAzureFile(c *chk.C, shareURL azfile.ShareURL, prefix string) (fileList []string) {
	fileList = make([]string, 0, 50)
	dirs := []string{"top", "sub1/", "sub2/", "sub1/sub3/sub5/"}
	for i := 0; i < 10; i++ {
		for _, dir := range dirs {
			_, name := createNewAzureFile(c, shareURL, prefix+dir)
			fileList = append(fileList, name)
		}
		_, specialName := createNewAzureFile(c, shareURL, prefix+specialNames[i])
		fileList = append(fileList, specialName)
	}
	// sleep a bit so that the files' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
	return
}
// generateBlobsFromList uploads one block blob with the default payload for each name in blobList.
func (scenarioHelper) generateBlobsFromList(c *chk.C, containerURL azblob.ContainerURL, blobList []string) {
	for _, blobName := range blobList {
		blobURL := containerURL.NewBlockBlobURL(blobName)
		uploadResp, err := blobURL.Upload(ctx, strings.NewReader(blockBlobDefaultData), azblob.BlobHTTPHeaders{},
			nil, azblob.BlobAccessConditions{})
		c.Assert(err, chk.IsNil)
		c.Assert(uploadResp.StatusCode(), chk.Equals, 201)
	}
	// sleep a bit so that the blobs' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
}
// generateBlockBlobWithAccessTier uploads a block blob with the default payload,
// then sets the requested access tier on it.
func (scenarioHelper) generateBlockBlobWithAccessTier(c *chk.C, containerURL azblob.ContainerURL, blobName string, accessTier azblob.AccessTierType) {
	blobURL := containerURL.NewBlockBlobURL(blobName)
	uploadResp, err := blobURL.Upload(ctx, strings.NewReader(blockBlobDefaultData), azblob.BlobHTTPHeaders{},
		nil, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)
	c.Assert(uploadResp.StatusCode(), chk.Equals, 201)
	_, err = blobURL.SetTier(ctx, accessTier, azblob.LeaseAccessConditions{})
	c.Assert(err, chk.IsNil)
}
// generateObjects uploads one S3 object with the default payload for each name in objectList.
func (scenarioHelper) generateObjects(c *chk.C, client *minio.Client, bucketName string, objectList []string) {
	expectedSize := int64(len(objectDefaultData))
	for _, objectName := range objectList {
		bytesUploaded, err := client.PutObjectWithContext(ctx, bucketName, objectName, strings.NewReader(objectDefaultData), expectedSize, minio.PutObjectOptions{})
		c.Assert(err, chk.IsNil)
		c.Assert(bytesUploaded, chk.Equals, expectedSize)
	}
}
// generateFlatFiles uploads one Azure file with the default payload, at the share root,
// for each name in fileList.
func (scenarioHelper) generateFlatFiles(c *chk.C, shareURL azfile.ShareURL, fileList []string) {
	for _, fileName := range fileList {
		fileURL := shareURL.NewRootDirectoryURL().NewFileURL(fileName)
		err := azfile.UploadBufferToAzureFile(ctx, []byte(fileDefaultData), fileURL, azfile.UploadToAzureFileOptions{})
		c.Assert(err, chk.IsNil)
	}
	// sleep a bit so that the files' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
}
// generateCommonRemoteScenarioForS3 makes 50 objects with random names:
// 10 of them at the top level
// 10 of them in sub dir "sub1"
// 10 of them in sub dir "sub2"
// 10 of them in deeper sub dir "sub1/sub3/sub5"
// 10 of them with special characters
// When returnObjectListWithBucketName is true, each returned entry is prefixed
// with "/<bucketName>" in addition to the leading path separator.
func (scenarioHelper) generateCommonRemoteScenarioForS3(c *chk.C, client *minio.Client, bucketName string, prefix string, returnObjectListWithBucketName bool) (objectList []string) {
	objectList = make([]string, 50)
	for i := 0; i < 10; i++ {
		objectName1 := createNewObject(c, client, bucketName, prefix+"top")
		objectName2 := createNewObject(c, client, bucketName, prefix+"sub1/")
		objectName3 := createNewObject(c, client, bucketName, prefix+"sub2/")
		objectName4 := createNewObject(c, client, bucketName, prefix+"sub1/sub3/sub5/")
		objectName5 := createNewObject(c, client, bucketName, prefix+specialNames[i])
		// Note: common.AZCOPY_PATH_SEPARATOR_STRING is added before the bucket or object name, as in the change to minimize JobPartPlan file size,
		// transfer.Source & transfer.Destination (after trimming the SourceRoot and DestinationRoot) carry an AZCOPY_PATH_SEPARATOR_STRING prefix
		// when the user-provided source & destination are without a / suffix, which is the case for scenarioHelper generated URLs.
		bucketPath := ""
		if returnObjectListWithBucketName {
			bucketPath = common.AZCOPY_PATH_SEPARATOR_STRING + bucketName
		}
		objectList[5*i] = bucketPath + common.AZCOPY_PATH_SEPARATOR_STRING + objectName1
		objectList[5*i+1] = bucketPath + common.AZCOPY_PATH_SEPARATOR_STRING + objectName2
		objectList[5*i+2] = bucketPath + common.AZCOPY_PATH_SEPARATOR_STRING + objectName3
		objectList[5*i+3] = bucketPath + common.AZCOPY_PATH_SEPARATOR_STRING + objectName4
		objectList[5*i+4] = bucketPath + common.AZCOPY_PATH_SEPARATOR_STRING + objectName5
	}
	// sleep a bit so that the objects' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
	return
}
// generateAzureFilesFromList creates each Azure file named in fileList (default size,
// empty content), creating any missing parent directories first.
func (scenarioHelper) generateAzureFilesFromList(c *chk.C, shareURL azfile.ShareURL, fileList []string) {
	for _, relativePath := range fileList {
		fileURL := shareURL.NewRootDirectoryURL().NewFileURL(relativePath)
		// the directory chain must exist before the file can be created
		generateParentsForAzureFile(c, fileURL)
		// now create the file itself
		createResp, err := fileURL.Create(ctx, defaultAzureFileSizeInBytes, azfile.FileHTTPHeaders{}, azfile.Metadata{})
		c.Assert(err, chk.IsNil)
		c.Assert(createResp.StatusCode(), chk.Equals, 201)
	}
	// sleep a bit so that the files' lmts are guaranteed to be in the past
	time.Sleep(time.Millisecond * 1500)
}
// convertListToMap builds a map keyed by the list entries (values are always 0),
// emulating set-membership lookups since Go has no built-in set type.
func (scenarioHelper) convertListToMap(list []string) map[string]int {
	set := map[string]int{}
	for _, entry := range list {
		set[entry] = 0
	}
	return set
}
// shaveOffPrefix returns a new slice in which the given prefix has been trimmed
// from every entry of list (entries lacking the prefix are copied unchanged).
func (scenarioHelper) shaveOffPrefix(list []string, prefix string) []string {
	trimmed := make([]string, len(list))
	for i := range list {
		trimmed[i] = strings.TrimPrefix(list[i], prefix)
	}
	return trimmed
}
// getRawContainerURLWithSAS returns a SAS-authenticated URL for the named container.
func (scenarioHelper) getRawContainerURLWithSAS(c *chk.C, containerName string) url.URL {
	accountName, accountKey := getAccountAndKey()
	credential, err := azblob.NewSharedKeyCredential(accountName, accountKey)
	c.Assert(err, chk.IsNil)
	return getContainerURLWithSAS(c, *credential, containerName).URL()
}
// getRawBlobURLWithSAS returns a SAS-authenticated URL for the named blob inside the container.
func (scenarioHelper) getRawBlobURLWithSAS(c *chk.C, containerName string, blobName string) url.URL {
	accountName, accountKey := getAccountAndKey()
	credential, err := azblob.NewSharedKeyCredential(accountName, accountKey)
	c.Assert(err, chk.IsNil)
	containerURLWithSAS := getContainerURLWithSAS(c, *credential, containerName)
	return containerURLWithSAS.NewBlockBlobURL(blobName).URL()
}
// getRawBlobServiceURLWithSAS returns a SAS-authenticated URL for the whole blob service.
func (scenarioHelper) getRawBlobServiceURLWithSAS(c *chk.C) url.URL {
	accountName, accountKey := getAccountAndKey()
	credential, err := azblob.NewSharedKeyCredential(accountName, accountKey)
	c.Assert(err, chk.IsNil)
	serviceURLWithSAS := getServiceURLWithSAS(c, *credential)
	return serviceURLWithSAS.URL()
}
// getBlobServiceURL builds a shared-key-authenticated ServiceURL for the test storage account.
func (scenarioHelper) getBlobServiceURL(c *chk.C) azblob.ServiceURL {
	accountName, accountKey := getAccountAndKey()
	credential, err := azblob.NewSharedKeyCredential(accountName, accountKey)
	c.Assert(err, chk.IsNil)
	// build and parse the account endpoint, validating it parses successfully
	parsedURL, err := url.Parse(fmt.Sprintf("https://%s.blob.core.windows.net", credential.AccountName()))
	c.Assert(err, chk.IsNil)
	return azblob.NewServiceURL(*parsedURL, azblob.NewPipeline(credential, azblob.PipelineOptions{}))
}
// getContainerURL returns an authenticated ContainerURL for the named container.
func (s scenarioHelper) getContainerURL(c *chk.C, containerName string) azblob.ContainerURL {
	return s.getBlobServiceURL(c).NewContainerURL(containerName)
}
// getRawS3AccountURL returns the S3 account endpoint for the given region
// ("" means the default, region-less endpoint).
func (scenarioHelper) getRawS3AccountURL(c *chk.C, region string) url.URL {
	regionSegment := ""
	if region != "" {
		regionSegment = "-" + region
	}
	parsed, err := url.Parse(fmt.Sprintf("https://s3%s.amazonaws.com", regionSegment))
	c.Assert(err, chk.IsNil)
	return *parsed
}
// getRawS3BucketURL returns a path-style URL for the given bucket.
// TODO: Possibly add virtual-hosted-style and dual stack support. Currently use path style for testing.
func (scenarioHelper) getRawS3BucketURL(c *chk.C, region string, bucketName string) url.URL {
	regionSegment := ""
	if region != "" {
		regionSegment = "-" + region
	}
	parsed, err := url.Parse(fmt.Sprintf("https://s3%s.amazonaws.com/%s", regionSegment, bucketName))
	c.Assert(err, chk.IsNil)
	return *parsed
}
// getRawS3ObjectURL returns a path-style URL for the given object in the given bucket.
func (scenarioHelper) getRawS3ObjectURL(c *chk.C, region string, bucketName string, objectName string) url.URL {
	regionSegment := ""
	if region != "" {
		regionSegment = "-" + region
	}
	parsed, err := url.Parse(fmt.Sprintf("https://s3%s.amazonaws.com/%s/%s", regionSegment, bucketName, objectName))
	c.Assert(err, chk.IsNil)
	return *parsed
}
// getRawFileURLWithSAS returns a SAS-authenticated URL for the named file at the share root.
func (scenarioHelper) getRawFileURLWithSAS(c *chk.C, shareName string, fileName string) url.URL {
	credential, err := getGenericCredentialForFile("")
	c.Assert(err, chk.IsNil)
	shareURLWithSAS := getShareURLWithSAS(c, *credential, shareName)
	return shareURLWithSAS.NewRootDirectoryURL().NewFileURL(fileName).URL()
}
// getRawShareURLWithSAS returns a SAS-authenticated URL for the named share.
func (scenarioHelper) getRawShareURLWithSAS(c *chk.C, shareName string) url.URL {
	accountName, accountKey := getAccountAndKey()
	credential, err := azfile.NewSharedKeyCredential(accountName, accountKey)
	c.Assert(err, chk.IsNil)
	return getShareURLWithSAS(c, *credential, shareName).URL()
}
// blobExists reports whether the blob responds successfully to a GetProperties call.
func (scenarioHelper) blobExists(blobURL azblob.BlobURL) bool {
	_, err := blobURL.GetProperties(context.Background(), azblob.BlobAccessConditions{})
	// idiomatic: return the comparison directly instead of if/return true/return false
	return err == nil
}
// containerExists reports whether the container responds successfully to a GetProperties call.
func (scenarioHelper) containerExists(containerURL azblob.ContainerURL) bool {
	_, err := containerURL.GetProperties(context.Background(), azblob.LeaseAccessConditions{})
	// idiomatic: return the comparison directly instead of if/return true/return false
	return err == nil
}
// runSyncAndVerify cooks the raw sync arguments, runs the sync, and hands the
// resulting error to verifier, which decides whether it was expected.
func runSyncAndVerify(c *chk.C, raw rawSyncCmdArgs, verifier func(err error)) {
	// the simulated user input should parse properly
	cooked, err := raw.cook()
	c.Assert(err, chk.IsNil)
	// the enumeration ends when process() returns; the verifier knows
	// whether the resulting error is expected or not
	verifier(cooked.process())
}
// runCopyAndVerify cooks the raw copy arguments, runs the copy, and hands the
// resulting error to verifier, which decides whether it was expected.
func runCopyAndVerify(c *chk.C, raw rawCopyCmdArgs, verifier func(err error)) {
	// the simulated user input should parse properly
	cooked, err := raw.cook()
	c.Assert(err, chk.IsNil)
	// the enumeration ends when process() returns; the verifier knows
	// whether the resulting error is expected or not
	verifier(cooked.process())
}
// validateUploadTransfersAreScheduled validates scheduled transfers for an upload:
// the local source is not URL-encoded, the remote destination is.
func validateUploadTransfersAreScheduled(c *chk.C, sourcePrefix string, destinationPrefix string, expectedTransfers []string, mockedRPC interceptor) {
	validateCopyTransfersAreScheduled(c, false, true, sourcePrefix, destinationPrefix, expectedTransfers, mockedRPC)
}
// validateDownloadTransfersAreScheduled validates scheduled transfers for a download:
// the remote source is URL-encoded, the local destination is not.
func validateDownloadTransfersAreScheduled(c *chk.C, sourcePrefix string, destinationPrefix string, expectedTransfers []string, mockedRPC interceptor) {
	validateCopyTransfersAreScheduled(c, true, false, sourcePrefix, destinationPrefix, expectedTransfers, mockedRPC)
}
// validateCopyTransfersAreScheduled asserts that the transfers captured by mockedRPC
// match expectedTransfers exactly: same count, each source/destination pair shares the
// same relative path (after stripping the prefixes and, where flagged, URL-unescaping),
// and each relative path appears in the expected list.
func validateCopyTransfersAreScheduled(c *chk.C, isSrcEncoded bool, isDstEncoded bool, sourcePrefix string, destinationPrefix string, expectedTransfers []string, mockedRPC interceptor) {
	// validate that the right number of transfers were scheduled
	c.Assert(len(mockedRPC.transfers), chk.Equals, len(expectedTransfers))
	// validate that the right transfers were sent
	lookupMap := scenarioHelper{}.convertListToMap(expectedTransfers)
	for _, transfer := range mockedRPC.transfers {
		srcRelativeFilePath := strings.TrimPrefix(transfer.Source, sourcePrefix)
		dstRelativeFilePath := strings.TrimPrefix(transfer.Destination, destinationPrefix)
		// remote paths arrive URL-encoded; decode before comparing with local paths
		if isSrcEncoded {
			srcRelativeFilePath, _ = url.PathUnescape(srcRelativeFilePath)
		}
		if isDstEncoded {
			dstRelativeFilePath, _ = url.PathUnescape(dstRelativeFilePath)
		}
		// the relative paths should be equal
		c.Assert(srcRelativeFilePath, chk.Equals, dstRelativeFilePath)
		// look up the path from the expected transfers, make sure it exists
		_, transferExist := lookupMap[srcRelativeFilePath]
		c.Assert(transferExist, chk.Equals, true)
	}
}
// validateRemoveTransfersAreScheduled asserts that the delete transfers captured by
// mockedRPC match expectedTransfers: same count, and every source (URL-unescaped when
// flagged) appears in the expected list.
func validateRemoveTransfersAreScheduled(c *chk.C, isSrcEncoded bool, expectedTransfers []string, mockedRPC interceptor) {
	// validate that the right number of transfers were scheduled
	c.Assert(len(mockedRPC.transfers), chk.Equals, len(expectedTransfers))
	// validate that the right transfers were sent
	lookupMap := scenarioHelper{}.convertListToMap(expectedTransfers)
	for _, transfer := range mockedRPC.transfers {
		srcRelativeFilePath := transfer.Source
		if isSrcEncoded {
			srcRelativeFilePath, _ = url.PathUnescape(srcRelativeFilePath)
		}
		// look up the source from the expected transfers, make sure it exists
		_, srcExist := lookupMap[srcRelativeFilePath]
		c.Assert(srcExist, chk.Equals, true)
	}
}
// getDefaultSyncRawInput returns raw sync-command args with src/dst set, recursion on,
// destination deletion enabled, and every other option at its default.
func getDefaultSyncRawInput(src, dst string) rawSyncCmdArgs {
	return rawSyncCmdArgs{
		src:                 src,
		dst:                 dst,
		recursive:           true,
		logVerbosity:        defaultLogVerbosityForSync,
		deleteDestination:   common.EDeleteDestination.True().String(),
		md5ValidationOption: common.DefaultHashValidationOption.String(),
	}
}
// getDefaultCopyRawInput returns raw copy-command args with src/dst set and
// every other option at its default.
func getDefaultCopyRawInput(src string, dst string) rawCopyCmdArgs {
	raw := rawCopyCmdArgs{
		src: src,
		dst: dst,
	}
	raw.logVerbosity = defaultLogVerbosityForSync
	raw.blobType = common.EBlobType.None().String()
	raw.blockBlobTier = common.EBlockBlobTier.None().String()
	raw.pageBlobTier = common.EPageBlobTier.None().String()
	raw.md5ValidationOption = common.DefaultHashValidationOption.String()
	raw.s2sInvalidMetadataHandleOption = defaultS2SInvalideMetadataHandleOption.String()
	return raw
}
// getDefaultRemoveRawInput returns raw remove-command args targeting either blob
// storage (targetingBlob true) or file storage, with other options at their defaults.
func getDefaultRemoveRawInput(src string, targetingBlob bool) rawCopyCmdArgs {
	fromTo := common.EFromTo.FileTrash()
	if targetingBlob {
		fromTo = common.EFromTo.BlobTrash()
	}
	raw := rawCopyCmdArgs{
		src:    src,
		fromTo: fromTo.String(),
	}
	raw.logVerbosity = defaultLogVerbosityForSync
	raw.blobType = common.EBlobType.None().String()
	raw.blockBlobTier = common.EBlockBlobTier.None().String()
	raw.pageBlobTier = common.EPageBlobTier.None().String()
	raw.md5ValidationOption = common.DefaultHashValidationOption.String()
	raw.s2sInvalidMetadataHandleOption = defaultS2SInvalideMetadataHandleOption.String()
	return raw
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
chk "gopkg.in/check.v1"
"path/filepath"
"strings"
)
// genericTraverserSuite holds tests that verify the local, blob and Azure-file
// traversers produce consistent results over equivalent content.
type genericTraverserSuite struct{}
// register the suite with gocheck
var _ = chk.Suite(&genericTraverserSuite{})
// validate traversing a single Blob, a single Azure File, and a single local file
// compare that the traversers get consistent results
func (s *genericTraverserSuite) TestTraverserWithSingleObject(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	fsu := getFSU()
	shareURL, shareName := createNewAzureShare(c, fsu)
	defer deleteShare(c, shareURL)
	// test two scenarios, either blob is at the root virtual dir, or inside sub virtual dirs
	// NOTE(review): the temp directory from generateLocalDirectory is never removed — confirm whether cleanup is intentional for these tests
	for _, storedObjectName := range []string{"sub1/sub2/singleblobisbest", "nosubsingleblob", "满汉全席.txt"} {
		// set up the container with a single blob
		blobList := []string{storedObjectName}
		scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
		// set up the directory as a single file
		dstDirName := scenarioHelper{}.generateLocalDirectory(c)
		dstFileName := storedObjectName
		scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, blobList)
		// construct a local traverser
		localTraverser := newLocalTraverser(filepath.Join(dstDirName, dstFileName), false, func() {})
		// invoke the local traversal with a dummy processor
		localDummyProcessor := dummyProcessor{}
		err := localTraverser.traverse(localDummyProcessor.process, nil)
		c.Assert(err, chk.IsNil)
		c.Assert(len(localDummyProcessor.record), chk.Equals, 1)
		// construct a blob traverser
		ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)
		p := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
		rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, blobList[0])
		blobTraverser := newBlobTraverser(&rawBlobURLWithSAS, p, ctx, false, func() {})
		// invoke the blob traversal with a dummy processor
		blobDummyProcessor := dummyProcessor{}
		err = blobTraverser.traverse(blobDummyProcessor.process, nil)
		c.Assert(err, chk.IsNil)
		c.Assert(len(blobDummyProcessor.record), chk.Equals, 1)
		// assert the important info are correct
		c.Assert(localDummyProcessor.record[0].name, chk.Equals, blobDummyProcessor.record[0].name)
		c.Assert(localDummyProcessor.record[0].relativePath, chk.Equals, blobDummyProcessor.record[0].relativePath)
		// Azure File cannot handle names with '/' in them
		if !strings.Contains(storedObjectName, "/") {
			// set up the Azure Share with a single file
			fileList := []string{storedObjectName}
			scenarioHelper{}.generateAzureFilesFromList(c, shareURL, fileList)
			// construct an Azure file traverser
			filePipeline := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
			rawFileURLWithSAS := scenarioHelper{}.getRawFileURLWithSAS(c, shareName, fileList[0])
			azureFileTraverser := newFileTraverser(&rawFileURLWithSAS, filePipeline, ctx, false, func() {})
			// invoke the file traversal with a dummy processor
			fileDummyProcessor := dummyProcessor{}
			err = azureFileTraverser.traverse(fileDummyProcessor.process, nil)
			c.Assert(err, chk.IsNil)
			c.Assert(len(fileDummyProcessor.record), chk.Equals, 1)
			// the Azure-file result must agree with the local result too
			c.Assert(localDummyProcessor.record[0].relativePath, chk.Equals, fileDummyProcessor.record[0].relativePath)
			c.Assert(localDummyProcessor.record[0].name, chk.Equals, fileDummyProcessor.record[0].name)
		}
	}
}
// validate traversing a container, a share, and a local directory containing the same objects
// compare that traversers get consistent results
func (s *genericTraverserSuite) TestTraverserContainerAndLocalDirectory(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	fsu := getFSU()
	shareURL, shareName := createNewAzureShare(c, fsu)
	defer deleteShare(c, shareURL)
	// set up the container with numerous blobs
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	c.Assert(containerURL, chk.NotNil)
	// set up an Azure File Share with the same files
	scenarioHelper{}.generateAzureFilesFromList(c, shareURL, fileList)
	// set up a local directory with the same files
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, fileList)
	// test two scenarios, either recursive or not
	for _, isRecursiveOn := range []bool{true, false} {
		// construct a local traverser
		localTraverser := newLocalTraverser(dstDirName, isRecursiveOn, func() {})
		// invoke the local traversal with an indexer
		// so that the results are indexed for easy validation
		localIndexer := newObjectIndexer()
		err := localTraverser.traverse(localIndexer.store, nil)
		c.Assert(err, chk.IsNil)
		// construct a blob traverser
		ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)
		p := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
		rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
		blobTraverser := newBlobTraverser(&rawContainerURLWithSAS, p, ctx, isRecursiveOn, func() {})
		// invoke the blob traversal with a dummy processor
		blobDummyProcessor := dummyProcessor{}
		err = blobTraverser.traverse(blobDummyProcessor.process, nil)
		c.Assert(err, chk.IsNil)
		// construct an Azure File traverser
		filePipeline := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
		rawFileURLWithSAS := scenarioHelper{}.getRawShareURLWithSAS(c, shareName)
		azureFileTraverser := newFileTraverser(&rawFileURLWithSAS, filePipeline, ctx, isRecursiveOn, func() {})
		// invoke the file traversal with a dummy processor
		fileDummyProcessor := dummyProcessor{}
		err = azureFileTraverser.traverse(fileDummyProcessor.process, nil)
		c.Assert(err, chk.IsNil)
		// make sure the results are the same
		c.Assert(len(blobDummyProcessor.record), chk.Equals, len(localIndexer.indexMap))
		c.Assert(len(fileDummyProcessor.record), chk.Equals, len(localIndexer.indexMap))
		// every remote record must have a matching local file with the same name;
		// in non-recursive mode no record may contain a path separator
		for _, storedObject := range append(blobDummyProcessor.record, fileDummyProcessor.record...) {
			correspondingLocalFile, present := localIndexer.indexMap[storedObject.relativePath]
			c.Assert(present, chk.Equals, true)
			c.Assert(correspondingLocalFile.name, chk.Equals, storedObject.name)
			if !isRecursiveOn {
				c.Assert(strings.Contains(storedObject.relativePath, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
			}
		}
	}
}
// validate traversing a virtual and a local directory containing the same objects
// compare that blob and local traversers get consistent results
func (s *genericTraverserSuite) TestTraverserWithVirtualAndLocalDirectory(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	fsu := getFSU()
	shareURL, shareName := createNewAzureShare(c, fsu)
	defer deleteShare(c, shareURL)
	// set up the container with numerous blobs under one virtual directory
	virDirName := "virdir"
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, virDirName+"/")
	c.Assert(containerURL, chk.NotNil)
	// set up an Azure File Share with the same files
	scenarioHelper{}.generateAzureFilesFromList(c, shareURL, fileList)
	// set up the destination with a folder that have the exact same files
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, fileList)
	// test two scenarios, either recursive or not
	for _, isRecursiveOn := range []bool{true, false} {
		// construct a local traverser rooted at the virtual-directory equivalent
		localTraverser := newLocalTraverser(filepath.Join(dstDirName, virDirName), isRecursiveOn, func() {})
		// invoke the local traversal with an indexer
		// so that the results are indexed for easy validation
		localIndexer := newObjectIndexer()
		err := localTraverser.traverse(localIndexer.store, nil)
		c.Assert(err, chk.IsNil)
		// construct a blob traverser rooted at the virtual directory
		ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)
		p := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
		rawVirDirURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, virDirName)
		blobTraverser := newBlobTraverser(&rawVirDirURLWithSAS, p, ctx, isRecursiveOn, func() {})
		// invoke the blob traversal with a dummy processor
		blobDummyProcessor := dummyProcessor{}
		err = blobTraverser.traverse(blobDummyProcessor.process, nil)
		c.Assert(err, chk.IsNil)
		// construct an Azure File traverser rooted at the directory
		filePipeline := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
		rawFileURLWithSAS := scenarioHelper{}.getRawFileURLWithSAS(c, shareName, virDirName)
		azureFileTraverser := newFileTraverser(&rawFileURLWithSAS, filePipeline, ctx, isRecursiveOn, func() {})
		// invoke the file traversal with a dummy processor
		fileDummyProcessor := dummyProcessor{}
		err = azureFileTraverser.traverse(fileDummyProcessor.process, nil)
		c.Assert(err, chk.IsNil)
		// make sure the results are the same
		c.Assert(len(blobDummyProcessor.record), chk.Equals, len(localIndexer.indexMap))
		c.Assert(len(fileDummyProcessor.record), chk.Equals, len(localIndexer.indexMap))
		// every remote record must match a local file by name and relative path;
		// the local copies were written last, so they must be more recent
		for _, storedObject := range append(blobDummyProcessor.record, fileDummyProcessor.record...) {
			correspondingLocalFile, present := localIndexer.indexMap[storedObject.relativePath]
			c.Assert(present, chk.Equals, true)
			c.Assert(correspondingLocalFile.name, chk.Equals, storedObject.name)
			c.Assert(correspondingLocalFile.isMoreRecentThan(storedObject), chk.Equals, true)
			if !isRecursiveOn {
				c.Assert(strings.Contains(storedObject.relativePath, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
			}
		}
	}
}
<file_sep>version: '3'
# Compose service that builds and runs azcopy inside a container.
services:
  azure-storage-azcopy:
    build:
      context: .
      dockerfile: Dockerfile
    # mount the repo at WORK_DIR so the container sees the live source tree
    volumes:
      - .:${WORK_DIR}
    working_dir: ${WORK_DIR}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"errors"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
)
// validateFromTo reconciles the user-specified --from-to value with the FromTo
// inferred from the source/destination arguments, returning the value to use or
// an error when they are inconsistent or nothing could be determined.
func validateFromTo(src, dst string, userSpecifiedFromTo string) (common.FromTo, error) {
	inferred := inferFromTo(src, dst)
	// No explicit --from-to: rely entirely on inference.
	if userSpecifiedFromTo == "" {
		if inferred != common.EFromTo.Unknown() {
			return inferred, nil
		}
		return common.EFromTo.Unknown(), fmt.Errorf("the inferred source/destination combination is currently not supported. Please post an issue on Github if support for this scenario is desired")
	}
	// User explicitly specified FromTo; it must at least parse.
	var userFromTo common.FromTo
	if err := userFromTo.Parse(userSpecifiedFromTo); err != nil {
		return common.EFromTo.Unknown(), fmt.Errorf("invalid --from-to value specified: %q", userSpecifiedFromTo)
	}
	switch {
	case inferred == common.EFromTo.Unknown(),
		inferred == userFromTo,
		userFromTo == common.EFromTo.BlobTrash(),
		userFromTo == common.EFromTo.FileTrash():
		// Either we couldn't infer a FromTo, what we inferred matches what the
		// user specified, or the user chose a trash (delete) target: accept it.
		return userFromTo, nil
	}
	// What we inferred contradicts what the user specified.
	return common.EFromTo.Unknown(), errors.New("the specified --from-to switch is inconsistent with the specified source/destination combination")
}
// inferFromTo classifies both arguments and maps each supported
// (source, destination) location pair onto its FromTo value. Returns
// Unknown (after logging a hint about --from-to) when either side
// cannot be classified, or when the pair is unsupported.
func inferFromTo(src, dst string) common.FromTo {
	srcLocation := inferArgumentLocation(src)
	if srcLocation == srcLocation.Unknown() {
		glcm.Info("Cannot infer source location of " +
			common.URLStringExtension(src).RedactSecretQueryParamForLogging() +
			". Please specify the --from-to switch")
		return common.EFromTo.Unknown()
	}
	dstLocation := inferArgumentLocation(dst)
	if dstLocation == dstLocation.Unknown() {
		glcm.Info("Cannot infer destination location of " +
			common.URLStringExtension(dst).RedactSecretQueryParamForLogging() +
			". Please specify the --from-to switch")
		return common.EFromTo.Unknown()
	}
	// Dispatch on the source location first, then on the destination.
	switch srcLocation {
	case common.ELocation.Local():
		switch dstLocation {
		case common.ELocation.Blob():
			return common.EFromTo.LocalBlob()
		case common.ELocation.File():
			return common.EFromTo.LocalFile()
		case common.ELocation.BlobFS():
			return common.EFromTo.LocalBlobFS()
		}
	case common.ELocation.Blob():
		switch dstLocation {
		case common.ELocation.Local():
			return common.EFromTo.BlobLocal()
		case common.ELocation.Pipe():
			return common.EFromTo.BlobPipe()
		case common.ELocation.Blob():
			return common.EFromTo.BlobBlob()
		}
	case common.ELocation.File():
		switch dstLocation {
		case common.ELocation.Local():
			return common.EFromTo.FileLocal()
		case common.ELocation.Blob():
			return common.EFromTo.FileBlob()
		}
	case common.ELocation.Pipe():
		if dstLocation == common.ELocation.Blob() {
			return common.EFromTo.PipeBlob()
		}
	case common.ELocation.BlobFS():
		if dstLocation == common.ELocation.Local() {
			return common.EFromTo.BlobFSLocal()
		}
	case common.ELocation.S3():
		if dstLocation == common.ELocation.Blob() {
			return common.EFromTo.S3Blob()
		}
	}
	return common.EFromTo.Unknown()
}
// inferArgumentLocation classifies a command-line argument as the pipe
// pseudo-location, a remote URL (Blob / File / BlobFS / S3), or a local path.
// Returns Unknown when the argument looks like an https URL but matches no
// known storage endpoint.
func inferArgumentLocation(arg string) common.Location {
	if arg == pipeLocation {
		return common.ELocation.Pipe()
	}
	if startsWith(arg, "https") {
		// Let's try to parse the argument as a URL
		u, err := url.Parse(arg)
		// NOTE: sometimes a local path can also be parsed as a URL. To avoid
		// misclassifying it, require a successful parse AND at least one URL
		// component. The parentheses are essential: in the original
		// `err == nil && u.Scheme != "" || u.Host != "" || u.Path != ""`,
		// && binds tighter than ||, so on a parse failure (where u is nil)
		// the u.Host operand was still evaluated — a nil-pointer dereference.
		if err == nil && (u.Scheme != "" || u.Host != "" || u.Path != "") {
			// Is the argument a URL to blob storage?
			switch host := strings.ToLower(u.Host); true {
			// Azure Stack does not have the core.windows.net suffix,
			// so match only on the service sub-domain markers.
			case strings.Contains(host, ".blob"):
				return common.ELocation.Blob()
			case strings.Contains(host, ".file"):
				return common.ELocation.File()
			case strings.Contains(host, ".dfs"):
				return common.ELocation.BlobFS()
			}
			if common.IsS3URL(*u) {
				return common.ELocation.S3()
			}
		}
	} else {
		// Anything that doesn't look like an https URL is assumed local.
		// (A former os.Stat-based existence check was intentionally removed.)
		return common.ELocation.Local()
	}
	return common.ELocation.Unknown()
}
<file_sep># AzCopy v10
## About
AzCopy (v10) is the next-generation command-line utility designed for copying data to/from Microsoft Azure Blob and File, using simple commands designed for optimal performance. You can copy data between a file system and a storage account, or between storage accounts.
## Features
* Copy data from Azure Blob containers/File shares to File system, and vice versa
* Copy block blobs between two Azure Storage accounts
* Sync a directory in local file system to Azure Blob, or vice versa
* List/Remove files and blobs in a given path
* Supports glob patterns in path, and --exclude flags
* Resilient: retries automatically after a failure, and supports resuming after a failed job
## What's new in v10 ?
* Synchronize a file system up to Azure Blob or vice versa. Use `azcopy sync <source> <destination>`
* Supports Azure Data Lake Storage Gen2. Use `myaccount.dfs.core.windows.net` for the URI to use ADLS Gen2 APIs.
* Supports copying an entire account (Blob service only) to another account. Use `azcopy cp https://myaccount.blob.core.windows.net https://myotheraccount.blob.core.windows.net` which will enumerate all Blob containers and copy to the destination account
* Supports [copying data from AWS S3](https://github.com/Azure/azure-storage-azcopy/wiki/Copy-from-AWS-S3)
* Account to account copy is now using the new Put from URL APIs that will copy the data directly from one storage account to another. No data transfer is needed down to the client where AzCopy runs. Therefore it is significantly faster!
* List/Remove files and blobs in a given path
* Supports glob patterns in path, and --exclude flags
* Every AzCopy run will create a job order, and a related log file. You can view and restart previous jobs using `azcopy jobs` command.
* Improved performance all around!
## Installation
1. Download the AzCopy executable using one of the following links:
* [Windows x64](https://aka.ms/downloadazcopy-v10-windows) (zip)
* [Linux x64](https://aka.ms/downloadazcopy-v10-linux) (tar.gz)
* [MacOS x64](https://aka.ms/downloadazcopy-v10-mac) (zip)
2. Unzip (or untar on Linux) and get started
On Linux:
```
wget -O azcopyv10.tar.gz https://aka.ms/downloadazcopy-v10-linux
tar -xzf azcopyv10.tar.gz
cd azcopy_linux_amd64_10.*
./azcopy
```
On Windows:
```
Invoke-WebRequest -Uri https://aka.ms/downloadazcopy-v10-windows -OutFile .\azcopyv10.zip
Expand-Archive azcopyv10.zip -DestinationPath .
cd .\azcopy_windows_amd64_10.*
.\azcopy.exe
```
## Manual
### Authenticating with Azure Storage
AzCopy supports two types of authentication. See the table below to see which type you need to use.
* **Pre-signed URLs** (URLs with Shared Access Signature aka. **SAS tokens**): Simply generate a SAS token from the Azure Portal, Storage Explorer, or one of the other Azure tools and append to the Blob path (container/virtual directory/blob path).
* **Azure Active Directory Authentication** : Add your user to the **'Blob Data Contributor'** role in the Azure Portal, and log on to AzCopy using `azcopy login`. To authenticate with MSI, use `azcopy login --identity`. Once logged in, you can simply use AzCopy commands without any SAS token appended to the path. e.g. `azcopy cp https://myaccount.blob.core.windows.net/container/data /mydata --recursive`
| Azure Storage service | Supported authentication methods |
| ------------- | ------------- |
| Blob storage | SAS tokens OR Azure Active Directory Authentication |
| File storage | SAS tokens |
| ADLS Gen2 | Azure Active Directory Authentication |
> :exclamation::exclamation::exclamation:Note a [SAS token](https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1) is *NOT* an account key. SAS tokens are limited in scope and validity, and start with a question mark which can be appended to a Blob URL. Here is an example: `?sv=2017-11-09&ss=bf&srt=co&sp=rwac&se=2018-11-16T03:59:09Z&st=2018-11-15T19:59:09Z&sip=10.102.166.17&spr=https,http&sig=k8xSm2K3crBbtNpfoxyvh9n%2BMjDTqRk2XpY8JYIAMaA%3D`.
### Getting started
AzCopy is self-documenting. To list the available commands, run:
```
./azcopy -h
```
To view the help page and examples, run:
```
./azcopy <cmd> -h
```
### Simple command-line syntax
```
# The general syntax
./azcopy <cmd> <arguments> --<flag-name>=<flag-value>
# Example:
./azcopy cp <source path> <destination path> --<flag-name>=<flag-value>
./azcopy cp "/path/to/local" "https://account.blob.core.windows.net/container?sastoken" --recursive=true
./azcopy cp "/mnt/myfile.txt" "https://myaccount.blob.core.windows.net/mycontainer/myfile.txt?sv=2017-11-09&ss=bf&srt=co&sp=rwac&se=2018-11-16T03:59:09Z&st=2018-11-15T19:59:09Z&sip=10.102.166.17&spr=https,http&sig=k8xSm2K3crBbtNpfoxyvh9n%2BMjDTqRk2XpY8JYIAMaA%3D"
```
To see more examples:
```
./azcopy cp -h
```
Each transfer operation will create a `Job` for AzCopy to act on. You can view the history of jobs using the following command:
```
./azcopy jobs list
```
The job logs and data are located under the $HOME/.azcopy directory on Linux, and %USERPROFILE%\.azcopy on Windows. You can clear the job data/logs if you wish after AzCopy completes the transfers.
### Copy data to Azure storage
The copy command can be used to transfer data from the source to the destination. The location can be a:
1. local path
2. URL to Azure Blob/Virtual Directory/Container
3. URL to Azure File/Directory/File Share
```
./azcopy <command> <source path> <destination path>
```
The following command will upload `1file.txt` to the Block Blob at `https://myaccount.blob.core.windows.net/mycontainer/1file.txt`.
```
./azcopy cp /data/1file.txt "https://myaccount.blob.core.windows.net/mycontainer/1file.txt?sastokenhere"
```
The following command will upload all files under `directory1` recursively to the path at `https://myaccount.blob.core.windows.net/mycontainer/directory1`.
```
./azcopy cp /data/directory1 "https://myaccount.blob.core.windows.net/mycontainer/directory1?sastokenhere" --recursive=true
```
The following command will upload all files directly under `directory1` without recursing into sub-directories, to the path at `https://myaccount.blob.core.windows.net/mycontainer/directory1`.
```
./azcopy cp /data/directory1/* "https://myaccount.blob.core.windows.net/mycontainer/directory1?sastokenhere"
```
To upload into File storage, simply change the URI to Azure File URI with corresponding SAS token.
### Copy VHD image to Azure Storage
AzCopy by default uploads data into Block Blobs. However if a source file has `.vhd` extension, AzCopy will default to uploading to a Page Blob.
### Copy data from Azure to local file systems
The following will download all Blob container contents into the local file system creating the directory `mycontainer` in the destination.
```
./azcopy cp "https://myaccount.blob.core.windows.net/mycontainer?sastokenhere" /data/ --recursive=true
```
The following will download all Blob container contents into the local file system. `mycontainer` directory will not be created in the destination because the globbing pattern looks for all paths inside `mycontainer` in the source rather than the `mycontainer` container itself.
```
./azcopy cp "https://myaccount.blob.core.windows.net/mycontainer/*?sastokenhere" /data/ --recursive=true
```
The following command will download all txt files in the source to the `directory1` path. Note that AzCopy will scan the entire source and filter for `.txt` files. This may take a while when you have thousands/millions of files in the source.
```
./azcopy cp "https://myaccount.blob.core.windows.net/mycontainer/directory1/*.txt?sastokenhere" /data/directory1
```
### Copy data between Azure Storage accounts (currently supports Block Blobs only)
Copying data between two Azure Storage accounts make use of the PutBlockFromURL API, and does not use the client machine's network bandwidth. Data is copied between two Azure Storage servers. AzCopy simply orchestrates the copy operation.
```
./azcopy cp "https://myaccount.blob.core.windows.net/?sastokenhere" "https://myotheraccount.blob.core.windows.net/?sastokenhere" --recursive=true
```
### Advanced Use Cases
#### Configure Concurrency
Set the environment variable `AZCOPY_CONCURRENCY_VALUE` to configure the number of concurrent requests. This is set to 300 by default. Note that this does not equal to 300 parallel connections. Reducing this will limit the bandwidth, and CPU used by AzCopy.
#### Configure proxy settings
To configure the proxy settings for AzCopy v10, set the environment variable https_proxy using the following command:
```
# For Windows:
set https_proxy=<proxy IP>:<proxy port>
# For Linux:
export https_proxy=<proxy IP>:<proxy port>
# For MacOS
export https_proxy=<proxy IP>:<proxy port>
```
For proxy authentication, use the following format:
```
export https_proxy=<user>:<pass>@<proxy IP>:<proxy port>
# or with a domain:
export https_proxy=<domain>%5C<user>:<pass>@<proxy IP>:<proxy port>
```
### Configure log location
Set the environment variable 'AZCOPY_LOG_LOCATION' to a directory of your choice where there is plenty of disk space as logs for large data transfers may use up Gigabytes of space depending on the chosen log level.
## Troubleshooting and Reporting Issues
### Check Logs for errors
AzCopy creates a log file for all the jobs. Look for clues in the logs to understand the problem. AzCopy will print UPLOADFAILED, COPYFAILED, and DOWNLOADFAILED strings for failures with the paths along with the error reason.
```
cat 04dc9ca9-158f-7945-5933-564021086c79.log | grep -i UPLOADFAILED
```
### View and resume jobs
To view the job stats, run:
```
./azcopy jobs show <job-id>
```
To see the transfers of a specific status(Success or Failed), run:
```
./azcopy jobs show <job-id> --with-status=Failed
```
You can resume a failed/cancelled job using its identifier along with the SAS token(s), which are not persisted for security reasons.
```
./azcopy jobs resume <jobid> --source-sas ?sastokenhere --destination-sas ?sastokenhere
```
### Raise an Issue
Raise an issue on this repository for any feedback or issue encountered.
### FAQ
- What is the difference between `sync` and `copy`?
- The `copy` command is a simple transferring operation, it scans the source and attempts to transfer every single file/blob. The supported source/destination pairs are listed in the help message of the tool. On the other hand, `sync` makes sure that whatever is present in the source will be replicated to the destination, and also whatever is not at the source will be deleted from the destination. If your goal is to simply move some files, then `copy` is definitely the right command, since it offers much better performance.
- For `sync`, last modified times are used to determine whether to transfer the same file present at both the source and the destination.
- If the use case is to incrementally transfer data, then `sync` is the better choice, since only the modified/missing files are transferred.
- Will `copy` overwrite my files?
- By default, AzCopy will overwrite the files at the destination if they already exist. To avoid this behavior, please use the flag `--overwrite=false`.
## Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.microsoft.com.
When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [<EMAIL>](mailto:<EMAIL>) with any additional questions or comments.
<file_sep>package ste
import (
"context"
"fmt"
"net/http"
"sync/atomic"
"time"
"net/url"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
)
// IJobPartTransferMgr is the contract the transfer-execution code (uploaders,
// downloaders, S2S copiers) uses to interact with the runtime state of a
// single transfer within a job part: its metadata, chunk accounting, status,
// cancellation, and logging.
type IJobPartTransferMgr interface {
	// --- transfer metadata ---
	FromTo() common.FromTo
	Info() TransferInfo
	// Destination headers/metadata derived from the data being transferred.
	BlobDstData(dataFileToXfer []byte) (headers azblob.BlobHTTPHeaders, metadata azblob.Metadata)
	FileDstData(dataFileToXfer []byte) (headers azfile.FileHTTPHeaders, metadata azfile.Metadata)
	BfsDstData(dataFileToXfer []byte) (headers azbfs.BlobFSHTTPHeaders)
	LastModifiedTime() time.Time
	PreserveLastModifiedTime() (time.Time, bool)
	ShouldPutMd5() bool
	MD5ValidationOption() common.HashValidationOption
	BlobTypeOverride() common.BlobType
	BlobTiers() (blockBlobTier common.BlockBlobTier, pageBlobTier common.PageBlobTier)
	JobHasLowFileCount() bool
	//ScheduleChunk(chunkFunc chunkFunc)
	// --- execution resources shared with the owning job part ---
	Context() context.Context
	SlicePool() common.ByteSlicePooler
	CacheLimiter() common.CacheLimiter
	FileCountLimiter() common.CacheLimiter
	StartJobXfer()
	IsForceWriteTrue() bool
	// --- chunk accounting and status ---
	ReportChunkDone(id common.ChunkID) (lastChunk bool, chunksDone uint32)
	UnsafeReportChunkDone() (lastChunk bool, chunksDone uint32)
	TransferStatus() common.TransferStatus
	SetStatus(status common.TransferStatus)
	SetErrorCode(errorCode int32)
	SetNumberOfChunks(numChunks uint32)
	SetActionAfterLastChunk(f func())
	ReportTransferDone() uint32
	RescheduleTransfer()
	ScheduleChunks(chunkFunc chunkFunc)
	Cancel()
	WasCanceled() bool
	// TODO: added for debugging purpose. remove later
	OccupyAConnection()
	// TODO: added for debugging purpose. remove later
	ReleaseAConnection()
	SourceProviderPipeline() pipeline.Pipeline
	// --- failure handling (cancel + log + set failure status) ---
	FailActiveUpload(where string, err error)
	FailActiveDownload(where string, err error)
	FailActiveUploadWithStatus(where string, err error, failureStatus common.TransferStatus)
	FailActiveDownloadWithStatus(where string, err error, failureStatus common.TransferStatus)
	FailActiveS2SCopy(where string, err error)
	FailActiveS2SCopyWithStatus(where string, err error, failureStatus common.TransferStatus)
	// TODO: Cleanup FailActiveUpload/FailActiveUploadWithStatus & FailActiveS2SCopy/FailActiveS2SCopyWithStatus
	FailActiveSend(where string, err error)
	FailActiveSendWithStatus(where string, err error, failureStatus common.TransferStatus)
	// --- logging ---
	LogUploadError(source, destination, errorMsg string, status int)
	LogDownloadError(source, destination, errorMsg string, status int)
	LogS2SCopyError(source, destination, errorMsg string, status int)
	LogSendError(source, destination, errorMsg string, status int)
	LogError(resource, context string, err error)
	LogTransferInfo(level pipeline.LogLevel, source, destination, msg string)
	LogTransferStart(source, destination, description string)
	LogChunkStatus(id common.ChunkID, reason common.WaitReason)
	ChunkStatusLogger() common.ChunkStatusLogger
	LogAtLevelForCurrentTransfer(level pipeline.LogLevel, msg string)
	common.ILogger
}
// TransferInfo carries the per-transfer details (source, destination, sizes,
// and S2S copy options) that chunk functions need at execution time.
// It is assembled by jobPartTransferMgr.Info() from the persisted job part plan.
type TransferInfo struct {
	BlockSize   uint32
	Source      string
	SourceSize  int64
	Destination string
	// Transfer info for S2S copy
	SrcProperties
	S2SGetPropertiesInBackend      bool
	S2SSourceChangeValidation      bool
	S2SInvalidMetadataHandleOption common.InvalidMetadataHandleOption
	// Blob
	SrcBlobType    azblob.BlobType       // used for both S2S and for downloads to local from blob
	S2SSrcBlobTier azblob.AccessTierType // AccessTierType (string) is used to accommodate service-side support matrix change.
	// NumChunks is the number of chunks in which transfer will be split into while uploading the transfer.
	// NumChunks is not used in case of AppendBlob transfer.
	NumChunks uint16
}

// SrcProperties bundles the source-side HTTP headers and metadata that must be
// re-applied at the destination during an S2S copy.
type SrcProperties struct {
	SrcHTTPHeaders common.ResourceHTTPHeaders // User for S2S copy, where per transfer's src properties need be set in destination.
	SrcMetadata    common.Metadata
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

// chunkFunc is the unit of work scheduled onto the execution engine; the int
// argument is supplied by the scheduler at invocation time.
type chunkFunc func(int)

// jobPartTransferMgr represents the runtime information for a Job Part's transfer
type jobPartTransferMgr struct {
	jobPartMgr          IJobPartMgr // Refers to the "owning" Job Part
	jobPartPlanTransfer *JobPartPlanTransfer
	transferIndex       uint32
	// the context of this transfer; allows any failing chunk to cancel the whole transfer
	ctx context.Context
	// Call cancel to cancel the transfer
	cancel context.CancelFunc
	// numChunks is the total number of chunks this transfer was split into.
	numChunks uint32
	// actionAfterLastChunk, if set, runs exactly once when the final chunk completes.
	actionAfterLastChunk func()
	// NumberOfChunksDone represents the number of chunks of a transfer
	// which are either completed or failed.
	// NumberOfChunksDone determines the final cancellation or completion of a transfer
	atomicChunksDone uint32
	// used defensively to protect against accidental double counting
	atomicCompletionIndicator uint32
	/*
		@Parteek removed 3/23 morning, as jeff ad equivalent
		// transfer chunks are put into this channel and execution engine takes chunk out of this channel.
		chunkChannel chan<- ChunkMsg*/
}
// FromTo returns the source/destination pair type recorded in the owning
// job part's plan.
func (jptm *jobPartTransferMgr) FromTo() common.FromTo {
	return jptm.jobPartMgr.Plan().FromTo
}

// StartJobXfer hands this transfer to the owning job part manager to begin execution.
func (jptm *jobPartTransferMgr) StartJobXfer() {
	jptm.jobPartMgr.StartJobXfer(jptm)
}

// IsForceWriteTrue reports whether existing destination content may be overwritten.
func (jptm *jobPartTransferMgr) IsForceWriteTrue() bool {
	return jptm.jobPartMgr.IsForceWriteTrue()
}
// appendSASToResource re-attaches a SAS token to a resource URL's query string.
// SAS tokens are stripped from remote URLs before they are persisted in the
// part plan file (for security), so they must be appended again before the
// transfer executes. Panics on an unparseable URL, matching the original
// behavior (the URL was parseable when the plan was created).
func appendSASToResource(resource, sas string) string {
	u, e := url.Parse(resource)
	if e != nil {
		panic(e)
	}
	if len(u.RawQuery) > 0 {
		u.RawQuery += "&" + sas
	} else {
		u.RawQuery = sas
	}
	return u.String()
}

// Info assembles the TransferInfo for this transfer from the persisted job
// part plan: it resolves source/destination strings (re-appending any SAS
// tokens), source properties for S2S copies, and an appropriate block size.
func (jptm *jobPartTransferMgr) Info() TransferInfo {
	plan := jptm.jobPartMgr.Plan()
	src, dst := plan.TransferSrcDstStrings(jptm.transferIndex)
	dstBlobData := plan.DstBlobData
	srcHTTPHeaders, srcMetadata, srcBlobType, srcBlobTier, s2sGetPropertiesInBackend, s2sSourceChangeValidation, s2sInvalidMetadataHandleOption :=
		plan.TransferSrcPropertiesAndMetadata(jptm.transferIndex)
	srcSAS, dstSAS := jptm.jobPartMgr.SAS()

	// A non-empty SAS means the corresponding side is a remote URL whose SAS
	// was stripped before persisting the plan; re-append it now.
	if len(dstSAS) > 0 {
		dst = appendSASToResource(dst, dstSAS)
	}
	if len(srcSAS) > 0 {
		src = appendSASToResource(src, srcSAS)
	}

	sourceSize := plan.Transfer(jptm.transferIndex).SourceSize
	var blockSize = dstBlobData.BlockSize
	// If the blockSize is 0, then User didn't provide any blockSize.
	// We need to set the blockSize in such a way that the number of blocks per
	// blob does not exceed 50000 (max number of blocks per blob): double the
	// default until the block count fits.
	if blockSize == 0 {
		blockSize = uint32(common.DefaultBlockBlobBlockSize)
		for ; uint32(sourceSize/int64(blockSize)) > common.MaxNumberOfBlocksPerBlob; blockSize = 2 * blockSize {
		}
	}
	// Never exceed the maximum block size the service accepts.
	blockSize = common.Iffuint32(blockSize > common.MaxBlockBlobBlockSize, common.MaxBlockBlobBlockSize, blockSize)

	return TransferInfo{
		BlockSize:                      blockSize,
		Source:                         src,
		SourceSize:                     sourceSize,
		Destination:                    dst,
		S2SGetPropertiesInBackend:      s2sGetPropertiesInBackend,
		S2SSourceChangeValidation:      s2sSourceChangeValidation,
		S2SInvalidMetadataHandleOption: s2sInvalidMetadataHandleOption,
		SrcProperties: SrcProperties{
			SrcHTTPHeaders: srcHTTPHeaders,
			SrcMetadata:    srcMetadata,
		},
		SrcBlobType:    srcBlobType,
		S2SSrcBlobTier: srcBlobTier,
	}
}
// Context returns this transfer's context; cancelling it aborts all of the
// transfer's chunks.
func (jptm *jobPartTransferMgr) Context() context.Context {
	return jptm.ctx
}

// SlicePool returns the byte-slice pool shared with the owning job part.
func (jptm *jobPartTransferMgr) SlicePool() common.ByteSlicePooler {
	return jptm.jobPartMgr.SlicePool()
}

// CacheLimiter returns the RAM-usage limiter shared with the owning job part.
func (jptm *jobPartTransferMgr) CacheLimiter() common.CacheLimiter {
	return jptm.jobPartMgr.CacheLimiter()
}

// FileCountLimiter returns the open-file-count limiter shared with the owning job part.
func (jptm *jobPartTransferMgr) FileCountLimiter() common.CacheLimiter {
	return jptm.jobPartMgr.FileCountLimiter()
}

// RescheduleTransfer puts this transfer back onto the owning job part's queue.
func (jptm *jobPartTransferMgr) RescheduleTransfer() {
	jptm.jobPartMgr.RescheduleTransfer(jptm)
}

// ScheduleChunks queues a chunk function for execution by the engine.
func (jptm *jobPartTransferMgr) ScheduleChunks(chunkFunc chunkFunc) {
	jptm.jobPartMgr.ScheduleChunks(chunkFunc)
}

// BlobDstData computes destination Blob HTTP headers/metadata from the data being transferred.
func (jptm *jobPartTransferMgr) BlobDstData(dataFileToXfer []byte) (headers azblob.BlobHTTPHeaders, metadata azblob.Metadata) {
	return jptm.jobPartMgr.(*jobPartMgr).blobDstData(jptm.Info().Source, dataFileToXfer)
}

// FileDstData computes destination File HTTP headers/metadata from the data being transferred.
func (jptm *jobPartTransferMgr) FileDstData(dataFileToXfer []byte) (headers azfile.FileHTTPHeaders, metadata azfile.Metadata) {
	return jptm.jobPartMgr.(*jobPartMgr).fileDstData(jptm.Info().Source, dataFileToXfer)
}

// BfsDstData computes destination BlobFS HTTP headers from the data being transferred.
func (jptm *jobPartTransferMgr) BfsDstData(dataFileToXfer []byte) (headers azbfs.BlobFSHTTPHeaders) {
	return jptm.jobPartMgr.(*jobPartMgr).bfsDstData(jptm.Info().Source, dataFileToXfer)
}
// TODO refactor into something like jptm.IsLastModifiedTimeEqual() so that there is NO LastModifiedTime method and people therefore CAN'T do it wrong due to time zone

// LastModifiedTime returns the source's last-modified time as recorded
// (in Unix nanoseconds) in the persisted plan.
func (jptm *jobPartTransferMgr) LastModifiedTime() time.Time {
	return time.Unix(0, jptm.jobPartPlanTransfer.ModifiedTime)
}

// PreserveLastModifiedTime checks for the PreserveLastModifiedTime flag in JobPartPlan of a transfer.
// If PreserveLastModifiedTime is set to true, it returns the lastModifiedTime of the source.
// Otherwise it returns the zero time and false.
func (jptm *jobPartTransferMgr) PreserveLastModifiedTime() (time.Time, bool) {
	if preserveLastModifiedTime := jptm.jobPartMgr.(*jobPartMgr).localDstData().PreserveLastModifiedTime; preserveLastModifiedTime {
		lastModifiedTime := jptm.jobPartPlanTransfer.ModifiedTime
		return time.Unix(0, lastModifiedTime), true
	}
	return time.Time{}, false
}

// ShouldPutMd5 reports whether an MD5 hash should be computed and stored at the destination.
func (jptm *jobPartTransferMgr) ShouldPutMd5() bool {
	return jptm.jobPartMgr.ShouldPutMd5()
}

// MD5ValidationOption returns how MD5 mismatches should be handled on download.
func (jptm *jobPartTransferMgr) MD5ValidationOption() common.HashValidationOption {
	return jptm.jobPartMgr.(*jobPartMgr).localDstData().MD5VerificationOption
}

// BlobTypeOverride returns the user-requested destination blob type, if any.
func (jptm *jobPartTransferMgr) BlobTypeOverride() common.BlobType {
	return jptm.jobPartMgr.BlobTypeOverride()
}

// BlobTiers returns the destination tiers requested for block and page blobs.
func (jptm *jobPartTransferMgr) BlobTiers() (blockBlobTier common.BlockBlobTier, pageBlobTier common.PageBlobTier) {
	return jptm.jobPartMgr.BlobTiers()
}
// JobHasLowFileCount returns an estimate of whether we only have a very small number of files in the overall job
// (An "estimate" because it actually only looks at the current job part)
func (jptm *jobPartTransferMgr) JobHasLowFileCount() bool {
	// TODO: review this guestimated threshold
	// Threshold is chosen because for a single large file (in Windows-based test configuration with approx 9.5 Gps disks)
	// one file gets between 2 or 5 Gbps (depending on other factors), but we really want at least 4 times that throughput.
	// So a minimal threshold would be 4.
	const lowFileCountThreshold = 4
	return jptm.jobPartMgr.Plan().NumTransfers < lowFileCountThreshold
}

// SetNumberOfChunks records how many chunks this transfer was split into;
// completion is declared when that many chunks have reported done.
func (jptm *jobPartTransferMgr) SetNumberOfChunks(numChunks uint32) {
	jptm.numChunks = numChunks
}

// SetActionAfterLastChunk registers a callback to run exactly once after the
// final chunk completes (see runActionAfterLastChunk).
func (jptm *jobPartTransferMgr) SetActionAfterLastChunk(f func()) {
	jptm.actionAfterLastChunk = f
}
// ReportChunkDone must be called when a chunk has completed its transfer
// (successfully or not); it returns whether that was the final chunk and the
// number of chunks completed so far. On the final chunk it also runs any
// registered after-last-chunk action.
func (jptm *jobPartTransferMgr) ReportChunkDone(id common.ChunkID) (lastChunk bool, chunksDone uint32) {

	// Tell the id to remember that we (the jptm) have been told about its completion
	// Will panic if we've already been told about its completion before.
	// Why? As defensive programming, since if we accidentally counted one chunk twice, we'd complete
	// before another was finish. Which would be bad
	id.SetCompletionNotificationSent()

	// Do our actual processing
	chunksDone = atomic.AddUint32(&jptm.atomicChunksDone, 1)
	lastChunk = chunksDone == jptm.numChunks
	if lastChunk {
		jptm.runActionAfterLastChunk()
	}
	return lastChunk, chunksDone
}

// TODO: phase this method out. It's just here to support parts of the codebase that don't yet have chunk IDs
func (jptm *jobPartTransferMgr) UnsafeReportChunkDone() (lastChunk bool, chunksDone uint32) {
	return jptm.ReportChunkDone(common.NewChunkID("", 0))
}

// If an automatic action has been specified for after the last chunk, run it now
// (Prior to introduction of this routine, individual chunkfuncs had to check the return values
// of ReportChunkDone and then implement their own versions of the necessary transfer epilogue code.
// But that led to unwanted duplication of epilogue code, in the various types of chunkfunc. This routine
// makes it easier to create DRY epilogue code.)
func (jptm *jobPartTransferMgr) runActionAfterLastChunk() {
	if jptm.actionAfterLastChunk != nil {
		jptm.actionAfterLastChunk()
		jptm.actionAfterLastChunk = nil // make sure it can't be run again, since epilogue methods are not expected to be idempotent
	}
}
// TransferStatus returns the current status of this transfer from the plan.
func (jptm *jobPartTransferMgr) TransferStatus() common.TransferStatus {
	return jptm.jobPartPlanTransfer.TransferStatus()
}

// SetStatus updates the status of given transfer for given jobId and partNumber
func (jptm *jobPartTransferMgr) SetStatus(status common.TransferStatus) {
	jptm.jobPartPlanTransfer.SetTransferStatus(status, false)
}

// ErrorCode returns the errorcode of transfer for given jobId and partNumber.
func (jptm *jobPartTransferMgr) ErrorCode() int32 {
	return jptm.jobPartPlanTransfer.ErrorCode()
}

// SetErrorCode updates the errorcode of transfer for given jobId and partNumber.
func (jptm *jobPartTransferMgr) SetErrorCode(errorCode int32) {
	// If the given errorCode is 0, then errorCode doesn't needs to be updated since default value
	// of errorCode is 0.
	if errorCode == 0 {
		return
	}
	jptm.jobPartPlanTransfer.SetErrorCode(errorCode, false)
}
// TODO: Can we kill this method?
/*func (jptm *jobPartTransferMgr) ChunksDone() uint32 {
	return atomic.LoadUint32(&jptm.atomicChunksDone)
}*/

// Cancel aborts this transfer by cancelling its context.
func (jptm *jobPartTransferMgr) Cancel() { jptm.cancel() }

// WasCanceled reports whether this transfer's context has been cancelled (or otherwise ended).
func (jptm *jobPartTransferMgr) WasCanceled() bool { return jptm.ctx.Err() != nil }

// ShouldLog reports whether messages at the given level would be logged.
func (jptm *jobPartTransferMgr) ShouldLog(level pipeline.LogLevel) bool {
	return jptm.jobPartMgr.ShouldLog(level)
}

// LogChunkStatus records a chunk's wait reason with the shared chunk-status logger.
func (jptm *jobPartTransferMgr) LogChunkStatus(id common.ChunkID, reason common.WaitReason) {
	jptm.jobPartMgr.ChunkStatusLogger().LogChunkStatus(id, reason)
}

// ChunkStatusLogger returns the chunk-status logger shared with the owning job part.
func (jptm *jobPartTransferMgr) ChunkStatusLogger() common.ChunkStatusLogger {
	return jptm.jobPartMgr.ChunkStatusLogger()
}
// Add 1 to the active number of goroutine performing the transfer or executing the chunkFunc
// TODO: added for debugging purpose. remove later
func (jptm *jobPartTransferMgr) OccupyAConnection() {
	jptm.jobPartMgr.OccupyAConnection()
}

// Sub 1 from the active number of goroutine performing the transfer or executing the chunkFunc
// TODO: added for debugging purpose. remove later
func (jptm *jobPartTransferMgr) ReleaseAConnection() {
	jptm.jobPartMgr.ReleaseAConnection()
}

// FailActiveUpload marks an in-flight upload as Failed (see failActiveTransfer).
func (jptm *jobPartTransferMgr) FailActiveUpload(where string, err error) {
	jptm.failActiveTransfer(transferErrorCodeUploadFailed, where, err, common.ETransferStatus.Failed())
}

// FailActiveDownload marks an in-flight download as Failed (see failActiveTransfer).
func (jptm *jobPartTransferMgr) FailActiveDownload(where string, err error) {
	jptm.failActiveTransfer(transferErrorCodeDownloadFailed, where, err, common.ETransferStatus.Failed())
}

// FailActiveS2SCopy marks an in-flight service-to-service copy as Failed (see failActiveTransfer).
func (jptm *jobPartTransferMgr) FailActiveS2SCopy(where string, err error) {
	jptm.failActiveTransfer(transferErrorCodeCopyFailed, where, err, common.ETransferStatus.Failed())
}

// FailActiveUploadWithStatus is FailActiveUpload with a caller-chosen failure status.
func (jptm *jobPartTransferMgr) FailActiveUploadWithStatus(where string, err error, failureStatus common.TransferStatus) {
	jptm.failActiveTransfer(transferErrorCodeUploadFailed, where, err, failureStatus)
}

// FailActiveDownloadWithStatus is FailActiveDownload with a caller-chosen failure status.
func (jptm *jobPartTransferMgr) FailActiveDownloadWithStatus(where string, err error, failureStatus common.TransferStatus) {
	jptm.failActiveTransfer(transferErrorCodeDownloadFailed, where, err, failureStatus)
}

// FailActiveS2SCopyWithStatus is FailActiveS2SCopy with a caller-chosen failure status.
func (jptm *jobPartTransferMgr) FailActiveS2SCopyWithStatus(where string, err error, failureStatus common.TransferStatus) {
	jptm.failActiveTransfer(transferErrorCodeCopyFailed, where, err, failureStatus)
}
// TODO: FailActive* need be further refactored with a seperate workitem.

// TempJudgeUploadOrCopy classifies the transfer direction: an upload moves data
// from the local machine to a remote endpoint; a copy moves data between two
// remote endpoints. Downloads (and local-to-local) are neither.
func (jptm *jobPartTransferMgr) TempJudgeUploadOrCopy() (isUpload, isCopy bool) {
	fromTo := jptm.FromTo()
	srcIsLocal := fromTo.From() == common.ELocation.Local()
	dstIsLocal := fromTo.To() == common.ELocation.Local()
	switch {
	case srcIsLocal && !dstIsLocal:
		return true, false // upload: local -> remote
	case !srcIsLocal && !dstIsLocal:
		return false, true // service-to-service copy: remote -> remote
	default:
		return false, false // download or local-to-local
	}
}
func (jptm *jobPartTransferMgr) FailActiveSend(where string, err error) {
isUpload, isCopy := jptm.TempJudgeUploadOrCopy()
if isUpload {
jptm.FailActiveUpload(where, err)
} else if isCopy {
jptm.FailActiveS2SCopy(where, err)
} else {
panic("invalid state, FailActiveSend used by illegal direction")
}
}
// FailActiveSendWithStatus is like FailActiveSend, but lets the caller choose
// the failure status recorded for the transfer.
func (jptm *jobPartTransferMgr) FailActiveSendWithStatus(where string, err error, failureStatus common.TransferStatus) {
	switch isUpload, isCopy := jptm.TempJudgeUploadOrCopy(); {
	case isUpload:
		jptm.FailActiveUploadWithStatus(where, err, failureStatus)
	case isCopy:
		jptm.FailActiveS2SCopyWithStatus(where, err, failureStatus)
	default:
		panic("invalid state, FailActiveSendWithStatus used by illegal direction")
	}
}
// Use this to mark active transfers (i.e. those where chunk funcs have been scheduled) as failed.
// Unlike just setting the status to failed, this also handles cancellation correctly.
// It cancels the transfer, logs the error (with HTTP status and request ID when
// available), records the failure status and error code, and exits the whole
// process on 403 since no further work can succeed without valid auth.
func (jptm *jobPartTransferMgr) failActiveTransfer(typ transferErrorCode, descriptionOfWhereErrorOccurred string, err error, failureStatus common.TransferStatus) {
	// TODO here we only act if the transfer is not yet canceled
	// however, it's possible that this function is called simultaneously by different chunks
	// in that case, the logs would be repeated
	// as of april 9th, 2019, there's no obvious solution without adding more complexity into this part of the code, which is already not pretty and kind of everywhere
	// consider redesign the lifecycle management in ste
	if !jptm.WasCanceled() {
		jptm.Cancel()
		// extract HTTP status code, message, and service request ID from the error (0/raw text for non-storage errors)
		status, msg := ErrorEx{err}.ErrorCodeAndString()
		requestID := ErrorEx{err}.MSRequestID()
		fullMsg := fmt.Sprintf("%s. When %s. X-Ms-Request-Id: %s\n", msg, descriptionOfWhereErrorOccurred, requestID) // trailing \n to separate it better from any later, unrelated, log lines
		jptm.logTransferError(typ, jptm.Info().Source, jptm.Info().Destination, fullMsg, status)
		jptm.SetStatus(failureStatus)
		jptm.SetErrorCode(int32(status)) // TODO: what are the rules about when this needs to be set, and doesn't need to be (e.g. for earlier failures)?

		// If the status code was 403, it means there was an authentication error and we exit.
		// User can resume the job if completely ordered with a new sas.
		if status == http.StatusForbidden {
			// quit right away, since without proper authentication no work can be done
			// display a clear message
			common.GetLifecycleMgr().Error(fmt.Sprintf("Authentication failed, it is either not correct, or expired, or does not have the correct permission %s", err.Error()))
		}
	}

	// TODO: right now the convention re cancellation seems to be that if you cancel, you MUST both call cancel AND
	// TODO: ... call ReportChunkDone (with the latter being done for ALL the expected chunks). Is that maintainable?
	// TODO: ... Is that really ideal, having to call ReportChunkDone for all the chunks AFTER cancellation?
	// TODO: ... but it is currently necessary,because of the way the transfer is only considered done (and automatic epilogue only triggers)
	// TODO: ... if all expected chunks report as done
}
// PipelineLogInfo returns the pipeline logging options configured on the
// owning job manager.
// NOTE(review): the chained type assertions will panic if jobPartMgr/jobMgr
// are ever backed by different concrete types — confirm these are the only
// implementations before relying on this elsewhere.
func (jptm *jobPartTransferMgr) PipelineLogInfo() pipeline.LogOptions {
	return jptm.jobPartMgr.(*jobPartMgr).jobMgr.(*jobMgr).PipelineLogInfo()
}
// Log writes msg to the job part's log at the given level, prefixed with the
// level name and the part/transfer identifiers, e.g. "INFO: [P#2-T#7] ".
func (jptm *jobPartTransferMgr) Log(level pipeline.LogLevel, msg string) {
	plan := jptm.jobPartMgr.Plan()
	prefix := fmt.Sprintf("%s: [P#%d-T#%d] ", common.LogLevel(level), plan.PartNum, jptm.transferIndex)
	jptm.jobPartMgr.Log(level, prefix+msg)
}
// ErrorCodeAndString extracts the HTTP status code and status text from the
// storage SDKs' error types; for any other error it returns 0 and err.Error().
// NOTE(review): the three cases are textually identical but cannot be merged,
// because the azblob/azfile/azbfs StorageError types share no common interface
// exposing Response() here.
func (jptm *jobPartTransferMgr) ErrorCodeAndString(err error) (int, string) {
	switch e := err.(type) {
	case azblob.StorageError:
		return e.Response().StatusCode, e.Response().Status
	case azfile.StorageError:
		return e.Response().StatusCode, e.Response().Status
	case azbfs.StorageError:
		return e.Response().StatusCode, e.Response().Status
	default:
		return 0, err.Error()
	}
}
// transferErrorCode tags a logged transfer failure with its direction
// (upload, download, or service-to-service copy).
type transferErrorCode string

const (
	transferErrorCodeUploadFailed   transferErrorCode = "UPLOADFAILED"
	transferErrorCodeDownloadFailed transferErrorCode = "DOWNLOADFAILED"
	transferErrorCodeCopyFailed     transferErrorCode = "COPYFAILED"
)
// LogAtLevelForCurrentTransfer logs msg for this transfer, wrapping it with
// the (secret-redacted) source and destination URLs.
func (jptm *jobPartTransferMgr) LogAtLevelForCurrentTransfer(level pipeline.LogLevel, msg string) {
	// order of log elements here is mirrored, with some more added, in logTransferError
	src := common.URLStringExtension(jptm.Info().Source).RedactSecretQueryParamForLogging()
	dst := common.URLStringExtension(jptm.Info().Destination).RedactSecretQueryParamForLogging()
	jptm.Log(level, src+" "+msg+" Dst: "+dst)
}
// logTransferError logs a transfer failure at error level, including the
// direction tag, redacted source/destination URLs, HTTP status, and message.
func (jptm *jobPartTransferMgr) logTransferError(errorCode transferErrorCode, source, destination, errorMsg string, status int) {
	// order of log elements here is mirrored, in subset, in LogForCurrentTransfer
	msg := fmt.Sprintf("%v: ", errorCode) + common.URLStringExtension(source).RedactSecretQueryParamForLogging() +
		fmt.Sprintf(" : %03d : %s\n   Dst: ", status, errorMsg) + common.URLStringExtension(destination).RedactSecretQueryParamForLogging()
	jptm.Log(pipeline.LogError, msg)
}
// LogUploadError logs a failed upload with its source/destination and HTTP status.
func (jptm *jobPartTransferMgr) LogUploadError(source, destination, errorMsg string, status int) {
	jptm.logTransferError(transferErrorCodeUploadFailed, source, destination, errorMsg, status)
}

// LogDownloadError logs a failed download with its source/destination and HTTP status.
func (jptm *jobPartTransferMgr) LogDownloadError(source, destination, errorMsg string, status int) {
	jptm.logTransferError(transferErrorCodeDownloadFailed, source, destination, errorMsg, status)
}

// LogS2SCopyError logs a failed service-to-service copy with its source/destination and HTTP status.
func (jptm *jobPartTransferMgr) LogS2SCopyError(source, destination, errorMsg string, status int) {
	jptm.logTransferError(transferErrorCodeCopyFailed, source, destination, errorMsg, status)
}
// TODO: Log*Error need be further refactored with a seperate workitem.

// LogSendError routes to the upload or S2S-copy error logger depending on the
// transfer direction, and panics if called for any other direction.
func (jptm *jobPartTransferMgr) LogSendError(source, destination, errorMsg string, status int) {
	switch isUpload, isCopy := jptm.TempJudgeUploadOrCopy(); {
	case isUpload:
		jptm.LogUploadError(source, destination, errorMsg, status)
	case isCopy:
		jptm.LogS2SCopyError(source, destination, errorMsg, status)
	default:
		panic("invalid state, LogSendError used by illegal direction")
	}
}
// LogError logs a generic error at error level, including the redacted
// resource URL, the HTTP status/message extracted from err, and the
// service-side request ID when available.
func (jptm *jobPartTransferMgr) LogError(resource, context string, err error) {
	status, msg := ErrorEx{err}.ErrorCodeAndString()
	MSRequestID := ErrorEx{err}.MSRequestID()
	jptm.Log(pipeline.LogError,
		fmt.Sprintf("%s: %d: %s-%s. X-Ms-Request-Id:%s\n", common.URLStringExtension(resource).RedactSecretQueryParamForLogging(), status, context, msg, MSRequestID))
}
// LogTransferStart logs, at info level, that a transfer is starting, with
// redacted source/destination URLs and a free-form description.
func (jptm *jobPartTransferMgr) LogTransferStart(source, destination, description string) {
	src := common.URLStringExtension(source).RedactSecretQueryParamForLogging()
	dst := common.URLStringExtension(destination).RedactSecretQueryParamForLogging()
	jptm.Log(pipeline.LogInfo, fmt.Sprintf("Starting transfer: Source %q Destination %q. %s", src, dst, description))
}
// LogTransferInfo logs a message about an in-progress transfer at the given
// level, with redacted source/destination URLs.
func (jptm *jobPartTransferMgr) LogTransferInfo(level pipeline.LogLevel, source, destination, msg string) {
	src := common.URLStringExtension(source).RedactSecretQueryParamForLogging()
	dst := common.URLStringExtension(destination).RedactSecretQueryParamForLogging()
	jptm.Log(level, fmt.Sprintf("Transfer: Source %q Destination %q. %s", src, dst, msg))
}
// Panic delegates panic handling for this transfer to the owning job part manager.
func (jptm *jobPartTransferMgr) Panic(err error) { jptm.jobPartMgr.Panic(err) }
// Call ReportTransferDone to report when a Transfer for this Job Part has completed.
// Returns the number of transfers completed so far for this job part (as
// reported by the job part manager). Panics if called twice for the same
// transfer, since double-counting would corrupt the job's completion tally.
// TODO: I feel like this should take the status & we kill SetStatus
func (jptm *jobPartTransferMgr) ReportTransferDone() uint32 {
	// In case of context leak in job part transfer manager.
	jptm.Cancel()

	// defensive programming check, to make sure this method is not called twice for the same transfer
	// (since if it was, job would count us as TWO completions, and maybe miss another transfer that
	// should have been counted but wasn't)
	// TODO: it would be nice if this protection was actually in jobPartMgr.ReportTransferDone,
	//    but that's harder to implement (would imply need for a threadsafe map there, to track
	//    status by transfer). So for now we are going with the check here. This is the only call
	//    to the jobPartManager anyway (as it Feb 2019)
	if atomic.SwapUint32(&jptm.atomicCompletionIndicator, 1) != 0 {
		panic("cannot report the same transfer done twice")
	}
	return jptm.jobPartMgr.ReportTransferDone()
}
// SourceProviderPipeline returns the pipeline used to read from the transfer's
// source, by delegating to the owning job part manager.
func (jptm *jobPartTransferMgr) SourceProviderPipeline() pipeline.Pipeline {
	return jptm.jobPartMgr.SourceProviderPipeline()
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"github.com/Azure/azure-storage-azcopy/common"
chk "gopkg.in/check.v1"
"net/url"
"path/filepath"
"strings"
)
// regular local file->blob upload
// Verifies single-file upload for plain, non-ASCII, and URL-special-character
// file names, targeting first an explicit blob URL and then the container URL.
func (s *cmdIntegrationSuite) TestUploadSingleFileToBlob(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	for _, srcFileName := range []string{"singleblobisbest", "打麻将.txt", "%4509%4254$85140&"} {
		// set up the source as a single file
		srcDirName := scenarioHelper{}.generateLocalDirectory(c)
		fileList := []string{srcFileName}
		scenarioHelper{}.generateLocalFilesFromList(c, srcDirName, fileList)

		// set up the destination container with a single blob
		dstBlobName := "whatever"
		scenarioHelper{}.generateBlobsFromList(c, containerURL, []string{dstBlobName})
		c.Assert(containerURL, chk.NotNil)

		// set up interceptor
		mockedRPC := interceptor{}
		Rpc = mockedRPC.intercept
		mockedRPC.init()

		// construct the raw input to simulate user input
		rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, dstBlobName)
		raw := getDefaultCopyRawInput(filepath.Join(srcDirName, srcFileName), rawBlobURLWithSAS.String())

		// the blob was created after the file, so no sync should happen
		// NOTE(review): this is a copy test, not sync — comment looks copy-pasted; confirm intent
		runCopyAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)

			// validate that the right number of transfers were scheduled
			validateUploadTransfersAreScheduled(c, "", "", []string{""}, mockedRPC)
		})

		// clean the RPC for the next test
		mockedRPC.reset()

		// now target the destination container, the result should be the same
		rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
		raw = getDefaultCopyRawInput(filepath.Join(srcDirName, srcFileName), rawContainerURLWithSAS.String())

		// the file was created after the blob, so no sync should happen
		// NOTE(review): same copy-pasted "sync" comment — this is a copy run
		runCopyAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)

			// verify explicitly since the source and destination names will be different:
			// the source is "" since the given URL points to the blob itself
			// the destination should be the blob name, since the given local path points to the parent dir
			c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
			c.Assert(mockedRPC.transfers[0].Source, chk.Equals, "")
			c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, common.AZCOPY_PATH_SEPARATOR_STRING+url.PathEscape(srcFileName))
		})
	}
}
// regular directory->container upload
// Verifies that a recursive directory upload schedules one transfer per file,
// and that with recursion disabled the same input transfers nothing and errors.
func (s *cmdIntegrationSuite) TestUploadDirectoryToContainer(c *chk.C) {
	bsu := getBSU()

	// set up the source with numerous files
	srcDirPath := scenarioHelper{}.generateLocalDirectory(c)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForLocal(c, srcDirPath, "")

	// set up an empty container
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultCopyRawInput(srcDirPath, rawContainerURLWithSAS.String())
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(fileList))

		// validate that the right transfers were sent
		// destinations are rooted at the source directory's base name inside the container
		validateUploadTransfersAreScheduled(c, common.AZCOPY_PATH_SEPARATOR_STRING,
			common.AZCOPY_PATH_SEPARATOR_STRING+filepath.Base(srcDirPath)+common.AZCOPY_PATH_SEPARATOR_STRING, fileList, mockedRPC)
	})

	// turn off recursive, this time nothing should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// regular directory->virtual dir upload
// Verifies that a recursive directory upload into a virtual directory schedules
// one transfer per file (with the vdir prefix shaved off), and that disabling
// recursion transfers nothing and errors.
func (s *cmdIntegrationSuite) TestUploadDirectoryToVirtualDirectory(c *chk.C) {
	bsu := getBSU()
	vdirName := "vdir"

	// set up the source with numerous files
	srcDirPath := scenarioHelper{}.generateLocalDirectory(c)
	fileList := scenarioHelper{}.generateCommonRemoteScenarioForLocal(c, srcDirPath, "")

	// set up an empty container
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	// fix: the local was previously named rawContainerURLWithSAS, which was
	// misleading — this is a blob-level URL pointing at the virtual directory
	rawVdirURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, vdirName)
	raw := getDefaultCopyRawInput(srcDirPath, rawVdirURLWithSAS.String())
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(fileList))

		// validate that the right transfers were sent
		expectedTransfers := scenarioHelper{}.shaveOffPrefix(fileList, filepath.Base(srcDirPath)+common.AZCOPY_PATH_SEPARATOR_STRING)
		validateUploadTransfersAreScheduled(c, common.AZCOPY_PATH_SEPARATOR_STRING,
			common.AZCOPY_PATH_SEPARATOR_STRING+filepath.Base(srcDirPath)+common.AZCOPY_PATH_SEPARATOR_STRING, expectedTransfers, mockedRPC)
	})

	// turn off recursive, this time nothing should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// files(from pattern)->container upload
// Verifies that a wildcard pattern (*.pdf) applied to a directory upload
// schedules only the single top-level matching file, even with recursion on.
func (s *cmdIntegrationSuite) TestUploadDirectoryToContainerWithPattern(c *chk.C) {
	bsu := getBSU()

	// set up the source with numerous files
	srcDirPath := scenarioHelper{}.generateLocalDirectory(c)
	scenarioHelper{}.generateCommonRemoteScenarioForLocal(c, srcDirPath, "")

	// add special files that we wish to include
	filesToInclude := []string{"important.pdf", "includeSub/amazing.pdf", "includeSub/wow/amazing.pdf"}
	scenarioHelper{}.generateLocalFilesFromList(c, srcDirPath, filesToInclude)

	// set up an empty container
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultCopyRawInput(filepath.Join(srcDirPath, "/*.pdf"), rawContainerURLWithSAS.String())
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// only the top pdf should be included
		// (fix: this assertion was previously duplicated on consecutive lines)
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Source, chk.Equals, mockedRPC.transfers[0].Destination)
		c.Assert(strings.HasSuffix(mockedRPC.transfers[0].Source, ".pdf"), chk.Equals, true)
		c.Assert(strings.Contains(mockedRPC.transfers[0].Source[1:], common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
	})
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common
import (
chk "gopkg.in/check.v1"
"strings"
)
// uuidTestSuite registers the UUID round-trip tests with gocheck.
type uuidTestSuite struct{}

var _ = chk.Suite(&uuidTestSuite{})
// TestGUIDGenerationAndParsing checks that freshly generated UUIDs contain no
// spaces in their string form and round-trip exactly through ParseUUID.
func (s *uuidTestSuite) TestGUIDGenerationAndParsing(c *chk.C) {
	const iterations = 100
	for i := 0; i < iterations; i++ {
		uuid := NewUUID()
		str := uuid.String()

		// no space is allowed
		c.Assert(strings.Contains(str, " "), chk.Equals, false)

		parsed, err := ParseUUID(str)
		c.Assert(err, chk.IsNil)
		c.Assert(parsed, chk.DeepEquals, uuid)
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"net/http"
"runtime"
"sync/atomic"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
)
// TODO: rationalize and reduce the number of "pacer" classes (possibly by renaming those that, in the current design
// don't actually DO pacing)

// pacer limits outbound throughput by periodically replenishing a pool of
// "tickets" (bytes allowed per period). Fields are accessed from multiple
// goroutines; the int64 fields are manipulated with sync/atomic.
type pacer struct {
	bytesAvailable          int64 // tickets currently available to senders
	availableBytesPerPeriod int64 // tickets replenished each PacerTimeToWaitInMs period; adjusted by updateTargetRate
	bytesTransferred        int64 // NOTE(review): not referenced in this file — confirm usage elsewhere before removing
	lastUpdatedTimestamp    int64 // UnixNano of the last target-rate adjustment (see updateTargetRate)
}
// newPacer returns a pacer which limits the number of bytes allowed to go out
// every second; it does so by issuing tickets (bytes allowed) periodically
// from a background goroutine that runs for the lifetime of the transfer engine.
func newPacer(bytesPerSecond int64) (p *pacer) {
	p = &pacer{bytesAvailable: 0,
		availableBytesPerPeriod: bytesPerSecond * int64(PacerTimeToWaitInMs) / 1000,
		lastUpdatedTimestamp:    time.Now().UnixNano()}

	// the pacer runs in a separate goroutine for as long as the transfer engine is running
	go func() {
		for {
			// surrender control until time to wait has elapsed
			// TODO(review): this spin with Gosched burns CPU; consider time.Sleep / time.Ticker
			for targetTime := time.Now().Add(time.Millisecond * time.Duration(PacerTimeToWaitInMs)); time.Now().Before(targetTime); {
				runtime.Gosched()
			}

			// fix: availableBytesPerPeriod is concurrently written (atomically) by
			// updateTargetRate, so it must also be READ atomically here to avoid a data race
			atomic.StoreInt64(&p.bytesAvailable, atomic.LoadInt64(&p.availableBytesPerPeriod))

			// if too many tickets were issued (2x the intended), we should scale back
			//if atomic.AddInt64(&p.bytesAvailable, p.availableBytesPerPeriod) > 2*p.availableBytesPerPeriod {
			//	atomic.AddInt64(&p.bytesAvailable, -p.availableBytesPerPeriod)
			//}
		}
	}()
	return
}
// NewPacerPolicyFactory creates a pipeline policy factory that feeds response
// status codes back into the pacer: each successful response nudges the target
// rate up, while 503/500 responses nudge it down (see updateTargetRate).
// NOTE(review): the previous doc comment described "telemetry" objects — that
// looked copy-pasted from another factory.
func NewPacerPolicyFactory(p *pacer) pipeline.Factory {
	return pipeline.FactoryFunc(func(next pipeline.Policy, po *pipeline.PolicyOptions) pipeline.PolicyFunc {
		return func(ctx context.Context, request pipeline.Request) (pipeline.Response, error) {
			resp, err := next.Do(ctx, request)
			if p != nil && err == nil {
				// adjust the pacer's target rate: decrease on 503/500, increase otherwise
				// (updateTargetRate applies a ±10% change, at most once every 3 seconds)
				p.updateTargetRate(
					(resp.Response().StatusCode != http.StatusServiceUnavailable) &&
						(resp.Response().StatusCode != http.StatusInternalServerError))
			}
			return resp, err
		}
	})
}
// this function is called by goroutines to request the right to send a certain
// amount of bytes.
// NOTE(review): the ticket-consuming implementation is entirely commented out,
// so this is currently a no-op — pacing is effectively disabled on this path.
// Confirm whether that is intentional before relying on rate limiting here.
func (p *pacer) requestRightToSend(bytesToSend int64) {
	//// attempt to take off the desired number of tickets until success (total number of tickets is not negative)
	//for atomic.AddInt64(&p.bytesAvailable, -bytesToSend) < 0 {
	//
	//	// put tickets back if attempt was unsuccessful
	//	atomic.AddInt64(&p.bytesAvailable, bytesToSend)
	//	time.Sleep(time.Millisecond * 1)
	//}
	return
}
// updateTargetRate adjusts availableBytesPerPeriod by +10% (increase=true) or
// -10% (increase=false), at most once every 3 seconds. The CAS on
// lastUpdatedTimestamp ensures only one concurrent caller applies the change.
func (p *pacer) updateTargetRate(increase bool) {
	lastCheckedTimestamp := atomic.LoadInt64(&p.lastUpdatedTimestamp)
	// rate-limit the adjustments themselves: skip if the last one was < 3s ago
	if time.Since(time.Unix(0, lastCheckedTimestamp)) < (time.Second * 3) {
		return
	}

	if atomic.CompareAndSwapInt64(&p.lastUpdatedTimestamp, lastCheckedTimestamp, time.Now().UnixNano()) {
		// fix: read the current rate atomically — the pacer goroutine reads this
		// field concurrently, so a plain read here was a data race
		oldRate := atomic.LoadInt64(&p.availableBytesPerPeriod)
		atomic.StoreInt64(&p.availableBytesPerPeriod, int64(common.Iffloat64(increase, 1.1, 0.9)*float64(oldRate)))
	}
}
<file_sep>PROJECT_NAME = azure-storage-azcopy
WORK_DIR = /go/src/github.com/Azure/${PROJECT_NAME}

# run the given command (arg 1) inside the project container;
# extra docker-compose run flags may be passed as arg 2
define with_docker
WORK_DIR=$(WORK_DIR) docker-compose run $(2) --rm $(PROJECT_NAME) $(1)
endef

# fix: declare all targets as phony so a file named e.g. "test", "build" or
# "clean" in the working tree can never mask the corresponding target
.PHONY: login docker-compose docker-build docker-clean dep setup test build build-osx smoke all clean vet lint help

login: setup ## get a shell into the container
	WORK_DIR=$(WORK_DIR) docker-compose run --rm --entrypoint /bin/bash $(PROJECT_NAME)

# sanity check that docker-compose is on PATH
docker-compose:
	which docker-compose

# build the project's container image
docker-build: docker-compose
	WORK_DIR=$(WORK_DIR) docker-compose build --force-rm

# tear down compose services
docker-clean: docker-compose
	WORK_DIR=$(WORK_DIR) docker-compose down

# fetch Go dependencies inside the container
dep: docker-build #
	$(call with_docker,dep ensure -v)

setup: clean docker-build dep ## setup environment for development

test: setup ## run go tests
	ACCOUNT_NAME=$(ACCOUNT_NAME) ACCOUNT_KEY=$(ACCOUNT_KEY) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) $(call with_docker,go test -race -short -cover ./cmd ./common ./ste ./azbfs, -e ACCOUNT_NAME -e ACCOUNT_KEY -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY)

build: setup ## build binaries for the project
	# the environment variables need to be passed into the container explicitly
	GOARCH=amd64 GOOS=linux $(call with_docker,go build -o "azcopy_linux_amd64",-e GOARCH -e GOOS)
	GOARCH=amd64 GOOS=linux $(call with_docker,go build -tags "se_integration" -o "azcopy_linux_se_amd64",-e GOARCH -e GOOS)
	GOARCH=amd64 GOOS=windows $(call with_docker,go build -o "azcopy_windows_amd64.exe",-e GOARCH -e GOOS)
	GOARCH=386 GOOS=windows $(call with_docker,go build -o "azcopy_windows_386.exe",-e GOARCH -e GOOS)

build-osx: setup ## build osx binary specially, as it's using CGO
	CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 $(call with_docker,go build -o "azcopy_darwin_amd64",-e CC -e CXX -e GOOS -e GOARCH -e CGO_ENABLED)

smoke: setup ## set up smoke test
	$(call with_docker,go build -o test-validator ./testSuite/)

all: setup test build smoke ## run all tests and lints

## unused for now
clean: docker-clean ## clean environment and binaries
	rm -rf bin

vet: setup ## run go vet
	$(call with_docker,go vet ./...)

lint: setup ## run go lint
	$(call with_docker,golint -set_exit_status ./...)

help: ## display this help screen
	@grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
<file_sep>package common
import (
chk "gopkg.in/check.v1"
)
// mainTestSuite registers the concurrency-computation tests with gocheck.
type mainTestSuite struct{}

var _ = chk.Suite(&mainTestSuite{})

// expected clamping bounds for ComputeConcurrencyValue (must match production values)
const (
	minConcurrency = 32
	maxConcurrency = 300
)
// TestConcurrencyValue checks ComputeConcurrencyValue across the CPU-count
// spectrum: clamped to minConcurrency on weak machines (<5 CPUs), linear
// (16 per CPU) on moderate machines (5-18), and clamped to maxConcurrency
// on powerful machines (19+).
func (s *mainTestSuite) TestConcurrencyValue(c *chk.C) {
	for numCPU := 1; numCPU < 24; numCPU++ {
		v := ComputeConcurrencyValue(numCPU)
		switch {
		case numCPU < 5: // weak machines
			c.Assert(v, chk.Equals, minConcurrency)
		case numCPU < 19: // moderately powerful machines
			c.Assert(v, chk.Equals, 16*numCPU)
		default: // powerful machines
			c.Assert(v, chk.Equals, maxConcurrency)
		}
	}
}
<file_sep>package cmd
import (
"context"
"fmt"
"net/http"
"net/url"
"os"
"reflect"
"strings"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-file-go/azfile"
"github.com/JeffreyRichter/enum/enum"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/spf13/cobra"
)
// EResourceType is the zero-value accessor for the ResourceType pseudo-enum.
var EResourceType = ResourceType(0)

// ResourceType defines the granularity of the resource being cleaned:
// a single file/blob, a bucket/container/share, or a whole account.
// NOTE(review): the original comment said "types of credentials" — that looked
// copy-pasted from another enum.
type ResourceType uint8

func (ResourceType) SingleFile() ResourceType { return ResourceType(0) }
func (ResourceType) Bucket() ResourceType     { return ResourceType(1) }
func (ResourceType) Account() ResourceType    { return ResourceType(2) } // For SAS or public.

// String renders the enum value via its accessor-method name (e.g. "SingleFile").
func (ct ResourceType) String() string {
	return enum.StringInt(ct, reflect.TypeOf(ct))
}

// Parse converts a (case-insensitive) name back into a ResourceType value.
func (ct *ResourceType) Parse(s string) error {
	val, err := enum.ParseInt(reflect.TypeOf(ct), s, true, true)
	if err == nil {
		*ct = val.(ResourceType)
	}
	return err
}
// EServiceType is the zero-value accessor for the ServiceType pseudo-enum.
var EServiceType = ServiceType(0)

// ServiceType defines which storage service the clean command targets:
// Blob, File, BlobFS (ADLS Gen2), or S3.
// NOTE(review): the original comment said "types of credentials" — that looked
// copy-pasted from another enum.
type ServiceType uint8

func (ServiceType) Blob() ServiceType   { return ServiceType(0) }
func (ServiceType) File() ServiceType   { return ServiceType(1) }
func (ServiceType) BlobFS() ServiceType { return ServiceType(2) } // For SAS or public.
func (ServiceType) S3() ServiceType     { return ServiceType(3) }

// String renders the enum value via its accessor-method name (e.g. "Blob").
func (ct ServiceType) String() string {
	return enum.StringInt(ct, reflect.TypeOf(ct))
}

// Parse converts a (case-insensitive) name back into a ServiceType value.
func (ct *ServiceType) Parse(s string) error {
	val, err := enum.ParseInt(reflect.TypeOf(ct), s, true, true)
	if err == nil {
		*ct = val.(ServiceType)
	}
	return err
}
// initializes the clean command, its aliases and description.
// The command takes exactly one positional argument (the resource URL) and two
// flags selecting the service (Blob/File/BlobFS/S3) and the resource
// granularity (SingleFile/Bucket/Account), then dispatches to the matching
// clean* helper.
func init() {
	resourceURL := ""

	serviceType := EServiceType.Blob()
	resourceType := EResourceType.SingleFile()
	var serviceTypeStr string
	var resourceTypeStr string

	cleanCmd := &cobra.Command{
		Use:     "clean",
		Aliases: []string{"clean"},
		Short:   "clean deletes everything inside the container.",
		Args: func(cmd *cobra.Command, args []string) error {
			// fix: the original only rejected >1 args, so zero args would
			// panic on args[0]; require exactly one
			if len(args) != 1 {
				return fmt.Errorf("invalid arguments for clean command")
			}
			resourceURL = args[0]
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			err := (&serviceType).Parse(serviceTypeStr)
			if err != nil {
				panic(fmt.Errorf("fail to parse service type %q, %v", serviceTypeStr, err))
			}
			err = (&resourceType).Parse(resourceTypeStr)
			if err != nil {
				panic(fmt.Errorf("fail to parse resource type %q, %v", resourceTypeStr, err))
			}

			switch serviceType {
			case EServiceType.Blob():
				switch resourceType {
				case EResourceType.Bucket():
					cleanContainer(resourceURL)
				case EResourceType.SingleFile():
					cleanBlob(resourceURL)
				case EResourceType.Account():
					cleanBlobAccount(resourceURL)
				}
			case EServiceType.File():
				switch resourceType {
				case EResourceType.Bucket():
					cleanShare(resourceURL)
				case EResourceType.SingleFile():
					cleanFile(resourceURL)
				case EResourceType.Account():
					cleanFileAccount(resourceURL)
				}
			case EServiceType.BlobFS():
				switch resourceType {
				case EResourceType.Bucket():
					cleanFileSystem(resourceURL)
				case EResourceType.SingleFile():
					cleanBfsFile(resourceURL)
				case EResourceType.Account():
					cleanBfsAccount(resourceURL)
				}
			case EServiceType.S3():
				switch resourceType {
				case EResourceType.Bucket():
					cleanBucket(resourceURL)
				case EResourceType.SingleFile():
					cleanObject(resourceURL)
				case EResourceType.Account():
					cleanS3Account(resourceURL)
				}
			default:
				// fix: this is the serviceType switch, but the message blamed
				// (and printed) resourceType
				panic(fmt.Errorf("illegal serviceType %q", serviceType))
			}
		},
	}

	rootCmd.AddCommand(cleanCmd)

	cleanCmd.PersistentFlags().StringVar(&resourceTypeStr, "resourceType", "SingleFile", "Resource type, could be single file, bucket or account currently.")
	cleanCmd.PersistentFlags().StringVar(&serviceTypeStr, "serviceType", "Blob", "Account type, could be blob, file or blobFS currently.")
}
// cleanContainer deletes every blob inside the container identified by the
// given container URL (expected to carry a SAS). Exits the process with code 1
// on any parse, list, or delete failure.
func cleanContainer(container string) {
	containerSas, err := url.Parse(container)
	if err != nil {
		fmt.Println("error parsing the container sas, ", err)
		os.Exit(1)
	}

	p := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
	containerUrl := azblob.NewContainerURL(*containerSas, p)

	// perform a list blob, page by page, deleting each returned blob
	for marker := (azblob.Marker{}); marker.NotDone(); {
		// look for all blobs that start with the prefix, so that if a blob is under the virtual directory, it will show up
		listBlob, err := containerUrl.ListBlobsFlatSegment(context.Background(), marker, azblob.ListBlobsSegmentOptions{})
		if err != nil {
			fmt.Println("error listing blobs inside the container. Please check the container sas", err)
			os.Exit(1)
		}

		// Process the blobs returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, blobInfo := range listBlob.Segment.BlobItems {
			// "include" deletes snapshots along with the blob itself
			_, err := containerUrl.NewBlobURL(blobInfo.Name).Delete(context.Background(), "include", azblob.BlobAccessConditions{})
			if err != nil {
				fmt.Println("error deleting the blob from container ", blobInfo.Name)
				os.Exit(1)
			}
		}
		marker = listBlob.NextMarker
	}
}
// cleanBlob deletes the single blob identified by the given blob URL
// (expected to carry a SAS), including its snapshots. Exits the process with
// code 1 on any failure.
func cleanBlob(blob string) {
	parsedURL, err := url.Parse(blob)
	if err != nil {
		fmt.Println("error parsing the container sas ", err)
		os.Exit(1)
	}

	pipeline := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
	if _, err = azblob.NewBlobURL(*parsedURL, pipeline).Delete(context.Background(), "include", azblob.BlobAccessConditions{}); err != nil {
		fmt.Println("error deleting the blob ", err)
		os.Exit(1)
	}
}
// cleanShare "cleans" a file share by deleting it (including snapshots),
// waiting for the deletion to complete service-side, then recreating it empty.
// A 404 on delete (share already gone) is tolerated; any other failure exits
// the process with code 1.
func cleanShare(shareURLStr string) {
	u, err := url.Parse(shareURLStr)
	if err != nil {
		fmt.Println("error parsing the share URL with SAS ", err)
		os.Exit(1)
	}

	p := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
	shareURL := azfile.NewShareURL(*u, p)
	_, err = shareURL.Delete(context.Background(), azfile.DeleteSnapshotsOptionInclude)
	if err != nil {
		// NOTE(review): this type assertion panics if err is not an
		// azfile.StorageError (e.g. a network error) — confirm that's acceptable
		sErr := err.(azfile.StorageError)
		if sErr != nil && sErr.Response().StatusCode != http.StatusNotFound {
			fmt.Fprintf(os.Stdout, "error deleting the share for clean share, error '%v'\n", err)
			os.Exit(1)
		}
	}

	// Sleep seconds to wait the share deletion got succeeded
	time.Sleep(45 * time.Second)

	_, err = shareURL.Create(context.Background(), azfile.Metadata{}, 0)
	if err != nil {
		fmt.Fprintf(os.Stdout, "error creating the share for clean share, error '%v'\n", err)
		os.Exit(1)
	}
}
// cleanFile deletes the single Azure file identified by the given file URL
// (expected to carry a SAS). Exits the process with code 1 on any failure.
func cleanFile(fileURLStr string) {
	parsedURL, err := url.Parse(fileURLStr)
	if err != nil {
		fmt.Println("error parsing the file URL with SAS", err)
		os.Exit(1)
	}

	pipeline := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
	if _, err = azfile.NewFileURL(*parsedURL, pipeline).Delete(context.Background()); err != nil {
		fmt.Println("error deleting the file ", err)
		os.Exit(1)
	}
}
// createBlobFSPipeline builds an azbfs pipeline authenticated with the shared
// key taken from the ACCOUNT_NAME / ACCOUNT_KEY environment variables.
// Exits the process with code 1 if either variable is unset.
func createBlobFSPipeline() pipeline.Pipeline {
	name, key := os.Getenv("ACCOUNT_NAME"), os.Getenv("ACCOUNT_KEY")
	if name == "" || key == "" {
		fmt.Println("ACCOUNT_NAME and ACCOUNT_KEY should be set before cleaning the file system")
		os.Exit(1)
	}
	return azbfs.NewPipeline(azbfs.NewSharedKeyCredential(name, key), azbfs.PipelineOptions{})
}
// cleanFileSystem deletes the Blob FS (ADLS Gen2) file system addressed by
// fsURLStr, waits for the service-side deletion to complete, then recreates an
// empty file system with the same name. Any unexpected failure is fatal.
func cleanFileSystem(fsURLStr string) {
	ctx := context.Background()
	u, err := url.Parse(fsURLStr)
	if err != nil {
		fmt.Println("error parsing the file system URL", err)
		os.Exit(1)
	}
	fsURL := azbfs.NewFileSystemURL(*u, createBlobFSPipeline())
	_, err = fsURL.Delete(ctx)
	if err != nil {
		// Tolerate 404 (file system already absent); anything else is fatal.
		// BUG FIX: use the comma-ok assertion so a non-StorageError error
		// cannot panic here.
		sErr, ok := err.(azbfs.StorageError)
		if !ok || sErr.Response().StatusCode != http.StatusNotFound {
			fmt.Println(fmt.Sprintf("error deleting the file system for cleaning, %v", err))
			os.Exit(1)
		}
	}
	// Sleep seconds to wait the file system deletion got succeeded
	time.Sleep(45 * time.Second)
	_, err = fsURL.Create(ctx)
	if err != nil {
		// BUG FIX: the old code wrapped fmt.Fprintf in fmt.Println, which
		// printed Fprintf's (byte-count, error) return values rather than
		// the formatted message.
		fmt.Printf("error creating the file system for cleaning, %v\n", err)
		os.Exit(1)
	}
}
// cleanBfsFile deletes the single Blob FS (ADLS Gen2) file addressed by
// fileURLStr. Any failure is fatal: the error is printed and the process
// exits with code 1.
func cleanBfsFile(fileURLStr string) {
	ctx := context.Background()
	u, err := url.Parse(fileURLStr)
	if err != nil {
		fmt.Println("error parsing the file system URL, ", err)
		os.Exit(1)
	}
	fileURL := azbfs.NewFileURL(*u, createBlobFSPipeline())
	_, err = fileURL.Delete(ctx)
	if err != nil {
		// idiom: fmt.Printf replaces the redundant fmt.Println(fmt.Sprintf(...))
		fmt.Printf("error deleting the blob FS file, %v\n", err)
		os.Exit(1)
	}
}
// cleanBlobAccount deletes every container in the blob storage account
// addressed by resourceURL (an account URL carrying a SAS). Any failure is
// fatal: the error is printed and the process exits with code 1.
func cleanBlobAccount(resourceURL string) {
	acctURL, parseErr := url.Parse(resourceURL)
	if parseErr != nil {
		fmt.Println("error parsing the account sas ", parseErr)
		os.Exit(1)
	}
	pl := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
	serviceURL := azblob.NewServiceURL(*acctURL, pl)
	// page through the container listing, deleting each container as we go
	marker := azblob.Marker{}
	for marker.NotDone() {
		listing, listErr := serviceURL.ListContainersSegment(context.Background(), marker, azblob.ListContainersSegmentOptions{})
		if listErr != nil {
			fmt.Println("error listing containers, please check the container sas, ", listErr)
			os.Exit(1)
		}
		for _, container := range listing.ContainerItems {
			if _, delErr := serviceURL.NewContainerURL(container.Name).Delete(context.Background(), azblob.ContainerAccessConditions{}); delErr != nil {
				fmt.Println("error deleting the container from account, ", delErr)
				os.Exit(1)
			}
		}
		marker = listing.NextMarker
	}
}
// cleanFileAccount deletes every share (including snapshots) in the Azure
// Files account addressed by resourceURL (an account URL carrying a SAS).
// Any failure is fatal: the error is printed and the process exits with 1.
func cleanFileAccount(resourceURL string) {
	acctURL, parseErr := url.Parse(resourceURL)
	if parseErr != nil {
		fmt.Println("error parsing the account sas ", parseErr)
		os.Exit(1)
	}
	pl := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
	serviceURL := azfile.NewServiceURL(*acctURL, pl)
	// page through the share listing, deleting each share as we go
	marker := azfile.Marker{}
	for marker.NotDone() {
		listing, listErr := serviceURL.ListSharesSegment(context.Background(), marker, azfile.ListSharesOptions{})
		if listErr != nil {
			fmt.Println("error listing shares, please check the share sas, ", listErr)
			os.Exit(1)
		}
		for _, share := range listing.ShareItems {
			if _, delErr := serviceURL.NewShareURL(share.Name).Delete(context.Background(), azfile.DeleteSnapshotsOptionInclude); delErr != nil {
				fmt.Println("error deleting the share from account, ", delErr)
				os.Exit(1)
			}
		}
		marker = listing.NextMarker
	}
}
// cleanS3Account removes every bucket created by the s2s-copy test suites
// (names prefixed "s2scopybucket") from the S3 account addressed by
// resourceURL, deleting all contained objects first. Buckets without that
// prefix are left untouched. Any failure is fatal.
func cleanS3Account(resourceURL string) {
	u, err := url.Parse(resourceURL)
	if err != nil {
		fmt.Println("fail to parse the S3 service URL, ", err)
		os.Exit(1)
	}
	s3URLParts, err := common.NewS3URLParts(*u)
	if err != nil {
		fmt.Println("new S3 URL parts, ", err)
		os.Exit(1)
	}
	s3Client := createS3ClientWithMinio(createS3ResOptions{
		Location: s3URLParts.Region,
	})
	buckets, err := s3Client.ListBuckets()
	if err != nil {
		fmt.Println("error listing S3 service, ", err)
		os.Exit(1)
	}
	for _, bucket := range buckets {
		// Remove all the things in bucket with prefix
		if !strings.HasPrefix(bucket.Name, "s2scopybucket") {
			continue // skip buckets not created by s2s copy testings.
		}
		bucket := bucket // pin the loop variable for the goroutine below (pre-Go1.22 capture semantics)
		objectsCh := make(chan string)
		go func() {
			defer close(objectsCh)
			// List all objects from a bucket-name with a matching prefix.
			for object := range s3Client.ListObjectsV2(bucket.Name, "", true, context.Background().Done()) {
				if object.Err != nil {
					// BUG FIX: report the listing error itself (object.Err);
					// the old code printed the stale outer 'err' variable.
					fmt.Printf("error listing the object from bucket %q, %v\n", bucket.Name, object.Err)
					os.Exit(1)
				}
				objectsCh <- object.Key
			}
		}()
		// List bucket, and delete all the objects in the bucket
		errChn := s3Client.RemoveObjects(bucket.Name, objectsCh)
		for err := range errChn {
			fmt.Println("error remove objects from bucket, ", err)
			os.Exit(1)
		}
		// Remove the bucket.
		if err := s3Client.RemoveBucket(bucket.Name); err != nil {
			fmt.Printf("error deleting the bucket %q from account, %v\n", bucket.Name, err)
			os.Exit(1)
		}
	}
}
// cleanBfsAccount would clean an entire Blob FS account; intentionally left
// unimplemented because no current test exercises it.
func cleanBfsAccount(resourceURL string) {
	panic("not implemented: not used")
}
// cleanBucket would clean a single S3 bucket; intentionally left
// unimplemented because no current test exercises it.
func cleanBucket(resourceURL string) {
	panic("not implemented: not used")
}
// cleanObject would clean a single S3 object; intentionally left
// unimplemented because no current test exercises it.
func cleanObject(resourceURL string) {
	panic("not implemented: not used")
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
chk "gopkg.in/check.v1"
"os"
"path/filepath"
)
// syncProcessorSuite groups the sync delete-processor tests; registered with
// the gocheck runner below.
type syncProcessorSuite struct{}

var _ = chk.Suite(&syncProcessorSuite{})
// TestLocalDeleter verifies that the sync local delete processor removes a
// file from the destination directory.
func (s *syncProcessorSuite) TestLocalDeleter(c *chk.C) {
	// create a local directory containing the one file the deleter will remove
	dirPath := scenarioHelper{}.generateLocalDirectory(c)
	extraFileName := "extraFile.txt"
	scenarioHelper{}.generateLocalFilesFromList(c, dirPath, []string{extraFileName})

	// simulate the cooked user input
	args := &cookedSyncCmdArgs{
		destination:       dirPath,
		deleteDestination: common.EDeleteDestination.True(),
	}

	// set up local deleter
	deleter := newSyncLocalDeleteProcessor(args)

	// sanity check: the file exists before deletion
	fullPath := filepath.Join(dirPath, extraFileName)
	_, statErr := os.Stat(fullPath)
	c.Assert(statErr, chk.IsNil)

	// exercise the deleter
	c.Assert(deleter.removeImmediately(storedObject{relativePath: extraFileName}), chk.IsNil)

	// the file must be gone afterwards
	_, statErr = os.Stat(fullPath)
	c.Assert(statErr, chk.NotNil)
}
// TestBlobDeleter verifies that the sync blob delete processor removes the
// given blob from the destination container.
func (s *syncProcessorSuite) TestBlobDeleter(c *chk.C) {
	bsu := getBSU()
	extraBlobName := "extraBlob.pdf"

	// create a container holding the blob we intend to delete
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	scenarioHelper{}.generateBlobsFromList(c, containerURL, []string{extraBlobName})

	// the blob must exist before the deleter runs
	blobURL := containerURL.NewBlobURL(extraBlobName)
	_, propsErr := blobURL.GetProperties(context.Background(), azblob.BlobAccessConditions{})
	c.Assert(propsErr, chk.IsNil)

	// simulate the cooked user input
	rawContainerURL := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	urlParts := azblob.NewBlobURLParts(rawContainerURL)
	args := &cookedSyncCmdArgs{
		destination:       containerURL.String(),
		destinationSAS:    urlParts.SAS.Encode(),
		credentialInfo:    common.CredentialInfo{CredentialType: common.ECredentialType.Anonymous()},
		deleteDestination: common.EDeleteDestination.True(),
	}

	// set up the blob deleter
	deleter, creationErr := newSyncBlobDeleteProcessor(args)
	c.Assert(creationErr, chk.IsNil)

	// exercise the deleter
	c.Assert(deleter.removeImmediately(storedObject{relativePath: extraBlobName}), chk.IsNil)

	// the blob must be gone afterwards
	_, propsErr = blobURL.GetProperties(context.Background(), azblob.BlobAccessConditions{})
	c.Assert(propsErr, chk.NotNil)
}
<file_sep>package common
import (
"reflect"
"time"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/JeffreyRichter/enum/enum"
)
var ERpcCmd = RpcCmd("")

// RpcCmd identifies which RPC operation is being invoked between the front end
// and the storage engine. (The previous comment here described JobStatus — a
// copy-paste error.)
type RpcCmd string

// None is the sentinel "no command" value.
func (RpcCmd) None() RpcCmd               { return RpcCmd("--none--") }
func (RpcCmd) CopyJobPartOrder() RpcCmd   { return RpcCmd("CopyJobPartOrder") }
func (RpcCmd) ListJobs() RpcCmd           { return RpcCmd("ListJobs") }
func (RpcCmd) ListJobSummary() RpcCmd     { return RpcCmd("ListJobSummary") }
func (RpcCmd) ListSyncJobSummary() RpcCmd { return RpcCmd("ListSyncJobSummary") }
func (RpcCmd) ListJobTransfers() RpcCmd   { return RpcCmd("ListJobTransfers") }
func (RpcCmd) CancelJob() RpcCmd          { return RpcCmd("Cancel") }
func (RpcCmd) PauseJob() RpcCmd           { return RpcCmd("PauseJob") }
func (RpcCmd) ResumeJob() RpcCmd          { return RpcCmd("ResumeJob") }
func (RpcCmd) GetJobFromTo() RpcCmd       { return RpcCmd("GetJobFromTo") }

// String returns the command's name via the enum reflection helper.
func (c RpcCmd) String() string {
	return enum.String(c, reflect.TypeOf(c))
}

// Pattern returns the URL path this command is served under, e.g. "/ListJobs".
func (c RpcCmd) Pattern() string { return "/" + c.String() }

// Parse sets c from its string representation s; on failure c is unchanged
// and the parse error is returned.
func (c *RpcCmd) Parse(s string) error {
	val, err := enum.Parse(reflect.TypeOf(c), s, false)
	if err == nil {
		*c = val.(RpcCmd)
	}
	return err
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

// CopyJobPartOrderRequest represents the job info (a single part) to be sent to the storage engine.
type CopyJobPartOrderRequest struct {
	Version     Version     // version of the azcopy
	JobID       JobID       // Guid - job identifier
	PartNum     PartNumber  // part number of the job
	IsFinalPart bool        // to determine the final part for a specific job
	ForceWrite  bool        // to determine if the existing needs to be overwritten or not. If set to true, existing blobs are overwritten
	Priority    JobPriority // priority of the task
	FromTo      FromTo      // source/destination location pair
	Include     map[string]int
	Exclude     map[string]int
	// list of blobTypes to exclude.
	ExcludeBlobType []azblob.BlobType
	SourceRoot      string
	DestinationRoot string
	Transfers       []CopyTransfer
	LogLevel        LogLevel
	BlobAttributes  BlobTransferAttributes
	SourceSAS       string
	DestinationSAS  string
	// commandString hold the user given command which is logged to the Job log file
	CommandString  string
	CredentialInfo CredentialInfo

	S2SGetPropertiesInBackend      bool
	S2SSourceChangeValidation      bool
	S2SInvalidMetadataHandleOption InvalidMetadataHandleOption
}
// CredentialInfo contains essential credential info which need be transited between modules,
// and used during creating Azure storage client Credential.
type CredentialInfo struct {
	CredentialType   CredentialType
	OAuthTokenInfo   OAuthTokenInfo
	S3CredentialInfo S3CredentialInfo
}

// S3CredentialInfo contains essential credential info which need to build up S3 client.
type S3CredentialInfo struct {
	Endpoint string
	Region   string
}

// CopyJobPartOrderResponse is the engine's reply to a CopyJobPartOrderRequest.
type CopyJobPartOrderResponse struct {
	ErrorMsg   string // empty when the part was accepted
	JobStarted bool
}

// ListRequest represents the raw list command input from the user when requested the list of transfer with given status for given JobId
type ListRequest struct {
	JobID    JobID
	OfStatus string // TODO: OfStatus with string type sounds not good, change it to enum
	Output   OutputFormat
}
// BlobTransferAttributes represents the optional attributes for blob request headers.
type BlobTransferAttributes struct {
	BlobType                 BlobType             // The type of a blob - BlockBlob, PageBlob, AppendBlob
	ContentType              string               // The content type specified for the blob.
	ContentEncoding          string               // Specifies which content encodings have been applied to the blob.
	ContentLanguage          string               // Specifies the language of the content
	ContentDisposition       string               // Specifies the content disposition
	CacheControl             string               // Specifies the cache control header
	BlockBlobTier            BlockBlobTier        // Specifies the tier to set on the block blobs.
	PageBlobTier             PageBlobTier         // Specifies the tier to set on the page blobs.
	Metadata                 string               // User-defined Name-value pairs associated with the blob
	NoGuessMimeType          bool                 // represents user decision to interpret the content-encoding from source file
	PreserveLastModifiedTime bool                 // when downloading, tell engine to set file's timestamp to timestamp of blob
	PutMd5                   bool                 // when uploading, should we create and PUT Content-MD5 hashes
	MD5ValidationOption      HashValidationOption // when downloading, how strictly should we validate MD5 hashes?
	BlockSizeInBytes         uint32               // size of each block/chunk, in bytes
}
// JobIDDetails identifies one job: its ID, the command that created it, and
// its start time.
type JobIDDetails struct {
	JobId         JobID
	CommandString string
	StartTime     int64
}

// ListJobsResponse represents the set of jobs (IDs plus details) known to the
// engine, or an error message when listing failed.
type ListJobsResponse struct {
	ErrorMessage string
	JobIDDetails []JobIDDetails
}

// ListContainerResponse represents the list of blobs within the container.
type ListContainerResponse struct {
	Blobs []string
}

// ListJobSummaryResponse represents the job progress summary returned by the
// list command when the Job Progress Summary is requested for a given JobId.
type ListJobSummaryResponse struct {
	ErrorMsg  string
	Timestamp time.Time `json:"-"`
	JobID     JobID     `json:"-"`
	// TODO: added for debugging purpose. remove later
	ActiveConnections int64
	// CompleteJobOrdered determines whether the Job has been completely ordered or not
	CompleteJobOrdered bool
	JobStatus          JobStatus
	TotalTransfers     uint32
	TransfersCompleted uint32
	TransfersFailed    uint32
	TransfersSkipped   uint32
	BytesOverWire      uint64
	// sum of the size of transfer completed successfully so far.
	TotalBytesTransferred uint64
	// sum of the total transfer enumerated so far.
	TotalBytesEnumerated uint64
	FailedTransfers      []TransferDetail
	SkippedTransfers     []TransferDetail
	PerfConstraint       PerfConstraint
	PerfStrings          []string `json:"-"`
}
// ListSyncJobSummaryResponse represents the sync job progress summary returned
// by the list command when the Job Progress Summary is requested for a given JobId.
type ListSyncJobSummaryResponse struct {
	ErrorMsg  string
	Timestamp time.Time `json:"-"`
	JobID     JobID     `json:"-"`
	// TODO: added for debugging purpose. remove later
	ActiveConnections int64
	// CompleteJobOrdered determines whether the Job has been completely ordered or not
	CompleteJobOrdered       bool
	JobStatus                JobStatus
	CopyTotalTransfers       uint32
	CopyTransfersCompleted   uint32
	CopyTransfersFailed      uint32
	BytesOverWire            uint64
	DeleteTotalTransfers     uint32
	DeleteTransfersCompleted uint32
	DeleteTransfersFailed    uint32
	FailedTransfers          []TransferDetail
	PerfConstraint           PerfConstraint
	PerfStrings              []string `json:"-"`
	// sum of the size of transfer completed successfully so far.
	TotalBytesTransferred uint64
	// sum of the total transfer enumerated so far.
	TotalBytesEnumerated uint64
}

// ListJobTransfersRequest asks for the transfers of a job filtered by status.
type ListJobTransfersRequest struct {
	JobID    JobID
	OfStatus TransferStatus
}

// ResumeJobRequest carries everything needed to resume a previously paused or
// interrupted job, including refreshed SAS tokens and credentials.
type ResumeJobRequest struct {
	JobID           JobID
	SourceSAS       string
	DestinationSAS  string
	IncludeTransfer map[string]int
	ExcludeTransfer map[string]int
	CredentialInfo  CredentialInfo
}

// TransferDetail represents the status and details of a single transfer.
type TransferDetail struct {
	Src            string
	Dst            string
	TransferStatus TransferStatus
	ErrorCode      int32
}

// CancelPauseResumeResponse reports whether a cancel/pause/resume request took effect.
type CancelPauseResumeResponse struct {
	ErrorMsg              string
	CancelledPauseResumed bool
}

// ListJobTransfersResponse represents the list of details of a number of transfers.
type ListJobTransfersResponse struct {
	ErrorMsg string
	JobID    JobID
	Details  []TransferDetail
}

// GetJobFromToRequest indicates request to get job's FromTo info from job part plan header
type GetJobFromToRequest struct {
	JobID JobID
}

// GetJobFromToResponse indicates response to get job's FromTo info.
type GetJobFromToResponse struct {
	ErrorMsg    string
	FromTo      FromTo
	Source      string
	Destination string
}
<file_sep>package common
import (
"encoding/json"
"github.com/JeffreyRichter/enum/enum"
"reflect"
"strings"
"time"
)
var eOutputMessageType = outputMessageType(0)

// outputMessageType defines the nature of the output, ex: progress report, job summary, or error
type outputMessageType uint8

func (outputMessageType) Init() outputMessageType     { return outputMessageType(0) } // simple print, allowed to float up
func (outputMessageType) Info() outputMessageType     { return outputMessageType(1) } // simple print, allowed to float up
func (outputMessageType) Progress() outputMessageType { return outputMessageType(2) } // should be printed on the same line over and over again, not allowed to float up
func (outputMessageType) Exit() outputMessageType     { return outputMessageType(3) } // exit after printing
func (outputMessageType) Error() outputMessageType    { return outputMessageType(4) } // indicate fatal error, exit right after
func (outputMessageType) Prompt() outputMessageType   { return outputMessageType(5) } // ask the user a question after erasing the progress

// String returns the message type's name via the enum reflection helper.
func (o outputMessageType) String() string {
	return enum.StringInt(o, reflect.TypeOf(o))
}

// outputMessage defines the output and how it should be handled
type outputMessage struct {
	msgContent   string
	msgType      outputMessageType
	exitCode     ExitCode      // only for when the application is meant to exit after printing (i.e. Error or Final)
	inputChannel chan<- string // support getting a response from the user
}

// OutputBuilder is used for output types that are not simple strings, such as progress and init;
// a given format (text, json) is passed in, and the appropriate string is returned.
type OutputBuilder func(OutputFormat) string
// -------------------------------------- JSON templates -------------------------------------- //

// GetJsonStringFromTemplate marshals the given template value and returns the
// resulting JSON as a string; panics (via PanicIfErr) if marshaling fails.
func GetJsonStringFromTemplate(template interface{}) string {
	marshaled, marshalErr := json.Marshal(template)
	PanicIfErr(marshalErr)
	return string(marshaled)
}
// jsonOutputTemplate defines the general output template when the format is set to json.
type jsonOutputTemplate struct {
	TimeStamp      time.Time
	MessageType    string
	MessageContent string // a simple string for INFO and ERROR, a serialized JSON for INIT, PROGRESS, EXIT
}

// newJsonOutputTemplate stamps the message with the current time and the
// message type's string name.
func newJsonOutputTemplate(messageType outputMessageType, messageContent string) *jsonOutputTemplate {
	return &jsonOutputTemplate{TimeStamp: time.Now(), MessageType: messageType.String(), MessageContent: messageContent}
}

// InitMsgJsonTemplate is the JSON shape of the job-start (INIT) message.
type InitMsgJsonTemplate struct {
	LogFileLocation string
	JobID           string
}
// GetStandardInitOutputBuilder returns an OutputBuilder that renders the
// job-start announcement either as JSON (InitMsgJsonTemplate) or as plain text,
// depending on the requested output format.
func GetStandardInitOutputBuilder(jobID string, logFileLocation string) OutputBuilder {
	return func(format OutputFormat) string {
		if format == EOutputFormat.Json() {
			return GetJsonStringFromTemplate(InitMsgJsonTemplate{
				JobID:           jobID,
				LogFileLocation: logFileLocation,
			})
		}
		// plain-text rendering
		return "\nJob " + jobID + " has started\n" +
			"Log file is located at: " + logFileLocation + "\n"
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"fmt"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-file-go/azfile"
"net/url"
"strings"
)
// fileTraverser allows us to iterate through a path pointing to the Azure
// Files endpoint — either a single file or a share/directory subtree.
type fileTraverser struct {
	rawURL    *url.URL         // the target file/directory/share URL
	p         pipeline.Pipeline // request pipeline used for all service calls
	ctx       context.Context
	recursive bool // when true, sub-directories are traversed as well

	// a generic function to notify that a new stored object has been enumerated
	incrementEnumerationCounter func()
}
// getPropertiesIfSingleFile probes the raw URL as a file. On success it
// returns the file's properties and true; otherwise (nil, false), meaning the
// URL does not address a single file.
func (t *fileTraverser) getPropertiesIfSingleFile() (*azfile.FileGetPropertiesResponse, bool) {
	props, propsErr := azfile.NewFileURL(*t.rawURL, t.p).GetProperties(t.ctx)
	if propsErr != nil {
		return nil, false
	}
	// properties were retrievable, so we are looking at a single file
	return props, true
}
// traverse enumerates the target (a single file, or a directory/share subtree
// when recursive is set), wraps each file as a storedObject, and hands it to
// processor after applying filters. Returns the first processing or listing
// error encountered.
func (t *fileTraverser) traverse(processor objectProcessor, filters []objectFilter) (err error) {
	targetURLParts := azfile.NewFileURLParts(*t.rawURL)

	// if not pointing to a share, check if we are pointing to a single file
	if targetURLParts.DirectoryOrFilePath != "" {
		// check if the url points to a single file
		fileProperties, isFile := t.getPropertiesIfSingleFile()
		if isFile {
			storedObject := newStoredObject(
				getObjectNameOnly(targetURLParts.DirectoryOrFilePath),
				"", // relative path makes no sense when the full path already points to the file
				fileProperties.LastModified(),
				fileProperties.ContentLength(),
				fileProperties.ContentMD5(),
				blobTypeNA,
			)

			if t.incrementEnumerationCounter != nil {
				t.incrementEnumerationCounter()
			}

			return processIfPassedFilters(filters, storedObject, processor)
		}
	}

	// get the directory URL so that we can list the files
	directoryURL := azfile.NewDirectoryURL(targetURLParts.URL(), t.p)

	// directories are processed breadth-first via an explicit stack,
	// avoiding recursion
	dirStack := &directoryStack{}
	dirStack.Push(directoryURL)
	for currentDirURL, ok := dirStack.Pop(); ok; currentDirURL, ok = dirStack.Pop() {
		// Perform list files and directories, one marker-delimited page at a time.
		for marker := (azfile.Marker{}); marker.NotDone(); {
			lResp, err := currentDirURL.ListFilesAndDirectoriesSegment(t.ctx, marker, azfile.ListFilesAndDirectoriesOptions{})
			if err != nil {
				return fmt.Errorf("cannot list files due to reason %s", err)
			}

			// Process the files returned in this segment.
			for _, fileInfo := range lResp.FileItems {
				f := currentDirURL.NewFileURL(fileInfo.Name)

				//// TODO: the cost is high while otherwise we cannot get the last modified time. As Azure file's PM description, list might get more valuable file properties later, optimize the logic after the change...
				//// TODO this traverser is only being used by rm at the moment, so we don't need the properties, uncomment in the future when this is no longer true
				//fileProperties, err := f.GetProperties(t.ctx)
				//if err != nil {
				//	return err
				//}

				// compute the relative path of the file with respect to the target directory
				fileURLParts := azfile.NewFileURLParts(f.URL())
				relativePath := strings.TrimPrefix(fileURLParts.DirectoryOrFilePath, targetURLParts.DirectoryOrFilePath)
				relativePath = strings.TrimPrefix(relativePath, common.AZCOPY_PATH_SEPARATOR_STRING)

				// NOTE: lastModifiedTime/md5/size are deliberately left unset
				// (see TODOs above) — only name and relativePath are populated.
				storedObject := storedObject{
					name:         getObjectNameOnly(fileInfo.Name),
					relativePath: relativePath,
					//lastModifiedTime: fileProperties.LastModified(),
					//md5:              fileProperties.ContentMD5(),
					//size:             fileProperties.ContentLength(),
				}

				if t.incrementEnumerationCounter != nil {
					t.incrementEnumerationCounter()
				}

				processErr := processIfPassedFilters(filters, storedObject, processor)
				if processErr != nil {
					return processErr
				}
			}

			// If recursive is turned on, add sub directories.
			if t.recursive {
				for _, dirInfo := range lResp.DirectoryItems {
					d := currentDirURL.NewDirectoryURL(dirInfo.Name)
					dirStack.Push(d)
				}
			}
			marker = lResp.NextMarker
		}
	}
	return
}
// newFileTraverser constructs a fileTraverser over the given Azure Files URL.
func newFileTraverser(rawURL *url.URL, p pipeline.Pipeline, ctx context.Context, recursive bool, incrementEnumerationCounter func()) (t *fileTraverser) {
	return &fileTraverser{
		rawURL:                      rawURL,
		p:                           p,
		ctx:                         ctx,
		recursive:                   recursive,
		incrementEnumerationCounter: incrementEnumerationCounter,
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"errors"
"fmt"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/spf13/cobra"
)
// init registers the "login" command and its flags with the root command.
func init() {
	// NOTE(review): this DefaultTenantID value is overwritten by the flag
	// default "" when StringVar registers tenant-id below — confirm whether
	// common.DefaultTenantID should be passed as the flag's default instead.
	loginCmdArgs := loginCmdArgs{tenantID: common.DefaultTenantID}

	// lgCmd represents the login command
	lgCmd := &cobra.Command{
		Use:        "login",
		SuggestFor: []string{"login"},
		Short:      loginCmdShortDescription,
		Long:       loginCmdLongDescription,
		Example:    loginCmdExample,
		Args: func(cmd *cobra.Command, args []string) error {
			return nil
		},
		RunE: func(cmd *cobra.Command, args []string) error {
			err := loginCmdArgs.process()
			if err != nil {
				return fmt.Errorf("failed to perform login command, %v", err)
			}
			return nil
		},
	}

	rootCmd.AddCommand(lgCmd)

	lgCmd.PersistentFlags().StringVar(&loginCmdArgs.tenantID, "tenant-id", "", "the Azure active directory tenant id to use for OAuth device interactive login")
	lgCmd.PersistentFlags().StringVar(&loginCmdArgs.aadEndpoint, "aad-endpoint", "", "the Azure active directory endpoint to use for OAuth user interactive login")
	// Use identity which aligns to Azure powershell and CLI.
	lgCmd.PersistentFlags().BoolVar(&loginCmdArgs.identity, "identity", false, "log in using virtual machine's identity, also known as managed service identity (MSI)")
	// Client ID of user-assigned identity.
	lgCmd.PersistentFlags().StringVar(&loginCmdArgs.identityClientID, "identity-client-id", "", "client ID of user-assigned identity")
	// Object ID of user-assigned identity.
	lgCmd.PersistentFlags().StringVar(&loginCmdArgs.identityObjectID, "identity-object-id", "", "object ID of user-assigned identity")
	// Resource ID of user-assigned identity.
	lgCmd.PersistentFlags().StringVar(&loginCmdArgs.identityResourceID, "identity-resource-id", "", "resource ID of user-assigned identity")

	// hide flags
	// temporarily hide aad-endpoint and support Production environment only.
	lgCmd.PersistentFlags().MarkHidden("aad-endpoint")
}
// loginCmdArgs holds the parsed command-line arguments of the login command.
type loginCmdArgs struct {
	// OAuth login arguments
	tenantID    string
	aadEndpoint string
	identity    bool // Whether to use MSI.

	// Info of VM's user assigned identity, client or object ids of the service identity are required if
	// your VM has multiple user-assigned managed identities.
	// https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/how-to-use-vm-token#get-a-token-using-go
	identityClientID   string
	identityObjectID   string
	identityResourceID string
}
// validate rejects inconsistent combinations of the OAuth login arguments.
// Only one kind of oauth login is supported at a time.
func (lca loginCmdArgs) validate() error {
	switch {
	case lca.identity && lca.tenantID != "":
		return errors.New("tenant ID cannot be used with identity")
	case !lca.identity && (lca.identityClientID != "" || lca.identityObjectID != "" || lca.identityResourceID != ""):
		return errors.New("identity client/object/resource ID is only valid when using identity")
	default:
		return nil
	}
}
// process validates the arguments, performs the requested login (MSI or
// interactive user login), and persists the resulting token to the local
// cache on success.
func (lca loginCmdArgs) process() error {
	// Validate login parameters.
	if err := lca.validate(); err != nil {
		return err
	}

	uotm := GetUserOAuthTokenManagerInstance()
	// Persist the token to cache, if login fulfilled successfully.
	if lca.identity {
		// managed-identity path: no user interaction needed
		if _, err := uotm.MSILogin(context.TODO(), common.IdentityInfo{
			ClientID: lca.identityClientID,
			ObjectID: lca.identityObjectID,
			MSIResID: lca.identityResourceID,
		}, true); err != nil {
			return err
		}
		// For MSI login, info success message to user.
		glcm.Info("Login with identity succeeded.")
	} else {
		// interactive user-login path
		if _, err := uotm.UserLogin(lca.tenantID, lca.aadEndpoint, true); err != nil {
			return err
		}
		// User fulfills login in browser, and there would be message in browser indicating whether login fulfilled successfully.
		glcm.Info("Login succeeded.")
	}

	return nil
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common
import (
"os"
"syscall"
"unsafe"
)
// CreateFileOfSize creates (or truncates) a file at destinationPath pre-sized
// to fileSize bytes, without the write-through option.
func CreateFileOfSize(destinationPath string, fileSize int64) (*os.File, error) {
	return CreateFileOfSizeWithWriteThroughOption(destinationPath, fileSize, false)
}
// CreateFileOfSizeWithWriteThroughOption creates (or truncates) the file at
// destinationPath pre-sized to fileSize bytes, creating parent directories as
// needed. When writeThrough is set, the file is opened with the Windows
// FILE_ATTRIBUTE_WRITE_THROUGH flag (see OpenWithWriteThroughSetting).
func CreateFileOfSizeWithWriteThroughOption(destinationPath string, fileSize int64, writeThrough bool) (*os.File, error) {
	err := CreateParentDirectoryIfNotExist(destinationPath)
	if err != nil {
		return nil, err
	}

	fd, err := OpenWithWriteThroughSetting(destinationPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, DEFAULT_FILE_PERM, writeThrough)
	if err != nil {
		return nil, err
	}
	f := os.NewFile(uintptr(fd), destinationPath)
	if f == nil {
		return nil, os.ErrInvalid
	}
	if truncateError := f.Truncate(fileSize); truncateError != nil {
		// BUG FIX: close the handle instead of leaking it when pre-sizing fails
		f.Close()
		return nil, truncateError
	}
	return f, nil
}
// makeInheritSa builds a SECURITY_ATTRIBUTES struct whose handle is marked
// inheritable by child processes.
func makeInheritSa() *syscall.SecurityAttributes {
	sa := syscall.SecurityAttributes{InheritHandle: 1}
	sa.Length = uint32(unsafe.Sizeof(sa))
	return &sa
}
// Windows file attribute instructing the OS to write through any intermediate
// cache directly to disk.
const FILE_ATTRIBUTE_WRITE_THROUGH = 0x80000000

// OpenWithWriteThroughSetting opens/creates a file handle exactly like
// syscall.Open, but optionally sets FILE_ATTRIBUTE_WRITE_THROUGH.
// Copied from syscall.open, but modified to allow setting of writeThrough option
// Param "perm" is unused both here and in the original Windows version of this routine.
func OpenWithWriteThroughSetting(path string, mode int, perm uint32, writeThrough bool) (fd syscall.Handle, err error) {
	if len(path) == 0 {
		return syscall.InvalidHandle, syscall.ERROR_FILE_NOT_FOUND
	}
	pathp, err := syscall.UTF16PtrFromString(path)
	if err != nil {
		return syscall.InvalidHandle, err
	}
	// translate the POSIX-style access mode to Win32 GENERIC_* access rights
	var access uint32
	switch mode & (syscall.O_RDONLY | syscall.O_WRONLY | syscall.O_RDWR) {
	case syscall.O_RDONLY:
		access = syscall.GENERIC_READ
	case syscall.O_WRONLY:
		access = syscall.GENERIC_WRITE
	case syscall.O_RDWR:
		access = syscall.GENERIC_READ | syscall.GENERIC_WRITE
	}
	if mode&syscall.O_CREAT != 0 {
		access |= syscall.GENERIC_WRITE
	}
	if mode&syscall.O_APPEND != 0 {
		// append access replaces generic write so existing data is preserved
		access &^= syscall.GENERIC_WRITE
		access |= syscall.FILE_APPEND_DATA
	}
	sharemode := uint32(syscall.FILE_SHARE_READ | syscall.FILE_SHARE_WRITE)
	var sa *syscall.SecurityAttributes
	if mode&syscall.O_CLOEXEC == 0 {
		// no close-on-exec requested: make the handle inheritable
		sa = makeInheritSa()
	}
	// translate the POSIX-style create/truncate flags to a Win32 creation mode
	var createmode uint32
	switch {
	case mode&(syscall.O_CREAT|syscall.O_EXCL) == (syscall.O_CREAT | syscall.O_EXCL):
		createmode = syscall.CREATE_NEW
	case mode&(syscall.O_CREAT|syscall.O_TRUNC) == (syscall.O_CREAT | syscall.O_TRUNC):
		createmode = syscall.CREATE_ALWAYS
	case mode&syscall.O_CREAT == syscall.O_CREAT:
		createmode = syscall.OPEN_ALWAYS
	case mode&syscall.O_TRUNC == syscall.O_TRUNC:
		createmode = syscall.TRUNCATE_EXISTING
	default:
		createmode = syscall.OPEN_EXISTING
	}
	var attr uint32
	attr = syscall.FILE_ATTRIBUTE_NORMAL
	if writeThrough {
		// this is the one behavioral difference from syscall.open
		attr |= FILE_ATTRIBUTE_WRITE_THROUGH
	}
	h, e := syscall.CreateFile(pathp, access, sharemode, sa, createmode, attr, 0)
	return h, e
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"errors"
"fmt"
"net/url"
"strconv"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/spf13/cobra"
)
// init registers the "list" (alias "ls") command with the root command and
// wires its flags into the package-level parameters struct.
func init() {
	var sourcePath = ""
	// listContainerCmd represents the list container command
	// listContainer list the blobs inside the container or virtual directory inside the container
	listContainerCmd := &cobra.Command{
		Use:     "list [containerURL]",
		Aliases: []string{"ls"},
		Short:   listCmdShortDescription,
		Long:    listCmdLongDescription,
		Example: listCmdExample,
		Args: func(cmd *cobra.Command, args []string) error {
			// the listContainer command requires necessarily to have an argument
			// If no argument is passed then it is not valid
			// lsc expects the container path / virtual directory
			// NOTE(review): two args are accepted here but only args[0] is used — confirm whether len(args) > 1 should be rejected
			if len(args) == 0 || len(args) > 2 {
				return errors.New("this command only requires container destination")
			}
			sourcePath = args[0]
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			// the expected argument in input is the container sas / or path of virtual directory in the container.
			// verifying the location type
			location := inferArgumentLocation(sourcePath)
			// only blob locations can be listed; glcm.Error reports the failure
			// (presumably terminating the command — confirm against glcm's contract)
			if location != location.Blob() {
				glcm.Error("invalid path passed for listing. given source is of type " + location.String() + " while expect is container / container path ")
			}
			err := HandleListContainerCommand(sourcePath)
			if err == nil {
				glcm.Exit(nil, common.EExitCode.Success())
			} else {
				glcm.Error(err.Error())
			}
		},
	}
	// output-format flags; their values land in the package-level ListParameters
	listContainerCmd.PersistentFlags().BoolVar(&parameters.MachineReadable, "machine-readable", false, "Lists file sizes in bytes")
	listContainerCmd.PersistentFlags().BoolVar(&parameters.RunningTally, "running-tally", false, "Counts the total number of files & their sizes")
	listContainerCmd.PersistentFlags().BoolVar(&parameters.MegaUnits, "mega-units", false, "Displays units in orders of 1000, not 1024")
	rootCmd.AddCommand(listContainerCmd)
}
// ListParameters holds the user-selected flags for the list command.
type ListParameters struct {
	MachineReadable bool // print sizes as raw byte counts instead of human-readable units
	RunningTally    bool // accumulate and print total file count and total size
	MegaUnits       bool // use powers of 1000 (KB, MB, ...) instead of 1024 (KiB, MiB, ...)
}

// parameters is the package-level flag state populated by init().
var parameters = ListParameters{}
// HandleListContainerCommand handles the list container command.
// It enumerates the blobs under the given container (or virtual directory)
// URL segment by segment, printing each blob's name and size, and — when the
// running-tally flag is set — prints the overall file count and total size
// once, after the enumeration completes.
func HandleListContainerCommand(source string) (err error) {
	// TODO: Temporarily use context.TODO(), this should be replaced with a root context from main.
	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)

	credentialInfo := common.CredentialInfo{}
	// Use source as resource URL, and it can be public access resource URL.
	if credentialInfo.CredentialType, err = getBlobCredentialType(ctx, source, true, false); err != nil {
		return err
	} else if credentialInfo.CredentialType == common.ECredentialType.OAuthToken() {
		// Message user that they are using OAuth token for authentication,
		// in case of silently using cached token without consciousness.
		glcm.Info("List is using OAuth token for authentication.")

		uotm := GetUserOAuthTokenManagerInstance()
		if tokenInfo, err := uotm.GetTokenInfo(ctx); err != nil {
			return err
		} else {
			credentialInfo.OAuthTokenInfo = *tokenInfo
		}
	}

	// Create Pipeline which will be used further in the blob operations.
	p, err := createBlobPipeline(ctx, credentialInfo)
	if err != nil {
		return err
	}

	// attempt to parse the source url
	sourceURL, err := url.Parse(source)
	if err != nil {
		return errors.New("cannot parse source URL")
	}

	util := copyHandlerUtil{} // TODO: util could be further refactored
	// get the container url to be used for listing
	literalContainerURL := util.getContainerURLFromString(*sourceURL)
	containerURL := azblob.NewContainerURL(literalContainerURL, p)

	// get the search prefix to query the service
	searchPrefix := ""
	// if the source is container url, then searchPrefix is empty
	if !util.urlIsContainerOrShare(sourceURL) {
		searchPrefix = util.getBlobNameFromURL(sourceURL.Path)
	}
	if len(searchPrefix) > 0 {
		// if the user did not specify / at the end of the virtual directory, add it before doing the prefix search
		if strings.LastIndex(searchPrefix, "/") != len(searchPrefix)-1 {
			searchPrefix += "/"
		}
	}

	fileCount := 0
	// int64, not int: total size of a large container easily overflows 32-bit int
	var sizeCount int64

	// perform a list blob, one segment at a time
	for marker := (azblob.Marker{}); marker.NotDone(); {
		// look for all blobs that start with the prefix
		listBlob, err := containerURL.ListBlobsFlatSegment(ctx, marker,
			azblob.ListBlobsSegmentOptions{Prefix: searchPrefix})
		if err != nil {
			return fmt.Errorf("cannot list blobs for download. Failed with error %s", err.Error())
		}

		// Build and print a summary for THIS segment only.
		// (Previously the summary accumulated across segments and was re-printed
		// on every iteration, duplicating earlier blobs in the output.)
		summary := common.ListContainerResponse{}
		// Process the blobs returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, blobInfo := range listBlob.Segment.BlobItems {
			blobName := blobInfo.Name + "; Content Size: "
			if parameters.MachineReadable {
				blobName += strconv.FormatInt(*blobInfo.Properties.ContentLength, 10)
			} else {
				blobName += byteSizeToString(*blobInfo.Properties.ContentLength)
			}
			if parameters.RunningTally {
				fileCount++
				sizeCount += *blobInfo.Properties.ContentLength
			}
			if len(searchPrefix) > 0 {
				// strip away search prefix from the blob name.
				blobName = strings.Replace(blobName, searchPrefix, "", 1)
			}
			summary.Blobs = append(summary.Blobs, blobName)
		}
		marker = listBlob.NextMarker
		printListContainerResponse(&summary)
	}

	// Print the tally exactly once, after the full enumeration.
	// (Previously it was printed after every segment.)
	if parameters.RunningTally {
		glcm.Info("")
		glcm.Info("File count: " + strconv.Itoa(fileCount))
		if parameters.MachineReadable {
			glcm.Info("Total file size: " + strconv.FormatInt(sizeCount, 10))
		} else {
			glcm.Info("Total file size: " + byteSizeToString(sizeCount))
		}
	}
	return nil
}
// printListContainerResponse prints every blob entry of the given list
// response via the lifecycle manager, one line per blob. Empty responses
// produce no output at all.
func printListContainerResponse(lsResponse *common.ListContainerResponse) {
	if len(lsResponse.Blobs) == 0 {
		return
	}
	// TODO determine what's the best way to display the blobs in JSON
	// TODO no partner team needs this functionality right now so the blobs are just outputted as info
	for _, blob := range lsResponse.Blobs {
		glcm.Info(blob)
	}
}
// megaSize holds the decimal (powers-of-1000) unit suffixes, substituted by
// byteSizeToString when the --mega-units flag is set; the binary (KiB/MiB/...)
// suffixes are defined inside byteSizeToString itself.
var megaSize = []string{
	"B",
	"KB",
	"MB",
	"GB",
	"TB",
	"PB",
	"EB",
}
// byteSizeToString renders a byte count as a human-readable string with two
// decimal places, e.g. "5.50 KiB". By default it uses binary units (base
// 1024, KiB/MiB/...); when parameters.MegaUnits is set it uses decimal units
// (base 1000, KB/MB/...) taken from megaSize.
func byteSizeToString(size int64) string {
	units := []string{
		"B",
		"KiB",
		"MiB",
		"GiB",
		"TiB",
		"PiB",
		"EiB", //Let's face it, a file probably won't be more than 1000 exabytes in YEARS. (and int64 literally isn't large enough to handle too many exbibytes. 128 bit processors when)
	}
	base := float64(1024)
	if parameters.MegaUnits {
		base = 1000
		units = megaSize
	}
	// repeatedly divide until the value drops below one whole unit
	value := float64(size)
	idx := 0
	for value/base >= 1 {
		value /= base
		idx++
	}
	return strconv.FormatFloat(value, 'f', 2, 64) + " " + units[idx]
}
<file_sep>package cmd
import (
chk "gopkg.in/check.v1"
)
// byteSizeToStringTestSuite groups the byteSizeToString unit tests for gocheck.
type byteSizeToStringTestSuite struct{}

// register the suite with the gocheck runner
var _ = chk.Suite(&byteSizeToStringTestSuite{})
// TestBToString verifies formatting of sizes below one KiB (plain bytes).
func (s *byteSizeToStringTestSuite) TestBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{50, "50.00 B"},
		{100, "100.00 B"},
		{125, "125.00 B"},
	}
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
// TestKiBToString verifies formatting of kibibyte-range sizes, including fractions.
func (s *byteSizeToStringTestSuite) TestKiBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{1024, "1.00 KiB"},
		{51200, "50.00 KiB"},
		{128000, "125.00 KiB"},
		{5632, "5.50 KiB"},
		{5376, "5.25 KiB"},
	}
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
// TestMiBToString verifies formatting of mebibyte-range sizes, including fractions.
func (s *byteSizeToStringTestSuite) TestMiBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{1048576, "1.00 MiB"},
		{52428800, "50.00 MiB"},
		{131072000, "125.00 MiB"},
		{5767168, "5.50 MiB"},
		{5505024, "5.25 MiB"},
	}
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
// TestGiBToString verifies formatting of gibibyte-range sizes, including fractions.
func (s *byteSizeToStringTestSuite) TestGiBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{1073741824, "1.00 GiB"},
		{53687091200, "50.00 GiB"},
		{134217728000, "125.00 GiB"},
		{5905580032, "5.50 GiB"},
		{5637144576, "5.25 GiB"},
	}
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
// TestTiBToString verifies formatting of tebibyte-range sizes, including fractions.
func (s *byteSizeToStringTestSuite) TestTiBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{1099511627776, "1.00 TiB"},
		{54975581388800, "50.00 TiB"},
		{137438953472000, "125.00 TiB"},
		{6047313952768, "5.50 TiB"},
		{5772436045824, "5.25 TiB"},
	}
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
// TestPiBToString verifies formatting of pebibyte-range sizes, including fractions.
func (s *byteSizeToStringTestSuite) TestPiBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{1125899906842624, "1.00 PiB"},
		{56294995342131200, "50.00 PiB"},
		{140737488355328000, "125.00 PiB"},
		{6192449487634432, "5.50 PiB"},
		{5910974510923776, "5.25 PiB"},
	}
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
// TestEiBToString verifies formatting of exbibyte-range sizes.
func (s *byteSizeToStringTestSuite) TestEiBToString(c *chk.C) {
	cases := []struct {
		in       int64
		expected string
	}{
		{1152921504606846976, "1.00 EiB"},
		{6341068275337658368, "5.50 EiB"},
		{6052837899185946624, "5.25 EiB"},
	} //50 & 125 aren't present Because they overflow int64
	for _, tc := range cases {
		c.Assert(byteSizeToString(tc.in), chk.Equals, tc.expected)
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
"strings"
"time"
)
// -------------------------------------- Component Definitions -------------------------------------- \\
// the following interfaces and structs allow the sync enumerator
// to be generic and has as little duplicated code as possible

// represent a local or remote resource object (ex: local file, blob, etc.)
// we can add more properties if needed, as this is easily extensible
type storedObject struct {
	name             string    // base name of the object (file or blob name)
	lastModifiedTime time.Time // last-modified timestamp, used for recency comparisons
	size             int64     // size in bytes
	md5              []byte    // content MD5, if available
	blobType         azblob.BlobType // will be "None" when unknown or not applicable

	// partial path relative to its root directory
	// example: rootDir=/var/a/b/c fullPath=/var/a/b/c/d/e/f.pdf => relativePath=d/e/f.pdf name=f.pdf
	relativePath string
}

const (
	blobTypeNA = azblob.BlobNone // some things, e.g. local files, aren't blobs so they don't have their own blob type so we use this "not applicable" constant
)
// isMoreRecentThan reports whether this object's last-modified time is
// strictly later than the other object's.
func (so *storedObject) isMoreRecentThan(other storedObject) bool {
	return so.lastModifiedTime.After(other.lastModifiedTime)
}
// a constructor is used so that in case the storedObject has to change, the callers would get a compilation error
func newStoredObject(name string, relativePath string, lmt time.Time, size int64, md5 []byte, blobType azblob.BlobType) storedObject {
	var obj storedObject
	obj.name = name
	obj.relativePath = relativePath
	obj.lastModifiedTime = lmt
	obj.size = size
	obj.md5 = md5
	obj.blobType = blobType
	return obj
}
// capable of traversing a structured resource like container or local directory
// pass each storedObject to the given objectProcessor if it passes all the filters
type resourceTraverser interface {
	traverse(processor objectProcessor, filters []objectFilter) error
}

// given a storedObject, process it accordingly (e.g. index it, or dispatch it for transfer)
type objectProcessor func(storedObject storedObject) error

// given a storedObject, verify if it satisfies the defined conditions
// if yes, return true
type objectFilter interface {
	doesPass(storedObject storedObject) bool
}
// -------------------------------------- Generic Enumerators -------------------------------------- \\
// the following enumerators must be instantiated with configurations
// they define the work flow in the most generic terms

// syncEnumerator drives a two-pass sync: the primary traverser populates the
// index; the secondary traverser is then compared against that index.
type syncEnumerator struct {
	// these allow us to go through the source and destination
	// there is flexibility in which side we scan first, it could be either the source or the destination
	primaryTraverser   resourceTraverser
	secondaryTraverser resourceTraverser

	// the results from the primary traverser would be stored here
	objectIndexer *objectIndexer

	// general filters apply to both the primary and secondary traverser
	filters []objectFilter

	// the processor that apply only to the secondary traverser
	// it processes objects as scanning happens
	// based on the data from the primary traverser stored in the objectIndexer
	objectComparator objectProcessor

	// a finalizer that is always called if the enumeration finishes properly
	finalize func() error
}
// newSyncEnumerator assembles a syncEnumerator from its collaborators; see the
// struct's field comments for each parameter's role.
func newSyncEnumerator(primaryTraverser, secondaryTraverser resourceTraverser, indexer *objectIndexer,
	filters []objectFilter, comparator objectProcessor, finalize func() error) *syncEnumerator {
	enumerator := &syncEnumerator{}
	enumerator.primaryTraverser = primaryTraverser
	enumerator.secondaryTraverser = secondaryTraverser
	enumerator.objectIndexer = indexer
	enumerator.filters = filters
	enumerator.objectComparator = comparator
	enumerator.finalize = finalize
	return enumerator
}
// enumerate runs the two-pass sync: first it indexes the primary resource,
// then it feeds the secondary resource's objects to the comparator, and
// finally it invokes the finalize hook. The first error encountered is returned.
func (e *syncEnumerator) enumerate() (err error) {
	// pass 1: enumerate the primary resource and build the lookup map
	if err = e.primaryTraverser.traverse(e.objectIndexer.store, e.filters); err != nil {
		return err
	}

	// pass 2: enumerate the secondary resource; objects that pass the filters
	// are handed to the comparator, which can act based on what's already indexed
	// note: transferring can start while scanning is ongoing
	if err = e.secondaryTraverser.traverse(e.objectComparator, e.filters); err != nil {
		return err
	}

	// execute the finalize func which may perform useful clean up steps
	return e.finalize()
}
// copyEnumerator drives a one-pass copy: every object produced by the
// traverser that passes the filters is handed to the dispatcher.
type copyEnumerator struct {
	traverser resourceTraverser

	// general filters apply to the objects returned by the traverser
	filters []objectFilter

	// receive objects from the traverser and dispatch them for transferring
	objectDispatcher objectProcessor

	// a finalizer that is always called if the enumeration finishes properly
	finalize func() error
}
func newCopyEnumerator(traverser resourceTraverser, filters []objectFilter, objectDispatcher objectProcessor, finalizer func() error) *copyEnumerator {
return ©Enumerator{
traverser: traverser,
filters: filters,
objectDispatcher: objectDispatcher,
finalize: finalizer,
}
}
// enumerate traverses the resource, dispatching each filtered object, then
// runs the finalize hook; the first error encountered is returned.
func (e *copyEnumerator) enumerate() (err error) {
	if err = e.traverser.traverse(e.objectDispatcher, e.filters); err != nil {
		return err
	}
	// execute the finalize func which may perform useful clean up steps
	return e.finalize()
}
// -------------------------------------- Helper Funcs -------------------------------------- \\
// passedFilters reports whether the given object passes every filter.
// A nil or empty filter slice passes everything.
func passedFilters(filters []objectFilter, storedObject storedObject) bool {
	// no explicit nil/empty check needed: ranging over a nil slice is a no-op,
	// so the previous `filters != nil && len(filters) > 0` guard was redundant
	for _, filter := range filters {
		// if any filter fails, the object is rejected
		if !filter.doesPass(storedObject) {
			return false
		}
	}
	return true
}
// processIfPassedFilters runs the processor on the object only when it passes
// every filter; otherwise it returns nil without processing.
func processIfPassedFilters(filters []objectFilter, storedObject storedObject, processor objectProcessor) (err error) {
	if !passedFilters(filters, storedObject) {
		return nil
	}
	return processor(storedObject)
}
// storedObject names are useful for filters
// getObjectNameOnly returns the final path component of fullPath. If there is
// no path separator past position 0, or the separator is the last character,
// the path is returned unchanged.
func getObjectNameOnly(fullPath string) (nameOnly string) {
	sep := strings.LastIndex(fullPath, common.AZCOPY_PATH_SEPARATOR_STRING)
	// guard: no usable separator — keep the whole path
	if sep <= 0 || sep == len(fullPath)-1 {
		return fullPath
	}
	return fullPath[sep+1:]
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"io"
"sync/atomic"
)
// liteBodyPacer wraps a request or response body and records the number of
// bytes read from it into the shared pacer's counter.
type liteBodyPacer struct {
	body io.Reader // Seeking is required to support retries
	p    *pacer    // shared pacer whose bytesTransferred counter is updated on every Read
}
// creates pacer that's not coupled to MMF (the obsolete non-lite one used memory mapped files)
func newLiteRequestBodyPacer(requestBody io.ReadSeeker, p *pacer) io.ReadSeeker {
	if p == nil {
		// fixed panic message: the parameter is named p (previously said "pr")
		panic("p must not be nil")
	}
	return &liteBodyPacer{body: requestBody, p: p}
}
// creates pacer that's not coupled to MMF (the obsolete non-lite one used memory mapped files)
func newLiteResponseBodyPacer(responseBody io.ReadCloser, p *pacer) io.ReadCloser {
	if p == nil {
		// fixed panic message: the parameter is named p (previously said "pr")
		panic("p must not be nil")
	}
	return &liteBodyPacer{body: responseBody, p: p}
}
// Read delegates to the wrapped body and atomically adds the number of bytes
// read to the pacer's transfer counter, even on a partial read with error.
func (lbp *liteBodyPacer) Read(p []byte) (int, error) {
	bytesRead, readErr := lbp.body.Read(p)
	atomic.AddInt64(&lbp.p.bytesTransferred, int64(bytesRead))
	return bytesRead, readErr
}
// Seeking is required to support retries.
// Note: this panics if the wrapped body does not implement io.ReadSeeker
// (response bodies created via newLiteResponseBodyPacer are never Seek'd).
func (lbp *liteBodyPacer) Seek(offset int64, whence int) (offsetFromStart int64, err error) {
	seeker := lbp.body.(io.ReadSeeker)
	return seeker.Seek(offset, whence)
}
// bytesOverTheWire supports Close but the underlying stream may not; if it does, Close will close it.
func (lbp *liteBodyPacer) Close() error {
	closer, ok := lbp.body.(io.Closer)
	if !ok {
		return nil // nothing to close
	}
	return closer.Close()
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"fmt"
"os"
"path/filepath"
"runtime"
"sort"
"strings"
"sync"
"sync/atomic"
"time"
"path"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
)
// sortPlanFiles is struct that implements len, swap and less than functions
// this struct is used to sort the JobPartPlan files of the same job on the basis
// of Part number
// TODO: can use the same struct to sort job part plan files on the basis of job number and part number
type sortPlanFiles struct{ Files []os.FileInfo }
// Less determines the comparison between two fileInfo's
// compares the part number of the Job Part files
// Panics if either file name cannot be parsed as a JobPartPlanFileName.
func (spf sortPlanFiles) Less(i, j int) bool {
	nameI := spf.Files[i].Name()
	_, partI, err := JobPartPlanFileName(nameI).Parse()
	if err != nil {
		panic(fmt.Errorf("error parsing the JobPartPlanfile name %s. Failed with error %s", nameI, err.Error()))
	}

	nameJ := spf.Files[j].Name()
	_, partJ, err := JobPartPlanFileName(nameJ).Parse()
	if err != nil {
		panic(fmt.Errorf("error parsing the JobPartPlanfile name %s. Failed with error %s", nameJ, err.Error()))
	}
	return partI < partJ
}
// Len determines the length of number of files
func (spf sortPlanFiles) Len() int { return len(spf.Files) }

// Swap exchanges the files at indices i and j (required by sort.Interface).
func (spf sortPlanFiles) Swap(i, j int) { spf.Files[i], spf.Files[j] = spf.Files[j], spf.Files[i] }
// JobAdmin is the singleton that manages ALL running Jobs, their parts, & their transfers
// It is assigned once by initJobsAdmin and read everywhere else.
var JobsAdmin interface {
	NewJobPartPlanFileName(jobID common.JobID, partNumber common.PartNumber) JobPartPlanFileName

	// JobIDDetails returns point-in-time list of JobIDDetails
	JobIDs() []common.JobID

	// JobMgr returns the specified JobID's JobMgr
	JobMgr(jobID common.JobID) (IJobMgr, bool)
	JobMgrEnsureExists(jobID common.JobID, level common.LogLevel, commandString string) IJobMgr

	// AddJobPartMgr associates the specified JobPartMgr with the Jobs Administrator
	//AddJobPartMgr(appContext context.Context, planFile JobPartPlanFileName) IJobPartMgr
	/*ScheduleTransfer(jptm IJobPartTransferMgr)*/
	ScheduleChunk(priority common.JobPriority, chunkFunc chunkFunc)

	ResurrectJob(jobId common.JobID, sourceSAS string, destinationSAS string) bool

	ResurrectJobParts()

	QueueJobParts(jpm IJobPartMgr)

	// AppPathFolder returns the Azcopy application path folder.
	// JobPartPlanFile will be created inside this folder.
	AppPathFolder() string

	// returns the current value of bytesOverWire.
	BytesOverWire() int64

	//DeleteJob(jobID common.JobID)
	common.ILoggerCloser
}
// initJobsAdmin constructs the process-wide jobsAdmin singleton: it creates
// the plan-file directory, sizes the RAM cache, wires up all coordinator and
// transfer channels, and spins up the worker goroutines. It must be called
// exactly once; a second call panics.
func initJobsAdmin(appCtx context.Context, concurrentConnections int, concurrentFilesLimit int, targetRateInMBps int64, azcopyAppPathFolder string, azcopyLogPathFolder string) {
	if JobsAdmin != nil {
		panic("initJobsAdmin was already called once")
	}

	// capacity of the transfer and chunk channels
	const channelSize = 100000

	// PartsChannelSize defines the number of JobParts which can be placed into the
	// parts channel. Any JobPart which comes from FE and partChannel is full,
	// has to wait and enumeration of transfer gets blocked till then.
	// TODO : PartsChannelSize Needs to be discussed and can change.
	const PartsChannelSize = 10000

	// partsCh is the channel in which all JobParts are put
	// for scheduling transfers. When the next JobPart order arrives
	// transfer engine creates the JobPartPlan file and
	// puts the JobPartMgr in partchannel
	// from which each part is picked up one by one
	// and transfers of that JobPart are scheduled
	partsCh := make(chan IJobPartMgr, PartsChannelSize)
	// Create normal & low transfer/chunk channels
	normalTransferCh, normalChunkCh := make(chan IJobPartTransferMgr, channelSize), make(chan chunkFunc, channelSize)
	lowTransferCh, lowChunkCh := make(chan IJobPartTransferMgr, channelSize), make(chan chunkFunc, channelSize)

	// Create suicide channel which is used to scale back on the number of workers
	suicideCh := make(chan SuicideJob, concurrentConnections)

	// ensure the plan-file directory exists (ignore "already exists")
	planDir := path.Join(azcopyAppPathFolder, "plans")
	if err := os.Mkdir(planDir, os.ModeDir|os.ModePerm); err != nil && !os.IsExist(err) {
		common.PanicIfErr(err)
	}

	// TODO: make ram usage configurable, with the following as just the default
	// Decide on a max amount of RAM we are willing to use. This functions as a cap, and prevents excessive usage.
	// There's no measure of physical RAM in the STD library, so we guestimate conservatively, based on CPU count (logical, not phyiscal CPUs)
	// Note that, as at Feb 2019, the multiSizeSlicePooler uses additional RAM, over this level, since it includes the cache of
	// currently-unnused, re-useable slices, that is not tracked by cacheLimiter.
	// Also, block sizes that are not powers of two result in extra usage over and above this limit. (E.g. 100 MB blocks each
	// count 100 MB towards this limit, but actually consume 128 MB)
	const gbToUsePerCpu = 0.5 // should be enough to support the amount of traffic 1 CPU can drive, and also less than the typical installed RAM-per-CPU
	gbToUse := float32(runtime.NumCPU()) * gbToUsePerCpu
	if gbToUse > 16 {
		gbToUse = 16 // cap it. Even 6 is enough at 10 Gbps with standard 8MB chunk size, but we need allow extra here to help if larger blob block sizes are selected by user, since then we need more memory to get enough chunks to have enough network-level concurrency
	}
	maxRamBytesToUse := int64(gbToUse * 1024 * 1024 * 1024)

	// note: the same three channels are stored in both coordinatorChannels
	// (write side) and xferChannels (read side)
	ja := &jobsAdmin{
		logger:           common.NewAppLogger(pipeline.LogInfo, azcopyLogPathFolder),
		jobIDToJobMgr:    newJobIDToJobMgr(),
		logDir:           azcopyLogPathFolder,
		planDir:          planDir,
		pacer:            newPacer(targetRateInMBps * 1024 * 1024),
		slicePool:        common.NewMultiSizeSlicePool(common.MaxBlockBlobBlockSize),
		cacheLimiter:     common.NewCacheLimiter(maxRamBytesToUse),
		fileCountLimiter: common.NewCacheLimiter(int64(concurrentFilesLimit)),
		appCtx:           appCtx,
		coordinatorChannels: CoordinatorChannels{
			partsChannel:     partsCh,
			normalTransferCh: normalTransferCh,
			lowTransferCh:    lowTransferCh,
		},
		xferChannels: XferChannels{
			partsChannel:     partsCh,
			normalTransferCh: normalTransferCh,
			lowTransferCh:    lowTransferCh,
			normalChunckCh:   normalChunkCh,
			lowChunkCh:       lowChunkCh,
			suicideCh:        suicideCh,
		},
	}

	// create new context with the defaultService api version set as value to serviceAPIVersionOverride in the app context.
	ja.appCtx = context.WithValue(ja.appCtx, ServiceAPIVersionOverride, DefaultServiceApiVersion)

	JobsAdmin = ja

	// Spin up slice pool pruner
	go ja.slicePoolPruneLoop()

	// One routine constantly monitors the partsChannel. It takes the JobPartManager from
	// the Channel and schedules the transfers of that JobPart.
	go ja.scheduleJobParts()
	// Spin up the desired number of executionEngine workers to process chunks
	for cc := 0; cc < concurrentConnections; cc++ {
		go ja.chunkProcessor(cc)
	}
	// Spin up a separate set of workers to process initiation of transfers (so that transfer initiation can't starve
	// out progress on already-scheduled chunks. (Not sure whether that can really happen, but this protects against it
	// anyway.)
	// Perhaps MORE importantly, doing this separately gives us more CONTROL over how we interact with the file system.
	for cc := 0; cc < NumTransferInitiationRoutines; cc++ {
		go ja.transferProcessor(cc)
	}
}

const NumTransferInitiationRoutines = 64 // TODO make this configurable
// QueueJobParts puts the given JobPartManager into the partChannel
// from where this JobPartMgr will be picked by a routine and
// its transfers will be scheduled
// Note: this blocks when the parts channel is full (capacity PartsChannelSize).
func (ja *jobsAdmin) QueueJobParts(jpm IJobPartMgr) {
	ja.coordinatorChannels.partsChannel <- jpm
}
// 1 single goroutine runs this method and InitJobsAdmin kicks that goroutine off.
// It loops forever, pulling JobParts off the parts channel and scheduling
// their transfers on the owning job manager's context.
func (ja *jobsAdmin) scheduleJobParts() {
	for {
		jobPart := <-ja.xferChannels.partsChannel

		// The JobMgr is always initialized before any of its parts are queued,
		// so failing to find it here indicates a programming error.
		jobID := jobPart.Plan().JobID
		jobMgr, found := ja.JobMgr(jobID)
		if !found {
			panic(fmt.Errorf("no job manager found for JobId %s", jobID.String()))
		}
		jobPart.ScheduleTransfers(jobMgr.Context())
	}
}
// general purpose worker that reads in schedules chunk jobs, and executes chunk jobs
// Priority is implemented with nested selects: a suicide request (shrink the
// pool) wins over any chunk; a normal-priority chunk wins over a low-priority
// one; if nothing is ready, the worker sleeps briefly before re-polling.
func (ja *jobsAdmin) chunkProcessor(workerID int) {
	for {
		// We check for suicides first to shrink goroutine pool
		// Then, we check chunks: normal & low priority
		select {
		case <-ja.xferChannels.suicideCh: // note: as at Dec 2018, this channel is not (yet) used
			return
		default:
			select {
			case chunkFunc := <-ja.xferChannels.normalChunckCh:
				chunkFunc(workerID)
			default:
				select {
				case chunkFunc := <-ja.xferChannels.lowChunkCh:
					chunkFunc(workerID)
				default:
					time.Sleep(100 * time.Millisecond) // Sleep before looping around
					// TODO: Question: In order to safely support high goroutine counts,
					// do we need to review sleep duration, or find an approach that does not require waking every x milliseconds
					// For now, duration has been increased substantially from the previous 1 ms, to reduce cost of
					// the wake-ups.
				}
			}
		}
	}
}
// separate from the chunkProcessor, this dedicated worker that reads in and executes transfer initiation jobs
// (which in turn schedule chunks that get picked up by chunkProcessor)
// Like chunkProcessor, normal-priority transfers take precedence over
// low-priority ones via nested selects, with a short sleep when idle.
func (ja *jobsAdmin) transferProcessor(workerID int) {
	// startTransfer either kicks off the transfer, or — if it was already
	// cancelled — just marks it done without starting it
	startTransfer := func(jptm IJobPartTransferMgr) {
		if jptm.WasCanceled() {
			if jptm.ShouldLog(pipeline.LogInfo) {
				jptm.Log(pipeline.LogInfo, fmt.Sprintf(" is not picked up worked %d because transfer was cancelled", workerID))
			}
			jptm.ReportTransferDone()
		} else {
			// TODO fix preceding space
			if jptm.ShouldLog(pipeline.LogInfo) {
				jptm.Log(pipeline.LogInfo, fmt.Sprintf("has worker %d which is processing TRANSFER", workerID))
			}
			jptm.StartJobXfer()
		}
	}

	for {
		// No suicide check here, because this routine runs only in a small number of goroutines, so no need to kill them off
		select {
		case jptm := <-ja.xferChannels.normalTransferCh:
			startTransfer(jptm)
		default:
			select {
			case jptm := <-ja.xferChannels.lowTransferCh:
				startTransfer(jptm)
			default:
				time.Sleep(10 * time.Millisecond) // Sleep before looping around
			}
		}
	}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

// There will be only 1 instance of the jobsAdmin type.
// The coordinator uses this to manage all the running jobs and their job parts.
type jobsAdmin struct {
	logger        common.ILoggerCloser
	jobIDToJobMgr jobIDToJobMgr // Thread-safe map from each JobID to its JobInfo
	// Other global state can be stored in more fields here...
	logDir              string // Where log files are stored
	planDir             string // Initialize to directory where Job Part Plans are stored
	coordinatorChannels CoordinatorChannels // write side of the shared channels
	xferChannels        XferChannels        // read side of the same channels
	appCtx              context.Context
	pacer               *pacer
	slicePool           common.ByteSlicePooler
	cacheLimiter        common.CacheLimiter // caps RAM used for buffered chunks
	fileCountLimiter    common.CacheLimiter // caps concurrently-open files
}

// CoordinatorChannels is the send-only view of the scheduling channels,
// used by the coordinator side.
type CoordinatorChannels struct {
	partsChannel     chan<- IJobPartMgr         // Write Only
	normalTransferCh chan<- IJobPartTransferMgr // Write-only
	lowTransferCh    chan<- IJobPartTransferMgr // Write-only
}

// XferChannels is the receive side used by the worker goroutines.
type XferChannels struct {
	partsChannel     <-chan IJobPartMgr         // Read only
	normalTransferCh <-chan IJobPartTransferMgr // Read-only
	lowTransferCh    <-chan IJobPartTransferMgr // Read-only
	normalChunckCh   chan chunkFunc             // Read-write (NOTE: "Chunck" is a long-standing typo, kept because other code references this field)
	lowChunkCh       chan chunkFunc             // Read-write
	suicideCh        <-chan SuicideJob          // Read-only
}

// SuicideJob is an empty token sent to a chunk worker to make it exit.
type SuicideJob struct{}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// NewJobPartPlanFileName builds the on-disk plan-file name for the given job
// part, embedding the job ID, part number and current data schema version.
func (ja *jobsAdmin) NewJobPartPlanFileName(jobID common.JobID, partNumber common.PartNumber) JobPartPlanFileName {
	name := fmt.Sprintf(jobPartPlanFileNameFormat, jobID.String(), partNumber, DataSchemaVersion)
	return JobPartPlanFileName(name)
}
// FileExtension returns the version-stamped extension used for plan files.
// NOTE(review): this yields ".strV%05d" while ResurrectJob/ResurrectJobParts
// search for ".steV%d" — confirm whether the "str"/"ste" difference is intended.
func (ja *jobsAdmin) FileExtension() string {
	return fmt.Sprintf(".strV%05d", DataSchemaVersion)
}
// JobIDs returns a point-in-time snapshot of the IDs of all known jobs.
func (ja *jobsAdmin) JobIDs() []common.JobID {
	var ids []common.JobID
	ja.jobIDToJobMgr.Iterate(false, func(jobID common.JobID, _ IJobMgr) {
		ids = append(ids, jobID)
	})
	return ids
}
// JobMgr returns the specified JobID's JobMgr if it exists.
func (ja *jobsAdmin) JobMgr(jobID common.JobID) (IJobMgr, bool) {
	return ja.jobIDToJobMgr.Get(jobID)
}

// AppPathFolder returns the Azcopy application path folder.
// JobPartPlanFile will be created inside this folder.
func (ja *jobsAdmin) AppPathFolder() string {
	return ja.planDir
}
// JobMgrEnsureExists returns the IJobMgr for jobID, creating and registering a
// new one if it does not already exist. When the job already exists, the
// level and commandString arguments are ignored.
func (ja *jobsAdmin) JobMgrEnsureExists(jobID common.JobID, level common.LogLevel, commandString string) IJobMgr {
	construct := func() IJobMgr {
		return newJobMgr(ja.logger, jobID, ja.appCtx, level, commandString, ja.logDir)
	}
	return ja.jobIDToJobMgr.EnsureExists(jobID, construct)
}
// ScheduleTransfer enqueues a transfer on the channel matching its priority.
func (ja *jobsAdmin) ScheduleTransfer(priority common.JobPriority, jptm IJobPartTransferMgr) {
	switch priority { // priority determines which channel handles the job part's transfers
	case common.EJobPriority.Normal():
		//jptm.SetChunkChannel(ja.xferChannels.normalChunckCh)
		ja.coordinatorChannels.normalTransferCh <- jptm
	case common.EJobPriority.Low():
		//jptm.SetChunkChannel(ja.xferChannels.lowChunkCh)
		ja.coordinatorChannels.lowTransferCh <- jptm
	default:
		ja.Panic(fmt.Errorf("invalid priority: %q", priority))
	}
}

// ScheduleChunk enqueues a chunkFunc on the channel matching its priority.
func (ja *jobsAdmin) ScheduleChunk(priority common.JobPriority, chunkFunc chunkFunc) {
	switch priority { // priority determines which channel handles the job part's transfers
	case common.EJobPriority.Normal():
		ja.xferChannels.normalChunckCh <- chunkFunc
	case common.EJobPriority.Low():
		ja.xferChannels.lowChunkCh <- chunkFunc
	default:
		ja.Panic(fmt.Errorf("invalid priority: %q", priority))
	}
}

// BytesOverWire reports the total bytes transferred so far, as recorded by the pacer.
func (ja *jobsAdmin) BytesOverWire() int64 {
	return atomic.LoadInt64(&ja.pacer.bytesTransferred)
}
// ResurrectJob reloads the persisted job-part plan files for the given jobId
// and re-registers each part with its JobMgr. Returns false when no plan files
// for that job exist on disk.
func (ja *jobsAdmin) ResurrectJob(jobId common.JobID, sourceSAS string, destinationSAS string) bool {
	// Search the existing plan files for the PartPlans of the given jobId.
	// Only files that have the JobID as prefix and the DataSchemaVersion as
	// suffix are included in the result.
	files := func(prefix, ext string) []os.FileInfo {
		var files []os.FileInfo
		filepath.Walk(ja.planDir, func(path string, fileInfo os.FileInfo, err error) error {
			// BUG FIX: on a walk error fileInfo may be nil; the original ignored
			// the error and would panic on fileInfo.IsDir(). Skip such entries.
			if err != nil || fileInfo == nil {
				return nil
			}
			if !fileInfo.IsDir() && strings.HasPrefix(fileInfo.Name(), prefix) && strings.HasSuffix(fileInfo.Name(), ext) {
				files = append(files, fileInfo)
			}
			return nil
		})
		return files
	}(jobId.String(), fmt.Sprintf(".steV%d", DataSchemaVersion))
	// If no files with the JobID exist then the job cannot be resurrected.
	if len(files) == 0 {
		return false
	}
	// Sort the JobPartPlan files with respect to part number.
	sort.Sort(sortPlanFiles{Files: files})
	for f := 0; f < len(files); f++ {
		planFile := JobPartPlanFileName(files[f].Name())
		jobID, partNum, err := planFile.Parse()
		if err != nil {
			continue // skip files whose names don't parse as plan-file names
		}
		mmf := planFile.Map()
		jm := ja.JobMgrEnsureExists(jobID, mmf.Plan().LogLevel, "")
		jm.AddJobPart(partNum, planFile, sourceSAS, destinationSAS, false)
	}
	return true
}
// ResurrectJobParts reconstructs the in-memory JobPartPlanInfo for every
// memory-mapped job plan file found in the plan directory (all jobs).
func (ja *jobsAdmin) ResurrectJobParts() {
	// Get all the job part plan files in the plan directory.
	files := func(ext string) []os.FileInfo {
		var files []os.FileInfo
		filepath.Walk(ja.planDir, func(path string, fileInfo os.FileInfo, err error) error {
			// BUG FIX: on a walk error fileInfo may be nil; the original ignored
			// the error and would panic on fileInfo.IsDir(). Skip such entries.
			if err != nil || fileInfo == nil {
				return nil
			}
			if !fileInfo.IsDir() && strings.HasSuffix(fileInfo.Name(), ext) {
				files = append(files, fileInfo)
			}
			return nil
		})
		return files
	}(fmt.Sprintf(".steV%d", DataSchemaVersion))
	// TODO : sort the file.
	for f := 0; f < len(files); f++ {
		planFile := JobPartPlanFileName(files[f].Name())
		jobID, partNum, err := planFile.Parse()
		if err != nil {
			continue // skip files whose names don't parse as plan-file names
		}
		mmf := planFile.Map()
		//todo : call the compute transfer function here for each job.
		jm := ja.JobMgrEnsureExists(jobID, mmf.Plan().LogLevel, "")
		jm.AddJobPart(partNum, planFile, EMPTY_SAS_STRING, EMPTY_SAS_STRING, false)
	}
}
// TODO: I think something is wrong here: I think delete and cleanup should be merged together.
// DeleteJob removes the entry for the given JobID from the JobID->JobMgr map.
// TODO: add the clean up logic for all Jobparts.
func (ja *jobsAdmin) DeleteJob(jobID common.JobID) {
	ja.jobIDToJobMgr.Delete(jobID)
}
// cleanUpJob api unmaps all the memory map JobPartFile and deletes the JobPartFile
/*
* Load PartMap for given JobId
* Iterate through each part order of given Job and then shutdowns the JobInfo handler
* Iterate through each part order of given Job and then shutdowns the JobInfo handler
* Delete all the job part files stored on disk
* Closes the logger file opened for logging logs related to given job
* Removes the entry of given JobId from JobsInfo
*/
// TODO: take care fo this.
/*func (ja *jobsAdmin) cleanUpJob(jobID common.JobID) {
jm, found := ja.JobMgr(jobID)
if !found {
ja.Panic(fmt.Errorf("no job found with JobID %v to clean up", jobID))
}
for p := PartNumber(0); true; p++ {
jpm, found := jm.JobPartMgr(p)
if !found { // TODO
}
// TODO: Fix jpm.planMMF.Unmap() // unmapping the memory map JobPart file
err := jpm.filename.Delete()
if err != nil {
ja.Panic(fmt.Errorf("error removing the job part file %s. Failed with following error %s", jpm.filename, err))
}
//TODO: jobHandler.shutDownHandler(ji.logger)
}
ji.closeLogForJob()
// deletes the entry for given JobId from Map
ja.DeleteJob(jobID)
}
*/
// The following methods simply delegate logging and panic handling to the jobsAdmin's logger.
func (ja *jobsAdmin) ShouldLog(level pipeline.LogLevel) bool { return ja.logger.ShouldLog(level) }
func (ja *jobsAdmin) Log(level pipeline.LogLevel, msg string) { ja.logger.Log(level, msg) }
func (ja *jobsAdmin) Panic(err error) { ja.logger.Panic(err) }
func (ja *jobsAdmin) CloseLog() { ja.logger.CloseLog() }
// slicePoolPruneLoop periodically prunes long-unused buffers from the byte
// slice pool, until the application context is cancelled.
func (ja *jobsAdmin) slicePoolPruneLoop() {
	// if something in the pool has been unused for this long, we probably don't need it
	const pruneInterval = 5 * time.Second
	ticker := time.NewTicker(pruneInterval)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			ja.slicePool.Prune()
		case <-ja.appCtx.Done():
			// BUG FIX: a bare "break" here only exits the select statement, so
			// once the context was done this goroutine looped forever (spinning
			// on the always-ready Done channel). Return to actually exit.
			return
		}
	}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// The jobIDToJobMgr maps each JobID to its JobMgr.
// All accessors take the embedded RWMutex; every method also calls
// nocopy.Check(), which presumably detects accidental copies of this struct
// (copying would duplicate the mutex) — see common.NoCopy.
type jobIDToJobMgr struct {
	nocopy common.NoCopy
	lock   sync.RWMutex
	m      map[common.JobID]IJobMgr
}

// newJobIDToJobMgr returns an empty, ready-to-use map wrapper.
func newJobIDToJobMgr() jobIDToJobMgr {
	return jobIDToJobMgr{m: make(map[common.JobID]IJobMgr)}
}
// Set stores (or replaces) the IJobMgr registered under the given JobID.
func (j *jobIDToJobMgr) Set(key common.JobID, value IJobMgr) {
	j.nocopy.Check()
	j.lock.Lock()
	defer j.lock.Unlock()
	j.m[key] = value
}
// Get looks up the IJobMgr for the given JobID; found reports whether it exists.
func (j *jobIDToJobMgr) Get(key common.JobID) (value IJobMgr, found bool) {
	j.nocopy.Check()
	j.lock.RLock()
	defer j.lock.RUnlock()
	value, found = j.m[key]
	return value, found
}
// EnsureExists returns the IJobMgr for jobID, atomically creating it via the
// newJobMgr callback (while holding the write lock) if it is not present yet.
func (j *jobIDToJobMgr) EnsureExists(jobID common.JobID, newJobMgr func() IJobMgr) IJobMgr {
	j.nocopy.Check()
	j.lock.Lock()
	defer j.lock.Unlock()
	// Lookup and insert happen under the same write lock, so concurrent callers
	// can never construct two managers for the same job.
	jm, found := j.m[jobID]
	if !found {
		jm = newJobMgr()
		j.m[jobID] = jm
	}
	return jm
}
// Delete removes the entry for the given JobID (no-op if absent).
func (j *jobIDToJobMgr) Delete(key common.JobID) {
	j.nocopy.Check()
	j.lock.Lock()
	defer j.lock.Unlock()
	delete(j.m, key)
}
// Iterate invokes f for every (JobID, IJobMgr) pair, holding the write lock
// when write is true and the read lock otherwise.
func (j *jobIDToJobMgr) Iterate(write bool, f func(k common.JobID, v IJobMgr)) {
	j.nocopy.Check()
	var locker sync.Locker
	if write {
		locker = &j.lock
	} else {
		locker = j.lock.RLocker()
	}
	locker.Lock()
	defer locker.Unlock()
	for jobID, jobMgr := range j.m {
		f(jobID, jobMgr)
	}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/*func goroutinePoolTest() {
const maxGoroutines = 10
gp, die := &GoroutinePool{}, make(chan struct{}, maxGoroutines)
setConcurrency := func(desiredConcurrency int32) {
goroutinesToAdd := gp.Concurrency(desiredConcurrency)
for g := int32(0); g < goroutinesToAdd; g++ {
go worker(die)
}
for g := int32(0); g > goroutinesToAdd; g-- {
die <- struct{}{}
}
}
setConcurrency(2)
time.Sleep(10 * time.Second)
setConcurrency(10)
time.Sleep(10 * time.Second)
setConcurrency(1)
time.Sleep(10 * time.Second)
setConcurrency(0)
time.Sleep(30 * time.Second)
}
var goroutinesInPool int32
func worker(die <-chan struct{}) {
atomic.AddInt32(&goroutinesInPool, 1)
loop:
for {
fmt.Printf("Count #%d\n", atomic.LoadInt32(&goroutinesInPool))
select {
case <-die:
break loop
default:
time.Sleep(time.Second * 4)
}
}
fmt.Printf("Count %d\n", atomic.AddInt32(&goroutinesInPool, -1))
}
type GoroutinePool struct {
nocopy common.NoCopy
concurrency int32
}
// Concurrency sets the desired concurrency and returns the number of goroutines that should be
// added/removed to achieve the desired concurrency. If this method returns a positive number,
// add the number of specified goroutines to the pool. If this method returns a negative number,
// kill the number of specified goroutines from the pool.
func (gp *GoroutinePool) Concurrency(concurrency int32) int32 {
if concurrency < 0 {
panic("concurrency must be >= 0")
}
gp.nocopy.Check()
return concurrency - atomic.SwapInt32(&gp.concurrency, concurrency)
}*/
<file_sep>package ste
import (
"context"
"fmt"
"mime"
"net"
"net/http"
"path/filepath"
"strings"
"sync/atomic"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
)
// Compile-time check that jobPartMgr satisfies IJobPartMgr.
var _ IJobPartMgr = &jobPartMgr{}

// IJobPartMgr is what a job part manager exposes to the rest of the STE:
// scheduling of its transfers and chunks, destination settings, and access to
// shared resources (slice pool, limiters, pipelines, logging).
type IJobPartMgr interface {
	Plan() *JobPartPlanHeader
	ScheduleTransfers(jobCtx context.Context)
	StartJobXfer(jptm IJobPartTransferMgr)
	ReportTransferDone() uint32
	IsForceWriteTrue() bool
	ScheduleChunks(chunkFunc chunkFunc)
	RescheduleTransfer(jptm IJobPartTransferMgr)
	BlobTypeOverride() common.BlobType
	BlobTiers() (blockBlobTier common.BlockBlobTier, pageBlobTier common.PageBlobTier)
	ShouldPutMd5() bool
	SAS() (string, string)
	//CancelJob()
	Close()
	// TODO: added for debugging purpose. remove later
	OccupyAConnection()
	// TODO: added for debugging purpose. remove later
	ReleaseAConnection()
	SlicePool() common.ByteSlicePooler
	CacheLimiter() common.CacheLimiter
	FileCountLimiter() common.CacheLimiter
	ChunkStatusLogger() common.ChunkStatusLogger
	common.ILogger
	SourceProviderPipeline() pipeline.Pipeline
}
// serviceAPIVersionOverride is an unexported type used as the context key below;
// a distinct type avoids collisions with context keys from other packages.
type serviceAPIVersionOverride struct{}

// ServiceAPIVersionOverride is a global variable in package ste which is a key to the Service API Version value set in every Job's context.
var ServiceAPIVersionOverride = serviceAPIVersionOverride{}

// DefaultServiceApiVersion is the default value of the service API version that is set as the value for ServiceAPIVersionOverride in every Job's context.
var DefaultServiceApiVersion = common.GetLifecycleMgr().GetEnvironmentVariable(common.EEnvironmentVariable.DefaultServiceApiVersion())
// NewVersionPolicyFactory creates a pipeline factory that can override the
// service API version set in the request header: when the request context
// carries a value under ServiceAPIVersionOverride, the policy overwrites the
// request's x-ms-version header with that value before sending; otherwise the
// header is left untouched.
func NewVersionPolicyFactory() pipeline.Factory {
	return pipeline.FactoryFunc(func(next pipeline.Policy, po *pipeline.PolicyOptions) pipeline.PolicyFunc {
		return func(ctx context.Context, request pipeline.Request) (pipeline.Response, error) {
			if override := ctx.Value(ServiceAPIVersionOverride); override != nil {
				request.Header.Set("x-ms-version", override.(string))
			}
			return next.Do(ctx, request)
		}
	})
}
// Max number of idle connections per host, to be held in the connection pool inside the HTTP client.
// This used to be 1000, but each consumes a handle, and on Linux total file/network handle counts can be
// tightly constrained, possibly to as low as 1024 in total. So we want a lower figure than 1000.
// 500 ought to be enough because this figure is about pooling temporarily un-used connections.
// Our max number of USED connections, at any one moment in time, is set by AZCOPY_CONCURRENCY_VALUE
// which, as at Mar 2019, defaults to 300. Because connections are constantly released and re-used by that pool
// of 300 goroutines, it's reasonable to assume that the total number of momentarily-
// UNused connections will be much smaller than the number USED, i.e. much less than 300. So this figure
// we set here should be MORE than enough.
const AzCopyMaxIdleConnsPerHost = 500
// NewAzcopyHTTPClient creates a new HTTP client.
// We must minimize use of this, and instead maximize re-use of the returned client object.
// Why? Because that makes our connection pooling more efficient, and prevents us exhausting the
// number of available network sockets on resource-constrained Linux systems. (E.g. when
// 'ulimit -Hn' is low).
func NewAzcopyHTTPClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
			// We use Dial instead of DialContext as DialContext has been reported to cause slower performance.
			Dial /*Context*/ : (&net.Dialer{
				Timeout:   30 * time.Second,
				KeepAlive: 30 * time.Second,
				DualStack: true,
			}).Dial, /*Context*/
			MaxIdleConns:           0, // No limit
			MaxIdleConnsPerHost:    AzCopyMaxIdleConnsPerHost,
			IdleConnTimeout:        180 * time.Second,
			TLSHandshakeTimeout:    10 * time.Second,
			ExpectContinueTimeout:  1 * time.Second,
			DisableKeepAlives:      false,
			DisableCompression:     true, // must disable the auto-decompression of gzipped files, and just download the gzipped version. See https://github.com/Azure/azure-storage-azcopy/issues/374
			MaxResponseHeaderBytes: 0,
			//ResponseHeaderTimeout: time.Duration{},
			//ExpectContinueTimeout: time.Duration{},
		},
	}
}
// newAzcopyHTTPClientFactory wraps the given *http.Client as a pipeline
// factory, so that the pipeline's HTTP sends go through our shared client
// (and hence through its connection pool).
func newAzcopyHTTPClientFactory(pipelineHTTPClient *http.Client) pipeline.Factory {
	return pipeline.FactoryFunc(func(next pipeline.Policy, po *pipeline.PolicyOptions) pipeline.PolicyFunc {
		return func(ctx context.Context, request pipeline.Request) (pipeline.Response, error) {
			resp, err := pipelineHTTPClient.Do(request.WithContext(ctx))
			if err != nil {
				err = pipeline.NewError(err, "HTTP request failed")
			}
			return pipeline.NewHTTPResponse(resp), err
		}
	})
}
// NewBlobPipeline creates a Pipeline using the specified credentials and options.
// The order of the factories is behavior: policies closest to the API run first,
// policies closest to the wire run last.
func NewBlobPipeline(c azblob.Credential, o azblob.PipelineOptions, r XferRetryOptions, p *pacer, client *http.Client) pipeline.Pipeline {
	if c == nil {
		panic("c can't be nil")
	}
	// Closest to API goes first; closest to the wire goes last
	f := []pipeline.Factory{
		azblob.NewTelemetryPolicyFactory(o.Telemetry),
		azblob.NewUniqueRequestIDPolicyFactory(),
		NewBlobXferRetryPolicyFactory(r),    // actually retry the operation
		newRetryNotificationPolicyFactory(), // record that a retry status was returned
		c,
		pipeline.MethodFactoryMarker(), // indicates at what stage in the pipeline the method factory is invoked
		//NewPacerPolicyFactory(p),
		NewVersionPolicyFactory(),
		NewRequestLogPolicyFactory(RequestLogOptions{LogWarningIfTryOverThreshold: o.RequestLog.LogWarningIfTryOverThreshold}),
	}
	return pipeline.NewPipeline(f, pipeline.Options{HTTPSender: newAzcopyHTTPClientFactory(client), Log: o.Log})
}
// NewBlobFSPipeline creates a pipeline for transfers to and from the BlobFS service.
// The BlobFS operations currently in azcopy are supported by SharedKey credentials.
func NewBlobFSPipeline(c azbfs.Credential, o azbfs.PipelineOptions, r XferRetryOptions, p *pacer, client *http.Client) pipeline.Pipeline {
	if c == nil {
		panic("c can't be nil")
	}
	// Closest to API goes first; closest to the wire goes last.
	// (Built as one literal, in the same order the original appended them.)
	f := []pipeline.Factory{
		azbfs.NewTelemetryPolicyFactory(o.Telemetry),
		azbfs.NewUniqueRequestIDPolicyFactory(),
		NewBFSXferRetryPolicyFactory(r),     // actually retry the operation
		newRetryNotificationPolicyFactory(), // record that a retry status was returned
		c,
		pipeline.MethodFactoryMarker(), // indicates at what stage in the pipeline the method factory is invoked
		NewPacerPolicyFactory(p),
		azbfs.NewRequestLogPolicyFactory(o.RequestLog),
	}
	return pipeline.NewPipeline(f, pipeline.Options{HTTPSender: newAzcopyHTTPClientFactory(client), Log: o.Log})
}
// NewFilePipeline creates a Pipeline using the specified credentials and options.
// The order of the factories is behavior: policies closest to the API run first,
// policies closest to the wire run last.
func NewFilePipeline(c azfile.Credential, o azfile.PipelineOptions, r azfile.RetryOptions, p *pacer, client *http.Client) pipeline.Pipeline {
	if c == nil {
		panic("c can't be nil")
	}
	// Closest to API goes first; closest to the wire goes last
	f := []pipeline.Factory{
		azfile.NewTelemetryPolicyFactory(o.Telemetry),
		azfile.NewUniqueRequestIDPolicyFactory(),
		azfile.NewRetryPolicyFactory(r),     // actually retry the operation
		newRetryNotificationPolicyFactory(), // record that a retry status was returned
		c,
		pipeline.MethodFactoryMarker(), // indicates at what stage in the pipeline the method factory is invoked
		NewPacerPolicyFactory(p),
		NewVersionPolicyFactory(),
		azfile.NewRequestLogPolicyFactory(o.RequestLog),
	}
	return pipeline.NewPipeline(f, pipeline.Options{HTTPSender: newAzcopyHTTPClientFactory(client), Log: o.Log})
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// jobPartMgr represents the runtime information for a Job's Part
type jobPartMgr struct {
	// These fields represent the part's existence
	jobMgr   IJobMgr // Refers to this part's Job (for logging, cancelling, etc.)
	filename JobPartPlanFileName
	// sourceSAS defines the SAS of the source of the Job. If the source is a local Location, then the SAS is empty.
	// Since the SAS is not persisted in the JobPartPlan file, it is stripped from the source and stored in memory in the JobPart Manager
	sourceSAS string
	// destinationSAS defines the SAS of the destination of the Job. If the destination is a local Location, then the SAS is empty.
	// Since the SAS is not persisted in the JobPartPlan file, it is stripped from the destination and stored in memory in the JobPart Manager
	destinationSAS string
	// When the part is scheduled to run (inprogress), the below fields are used
	planMMF *JobPartPlanMMF // This Job part plan's MMF
	// Additional data shared by all of this Job Part's transfers; initialized when this jobPartMgr is created
	blobHTTPHeaders   azblob.BlobHTTPHeaders
	fileHTTPHeaders   azfile.FileHTTPHeaders
	blobFSHTTPHeaders azbfs.BlobFSHTTPHeaders
	// Additional data shared by all of this Job Part's transfers; initialized when this jobPartMgr is created
	blockBlobTier common.BlockBlobTier
	// Additional data shared by all of this Job Part's transfers; initialized when this jobPartMgr is created
	pageBlobTier common.PageBlobTier
	// Additional data shared by all of this Job Part's transfers; initialized when this jobPartMgr is created
	putMd5                   bool
	blobMetadata             azblob.Metadata
	fileMetadata             azfile.Metadata
	blobTypeOverride         common.BlobType // User specified blob type
	preserveLastModifiedTime bool
	newJobXfer               newJobXfer // Method used to start the transfer
	priority                 common.JobPriority
	pacer                    *pacer // Pacer used by chunks when uploading data
	slicePool                common.ByteSlicePooler
	cacheLimiter             common.CacheLimiter
	fileCountLimiter         common.CacheLimiter
	pipeline                 pipeline.Pipeline // ordered list of Factory objects and an object implementing the HTTPSender interface
	sourceProviderPipeline   pipeline.Pipeline
	// used defensively to protect against double init (see createPipelines)
	atomicPipelinesInitedIndicator uint32
	// numberOfTransfersDone_doNotUse represents the number of transfers of the JobPartOrder
	// which are either completed or failed
	// numberOfTransfersDone_doNotUse determines the final cancellation of the JobPartOrder
	atomicTransfersDone uint32
}

// Plan returns the memory-mapped JobPartPlanHeader for this part.
func (jpm *jobPartMgr) Plan() *JobPartPlanHeader { return jpm.planMMF.Plan() }
// ScheduleTransfers schedules this job part's transfers. It is called when a new job part is ordered & is also called to resume a paused Job
func (jpm *jobPartMgr) ScheduleTransfers(jobCtx context.Context) {
	jpm.atomicTransfersDone = 0 // Reset the # of transfers done back to 0
	// partplan file is opened and mapped when job part is added
	//jpm.planMMF = jpm.filename.Map() // Open the job part plan file & memory-map it in
	plan := jpm.planMMF.Plan()
	// get the list of include / exclude transfers
	includeTransfer, excludeTransfer := jpm.jobMgr.IncludeExclude()
	// *** Open the job part: process any job part plan-setting used by all transfers ***
	// Rehydrate the destination HTTP headers from the fixed-size plan fields
	// (each is a byte array plus an explicit length), once per part.
	dstData := plan.DstBlobData
	jpm.blobHTTPHeaders = azblob.BlobHTTPHeaders{
		ContentType:        string(dstData.ContentType[:dstData.ContentTypeLength]),
		ContentEncoding:    string(dstData.ContentEncoding[:dstData.ContentEncodingLength]),
		ContentDisposition: string(dstData.ContentDisposition[:dstData.ContentDispositionLength]),
		ContentLanguage:    string(dstData.ContentLanguage[:dstData.ContentLanguageLength]),
		CacheControl:       string(dstData.CacheControl[:dstData.CacheControlLength]),
	}
	jpm.blobFSHTTPHeaders = azbfs.BlobFSHTTPHeaders{
		ContentType:        string(dstData.ContentType[:dstData.ContentTypeLength]),
		ContentEncoding:    string(dstData.ContentEncoding[:dstData.ContentEncodingLength]),
		ContentDisposition: string(dstData.ContentDisposition[:dstData.ContentDispositionLength]),
		ContentLanguage:    string(dstData.ContentLanguage[:dstData.ContentLanguageLength]),
		CacheControl:       string(dstData.CacheControl[:dstData.CacheControlLength]),
	}
	jpm.putMd5 = dstData.PutMd5
	jpm.blockBlobTier = dstData.BlockBlobTier
	jpm.pageBlobTier = dstData.PageBlobTier
	jpm.fileHTTPHeaders = azfile.FileHTTPHeaders{
		ContentType:        string(dstData.ContentType[:dstData.ContentTypeLength]),
		ContentEncoding:    string(dstData.ContentEncoding[:dstData.ContentEncodingLength]),
		ContentDisposition: string(dstData.ContentDisposition[:dstData.ContentDispositionLength]),
		ContentLanguage:    string(dstData.ContentLanguage[:dstData.ContentLanguageLength]),
		CacheControl:       string(dstData.CacheControl[:dstData.CacheControlLength]),
	}
	// For this job part, split the metadata string apart and create an azblob.Metadata out of it
	// NOTE(review): keys/values containing ';' or '=' would break this split — confirm upstream escaping.
	metadataString := string(dstData.Metadata[:dstData.MetadataLength])
	jpm.blobMetadata = azblob.Metadata{}
	if len(metadataString) > 0 {
		for _, keyAndValue := range strings.Split(metadataString, ";") { // key/value pairs are separated by ';'
			kv := strings.Split(keyAndValue, "=") // key/value are separated by '='
			jpm.blobMetadata[kv[0]] = kv[1]
		}
	}
	jpm.fileMetadata = azfile.Metadata{}
	if len(metadataString) > 0 {
		for _, keyAndValue := range strings.Split(metadataString, ";") { // key/value pairs are separated by ';'
			kv := strings.Split(keyAndValue, "=") // key/value are separated by '='
			jpm.fileMetadata[kv[0]] = kv[1]
		}
	}
	jpm.preserveLastModifiedTime = plan.DstLocalData.PreserveLastModifiedTime
	jpm.blobTypeOverride = plan.DstBlobData.BlobType
	jpm.newJobXfer = computeJobXfer(plan.FromTo, plan.DstBlobData.BlobType)
	jpm.priority = plan.Priority
	jpm.createPipelines(jobCtx) // pipeline is created per job part manager
	// *** Schedule this job part's transfers ***
	for t := uint32(0); t < plan.NumTransfers; t++ {
		jppt := plan.Transfer(t)
		ts := jppt.TransferStatus()
		if ts == common.ETransferStatus.Success() {
			jpm.ReportTransferDone() // Don't schedule an already-completed/failed transfer
			continue
		}
		// If the list of transfers to be included is passed
		// then check whether the current transfer exists in the list of included transfers.
		// If it doesn't exist, skip the transfer
		if len(includeTransfer) > 0 {
			// Get the source string from the part plan header
			src, _ := plan.TransferSrcDstStrings(t)
			// If source doesn't exist, skip the transfer
			_, ok := includeTransfer[src]
			if !ok {
				jpm.ReportTransferDone() // Don't schedule transfer which is not mentioned to be included
				continue
			}
		}
		// If the list of transfers to be excluded is passed
		// then check the current transfer against the list of excluded transfers.
		// If it exists, then skip the transfer
		if len(excludeTransfer) > 0 {
			// Get the source string from the part plan header
			src, _ := plan.TransferSrcDstStrings(t)
			// If the source exists in the list of excluded transfers,
			// skip the transfer
			_, ok := excludeTransfer[src]
			if ok {
				jpm.ReportTransferDone() // Don't schedule transfer which is mentioned to be excluded
				continue
			}
		}
		// If the transfer had failed, then while rescheduling the transfer mark it Started.
		if ts == common.ETransferStatus.Failed() {
			jppt.SetTransferStatus(common.ETransferStatus.Started(), true)
		}
		// Each transfer gets its own context (so any chunk can cancel the whole transfer) based off the job's context
		transferCtx, transferCancel := context.WithCancel(jobCtx)
		jptm := &jobPartTransferMgr{
			jobPartMgr:          jpm,
			jobPartPlanTransfer: jppt,
			transferIndex:       t,
			ctx:                 transferCtx,
			cancel:              transferCancel,
			//TODO: insert the factory func interface in jptm.
			// numChunks will be set by the transfer's prologue method
		}
		if jpm.ShouldLog(pipeline.LogInfo) {
			jpm.Log(pipeline.LogInfo, fmt.Sprintf("scheduling JobID=%v, Part#=%d, Transfer#=%d, priority=%v", plan.JobID, plan.PartNum, t, plan.Priority))
		}
		JobsAdmin.(*jobsAdmin).ScheduleTransfer(jpm.priority, jptm)
		// This sets the atomic variable atomicAllTransfersScheduled to 1.
		// The atomicAllTransfersScheduled variable is used in case of resume job.
		// Since iterating the JobParts and scheduling transfers is independent,
		// a variable is required which defines whether the last part is resumed or not
		if plan.IsFinalPart {
			jpm.jobMgr.ConfirmAllTransfersScheduled()
		}
	}
}
// ScheduleChunks hands a chunkFunc to the global JobsAdmin at this part's priority.
func (jpm *jobPartMgr) ScheduleChunks(chunkFunc chunkFunc) {
	JobsAdmin.ScheduleChunk(jpm.priority, chunkFunc)
}

// RescheduleTransfer re-queues the given transfer at this part's priority.
func (jpm *jobPartMgr) RescheduleTransfer(jptm IJobPartTransferMgr) {
	JobsAdmin.(*jobsAdmin).ScheduleTransfer(jpm.priority, jptm)
}
// createPipelines builds the HTTP pipelines this part's transfers will use:
// an optional source-side pipeline (for service-to-service copies) and the
// main data-transfer pipeline chosen by the plan's FromTo. It must run exactly
// once per jobPartMgr; a second call panics (guarded by the atomic indicator).
func (jpm *jobPartMgr) createPipelines(ctx context.Context) {
	if atomic.SwapUint32(&jpm.atomicPipelinesInitedIndicator, 1) != 0 {
		panic("init client and pipelines for same jobPartMgr twice")
	}
	fromTo := jpm.planMMF.Plan().FromTo
	credInfo := jpm.jobMgr.getInMemoryTransitJobState().credentialInfo
	userAgent := common.UserAgent
	if fromTo.From() == common.ELocation.S3() {
		userAgent = common.S3ImportUserAgent
	}
	// Credential callbacks route messages through this part's logger and can
	// cancel the whole job on credential failure.
	credOption := common.CredentialOpOptions{
		LogInfo:  func(str string) { jpm.Log(pipeline.LogInfo, str) },
		LogError: func(str string) { jpm.Log(pipeline.LogError, str) },
		Panic:    jpm.Panic,
		CallerID: fmt.Sprintf("JobID=%v, Part#=%d", jpm.Plan().JobID, jpm.Plan().PartNum),
		Cancel:   jpm.jobMgr.Cancel,
	}
	// TODO: Consider to remove XferRetryPolicy and Options?
	xferRetryOption := XferRetryOptions{
		Policy:        0,
		MaxTries:      UploadMaxTries, // TODO: Consider to unify options.
		TryTimeout:    UploadTryTimeout,
		RetryDelay:    UploadRetryDelay,
		MaxRetryDelay: UploadMaxRetryDelay}
	// Create source info provider's pipeline for S2S copy.
	if fromTo == common.EFromTo.BlobBlob() {
		jpm.sourceProviderPipeline = NewBlobPipeline(
			azblob.NewAnonymousCredential(),
			azblob.PipelineOptions{
				Log: jpm.jobMgr.PipelineLogInfo(),
				Telemetry: azblob.TelemetryOptions{
					Value: userAgent,
				},
			},
			xferRetryOption,
			jpm.pacer,
			jpm.jobMgr.HttpClient())
	}
	if fromTo == common.EFromTo.FileBlob() {
		jpm.sourceProviderPipeline = NewFilePipeline(
			azfile.NewAnonymousCredential(),
			azfile.PipelineOptions{
				Log: jpm.jobMgr.PipelineLogInfo(),
				Telemetry: azfile.TelemetryOptions{
					Value: userAgent,
				},
			},
			azfile.RetryOptions{
				Policy:        azfile.RetryPolicyExponential,
				MaxTries:      UploadMaxTries,
				TryTimeout:    UploadTryTimeout,
				RetryDelay:    UploadRetryDelay,
				MaxRetryDelay: UploadMaxRetryDelay,
			},
			jpm.pacer,
			jpm.jobMgr.HttpClient())
	}
	// Create pipeline for data transfer.
	switch fromTo {
	case common.EFromTo.BlobTrash(), common.EFromTo.BlobLocal(), common.EFromTo.LocalBlob(),
		common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		credential := common.CreateBlobCredential(ctx, credInfo, credOption)
		jpm.Log(pipeline.LogInfo, fmt.Sprintf("JobID=%v, credential type: %v", jpm.Plan().JobID, credInfo.CredentialType))
		jpm.pipeline = NewBlobPipeline(
			credential,
			azblob.PipelineOptions{
				Log: jpm.jobMgr.PipelineLogInfo(),
				Telemetry: azblob.TelemetryOptions{
					Value: userAgent,
				},
			},
			xferRetryOption,
			jpm.pacer,
			jpm.jobMgr.HttpClient())
	// Create pipeline for Azure BlobFS.
	case common.EFromTo.BlobFSLocal(), common.EFromTo.LocalBlobFS():
		credential := common.CreateBlobFSCredential(ctx, credInfo, credOption)
		jpm.Log(pipeline.LogInfo, fmt.Sprintf("JobID=%v, credential type: %v", jpm.Plan().JobID, credInfo.CredentialType))
		jpm.pipeline = NewBlobFSPipeline(
			credential,
			azbfs.PipelineOptions{
				Log: jpm.jobMgr.PipelineLogInfo(),
				Telemetry: azbfs.TelemetryOptions{
					Value: userAgent,
				},
			},
			xferRetryOption,
			jpm.pacer,
			jpm.jobMgr.HttpClient())
	// Create pipeline for Azure File.
	case common.EFromTo.FileTrash(), common.EFromTo.FileLocal(), common.EFromTo.LocalFile():
		jpm.pipeline = NewFilePipeline(
			azfile.NewAnonymousCredential(),
			azfile.PipelineOptions{
				Log: jpm.jobMgr.PipelineLogInfo(),
				Telemetry: azfile.TelemetryOptions{
					Value: userAgent,
				},
			},
			azfile.RetryOptions{
				Policy:        azfile.RetryPolicyExponential,
				MaxTries:      UploadMaxTries,
				TryTimeout:    UploadTryTimeout,
				RetryDelay:    UploadRetryDelay,
				MaxRetryDelay: UploadMaxRetryDelay,
			},
			jpm.pacer,
			jpm.jobMgr.HttpClient())
	default:
		panic(fmt.Errorf("Unrecognized from-to: %q", fromTo.String()))
	}
}
// SlicePool exposes the shared byte-slice pool used by this part's transfers.
func (jpm *jobPartMgr) SlicePool() common.ByteSlicePooler {
	return jpm.slicePool
}

// CacheLimiter exposes the shared cache limiter.
func (jpm *jobPartMgr) CacheLimiter() common.CacheLimiter {
	return jpm.cacheLimiter
}

// FileCountLimiter exposes the shared file-count limiter.
func (jpm *jobPartMgr) FileCountLimiter() common.CacheLimiter {
	return jpm.fileCountLimiter
}
// StartJobXfer launches the transfer using the xfer function selected in
// ScheduleTransfers (computeJobXfer), with this part's pipeline and pacer.
func (jpm *jobPartMgr) StartJobXfer(jptm IJobPartTransferMgr) {
	jpm.newJobXfer(jptm, jpm.pipeline, jpm.pacer)
}

// IsForceWriteTrue reports the plan's ForceWrite flag.
func (jpm *jobPartMgr) IsForceWriteTrue() bool {
	return jpm.Plan().ForceWrite
}
// blobDstData returns the Blob HTTP headers and metadata to apply at the destination.
// When MIME-type guessing is enabled and file data is available, ContentType is
// replaced by an inferred type; all other header fields come from the plan as-is.
func (jpm *jobPartMgr) blobDstData(fullFilePath string, dataFileToXfer []byte) (headers azblob.BlobHTTPHeaders, metadata azblob.Metadata) {
	if jpm.planMMF.Plan().DstBlobData.NoGuessMimeType || dataFileToXfer == nil {
		return jpm.blobHTTPHeaders, jpm.blobMetadata
	}
	return azblob.BlobHTTPHeaders{ContentType: jpm.inferContentType(fullFilePath, dataFileToXfer), ContentLanguage: jpm.blobHTTPHeaders.ContentLanguage, ContentDisposition: jpm.blobHTTPHeaders.ContentDisposition, ContentEncoding: jpm.blobHTTPHeaders.ContentEncoding, CacheControl: jpm.blobHTTPHeaders.CacheControl}, jpm.blobMetadata
}

// fileDstData is the Azure Files analogue of blobDstData.
func (jpm *jobPartMgr) fileDstData(fullFilePath string, dataFileToXfer []byte) (headers azfile.FileHTTPHeaders, metadata azfile.Metadata) {
	if jpm.planMMF.Plan().DstBlobData.NoGuessMimeType || dataFileToXfer == nil {
		return jpm.fileHTTPHeaders, jpm.fileMetadata
	}
	return azfile.FileHTTPHeaders{ContentType: jpm.inferContentType(fullFilePath, dataFileToXfer), ContentLanguage: jpm.fileHTTPHeaders.ContentLanguage, ContentEncoding: jpm.fileHTTPHeaders.ContentEncoding, ContentDisposition: jpm.fileHTTPHeaders.ContentDisposition, CacheControl: jpm.fileHTTPHeaders.CacheControl}, jpm.fileMetadata
}

// bfsDstData is the BlobFS analogue of blobDstData (headers only, no metadata).
func (jpm *jobPartMgr) bfsDstData(fullFilePath string, dataFileToXfer []byte) (headers azbfs.BlobFSHTTPHeaders) {
	if jpm.planMMF.Plan().DstBlobData.NoGuessMimeType || dataFileToXfer == nil {
		return jpm.blobFSHTTPHeaders
	}
	return azbfs.BlobFSHTTPHeaders{ContentType: jpm.inferContentType(fullFilePath, dataFileToXfer), ContentLanguage: jpm.blobFSHTTPHeaders.ContentLanguage, ContentEncoding: jpm.blobFSHTTPHeaders.ContentEncoding, ContentDisposition: jpm.blobFSHTTPHeaders.ContentDisposition, CacheControl: jpm.blobFSHTTPHeaders.CacheControl}
}
// inferContentType determines the MIME type for a file being transferred.
// It first consults the system MIME registry using the file's extension; if
// that yields nothing, it falls back to content sniffing on the sample bytes.
func (jpm *jobPartMgr) inferContentType(fullFilePath string, dataFileToXfer []byte) string {
	byExtension := mime.TypeByExtension(filepath.Ext(fullFilePath))
	if byExtension != "" {
		return byExtension
	}
	return http.DetectContentType(dataFileToXfer)
}
// BlobTypeOverride returns the blob type the user explicitly requested (if any).
func (jpm *jobPartMgr) BlobTypeOverride() common.BlobType {
	return jpm.blobTypeOverride
}
// BlobTiers returns the access tiers requested for block blobs and page blobs.
func (jpm *jobPartMgr) BlobTiers() (blockBlobTier common.BlockBlobTier, pageBlobTier common.PageBlobTier) {
	return jpm.blockBlobTier, jpm.pageBlobTier
}
// ShouldPutMd5 reports whether an MD5 hash should be computed and stored with each upload.
func (jpm *jobPartMgr) ShouldPutMd5() bool {
	return jpm.putMd5
}
// SAS returns the source and destination SAS tokens for this job part, in that order.
func (jpm *jobPartMgr) SAS() (string, string) {
	return jpm.sourceSAS, jpm.destinationSAS
}
// localDstData returns a pointer into the plan's settings for local (download) destinations.
func (jpm *jobPartMgr) localDstData() *JobPartPlanDstLocal {
	return &jpm.Plan().DstLocalData
}
// Call Done when a transfer has completed its epilog; this method returns the number of transfers completed so far
func (jpm *jobPartMgr) ReportTransferDone() (transfersDone uint32) {
	transfersDone = atomic.AddUint32(&jpm.atomicTransfersDone, 1)
	if jpm.ShouldLog(pipeline.LogInfo) {
		plan := jpm.Plan()
		jpm.Log(pipeline.LogInfo, fmt.Sprintf("JobID=%v, Part#=%d, TransfersDone=%d of %d", plan.JobID, plan.PartNum, transfersDone, plan.NumTransfers))
	}
	// The part is complete once its last transfer reports in; notify the job manager.
	if transfersDone == jpm.planMMF.Plan().NumTransfers {
		jpm.jobMgr.ReportJobPartDone()
	}
	return transfersDone
}
//func (jpm *jobPartMgr) Cancel() { jpm.jobMgr.Cancel() }
// Close releases this part's resources: it unmaps the memory-mapped plan file
// and zeroes cached header/metadata fields so their contents can be garbage collected.
func (jpm *jobPartMgr) Close() {
	jpm.planMMF.Unmap()
	// Clear other fields to allow GC
	jpm.blobHTTPHeaders = azblob.BlobHTTPHeaders{}
	jpm.blobMetadata = azblob.Metadata{}
	jpm.fileHTTPHeaders = azfile.FileHTTPHeaders{}
	jpm.fileMetadata = azfile.Metadata{}
	jpm.blobFSHTTPHeaders = azbfs.BlobFSHTTPHeaders{}
	jpm.preserveLastModifiedTime = false
	// TODO: Delete file?
	/*if err := os.Remove(jpm.planFile.Name()); err != nil {
		jpm.Panic(fmt.Errorf("error removing Job Part Plan file %s. Error=%v", jpm.planFile.Name(), err))
	}*/
}
// TODO: added for debugging purpose. remove later
// Add 1 to the active number of goroutine performing the transfer or executing the chunkFunc
func (jpm *jobPartMgr) OccupyAConnection() {
	jpm.jobMgr.OccupyAConnection() // counter lives at the job level; this just delegates
}
// Sub 1 from the active number of goroutine performing the transfer or executing the chunkFunc
// TODO: added for debugging purpose. remove later
func (jpm *jobPartMgr) ReleaseAConnection() {
	jpm.jobMgr.ReleaseAConnection() // counter lives at the job level; this just delegates
}
// Logging and status helpers simply delegate to the owning job manager.
func (jpm *jobPartMgr) ShouldLog(level pipeline.LogLevel) bool { return jpm.jobMgr.ShouldLog(level) }
func (jpm *jobPartMgr) Log(level pipeline.LogLevel, msg string) { jpm.jobMgr.Log(level, msg) }
func (jpm *jobPartMgr) Panic(err error) { jpm.jobMgr.Panic(err) }
func (jpm *jobPartMgr) ChunkStatusLogger() common.ChunkStatusLogger {
	return jpm.jobMgr.ChunkStatusLogger()
}
// SourceProviderPipeline returns the pipeline used to read from the source.
func (jpm *jobPartMgr) SourceProviderPipeline() pipeline.Pipeline {
	return jpm.sourceProviderPipeline
}
// TODO: Can we delete this method?
// numberOfTransfersDone returns the numberOfTransfersDone_doNotUse of JobPartPlanInfo
// instance in thread safe manner
//func (jpm *jobPartMgr) numberOfTransfersDone() uint32 { return atomic.LoadUint32(&jpm.numberOfTransfersDone_doNotUse)}
// setNumberOfTransfersDone sets the number of transfers done to a specific value
// in a thread safe manner
//func (jppi *jobPartPlanInfo) setNumberOfTransfersDone(val uint32) {
// atomic.StoreUint32(&jPartPlanInfo.numberOfTransfersDone_doNotUse, val)
//}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"fmt"
"net/http"
"runtime"
"strings"
"sync"
"sync/atomic"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
)
// Compile-time assertion that *jobMgr satisfies IJobMgr.
var _ IJobMgr = &jobMgr{}
// PartNumber aliases the common definition for brevity within this package.
type PartNumber = common.PartNumber
// InMemoryTransitJobState defines job state transit in memory, and not in JobPartPlan file.
// Note: InMemoryTransitJobState should only be set when request come from cmd(FE) module to STE module.
// In memory CredentialInfo is currently maintained per job in STE, as FE could have many-to-one relationship with STE,
// i.e. different jobs could have different OAuth tokens requested from FE, and these jobs can run at same time in STE.
// This can be optimized if FE would no more be another module vs STE module.
type InMemoryTransitJobState struct {
	credentialInfo common.CredentialInfo
}
// IJobMgr is the runtime interface for managing a single job: adding and
// looking up its parts, scheduling/resuming transfers, tracking scheduling
// state, and exposing shared facilities (logging, HTTP client, credentials,
// perf diagnostics).
type IJobMgr interface {
	JobID() common.JobID
	JobPartMgr(partNum PartNumber) (IJobPartMgr, bool)
	//Throughput() XferThroughput
	AddJobPart(partNum PartNumber, planFile JobPartPlanFileName, sourceSAS string,
		destinationSAS string, scheduleTransfers bool) IJobPartMgr
	SetIncludeExclude(map[string]int, map[string]int)
	IncludeExclude() (map[string]int, map[string]int)
	ResumeTransfers(appCtx context.Context)
	AllTransfersScheduled() bool
	ConfirmAllTransfersScheduled()
	ResetAllTransfersScheduled()
	PipelineLogInfo() pipeline.LogOptions
	ReportJobPartDone() uint32
	Context() context.Context
	Cancel()
	// TODO: added for debugging purpose. remove later
	OccupyAConnection()
	// TODO: added for debugging purpose. remove later
	ReleaseAConnection()
	// TODO: added for debugging purpose. remove later
	ActiveConnections() int64
	GetPerfInfo() (displayStrings []string, constraint common.PerfConstraint)
	//Close()
	getInMemoryTransitJobState() InMemoryTransitJobState     // get in memory transit job state saved in this job.
	setInMemoryTransitJobState(state InMemoryTransitJobState) // set in memory transit job state saved in this job.
	ChunkStatusLogger() common.ChunkStatusLogger
	HttpClient() *http.Client
	common.ILoggerCloser
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// newJobMgr constructs the runtime manager for a single job, wiring up its
// per-job logger, chunk-status logger, shared HTTP client, and part map,
// then performing the initial reset (open log, derive context, zero counters).
func newJobMgr(appLogger common.ILogger, jobID common.JobID, appCtx context.Context, level common.LogLevel, commandString string, logFileFolder string) IJobMgr {
	// atomicAllTransfersScheduled is set to 1 since this api is also called when new job part is ordered.
	// Per-chunk state output is only produced at debug log level.
	enableChunkLogOutput := level.ToPipelineLogLevel() == pipeline.LogDebug
	jm := jobMgr{jobID: jobID, jobPartMgrs: newJobPartToJobPartMgr(), include: map[string]int{}, exclude: map[string]int{},
		httpClient:        NewAzcopyHTTPClient(),
		logger:            common.NewJobLogger(jobID, level, appLogger, logFileFolder),
		chunkStatusLogger: common.NewChunkStatusLogger(jobID, logFileFolder, enableChunkLogOutput),
		/*Other fields remain zero-value until this job is scheduled */}
	jm.reset(appCtx, commandString)
	return &jm
}
// reset (re)initializes the job for scheduling: it opens the log, records the
// user's command line (when present), derives a cancellable context from
// appCtx, and zeroes the byte-progress counters and parts-done count.
// It is called both at construction and when a paused job is resumed.
func (jm *jobMgr) reset(appCtx context.Context, commandString string) IJobMgr {
	jm.logger.OpenLog()
	// log the user given command to the job log file.
	// since the log file is opened in case of resume, list and many other operations
	// for which commandString passed is empty, the length check is added
	if len(commandString) > 0 {
		jm.logger.Log(pipeline.LogInfo, fmt.Sprintf("Job-Command %s", commandString))
	}
	jm.logConcurrencyParameters()
	jm.ctx, jm.cancel = context.WithCancel(appCtx)
	atomic.StoreUint64(&jm.atomicNumberOfBytesCovered, 0)
	atomic.StoreUint64(&jm.atomicTotalBytesToXfer, 0)
	jm.partsDone = 0
	return jm
}
// logConcurrencyParameters records the environment's concurrency-related
// settings (CPU count, RAM/file-handle limits, initiation routine count)
// in the job log, to aid later performance diagnosis.
func (jm *jobMgr) logConcurrencyParameters() {
	jm.logger.Log(pipeline.LogInfo, fmt.Sprintf("Number of CPUs: %d", runtime.NumCPU()))
	jm.logger.Log(pipeline.LogInfo, fmt.Sprintf("Max file buffer RAM %.3f GB", float32(JobsAdmin.(*jobsAdmin).cacheLimiter.Limit())/(1024*1024*1024)))
	jm.logger.Log(pipeline.LogInfo, fmt.Sprintf("Max open files when downloading: %d", JobsAdmin.(*jobsAdmin).fileCountLimiter.Limit()))
	jm.logger.Log(pipeline.LogInfo, fmt.Sprintf("Max concurrent transfer initiation routines: %d", NumTransferInitiationRoutines))
	// TODO: find a way to add concurrency value here (i.e. number of chunk func worker go routines)
}
// jobMgr represents the runtime information for a Job
type jobMgr struct {
	// NOTE: for the 64 bit atomic functions to work on a 32 bit system, we have to guarantee the right 64-bit alignment
	// so the 64 bit integers are placed first in the struct to avoid future breaks
	// refer to: https://golang.org/pkg/sync/atomic/#pkg-note-BUG
	atomicNumberOfBytesCovered uint64
	atomicTotalBytesToXfer     uint64
	// atomicCurrentConcurrentConnections defines the number of active goroutines performing the transfer / executing the chunk func
	// TODO: added for debugging purpose. remove later
	atomicCurrentConcurrentConnections int64
	// atomicAllTransfersScheduled defines whether all job parts have been iterated and resumed or not
	atomicAllTransfersScheduled int32
	atomicTransferDirection     common.TransferDirection
	logger            common.ILoggerResetable
	chunkStatusLogger common.ChunkStatusLoggerCloser
	jobID             common.JobID // The Job's unique ID
	// ctx governs all of this job's transfers; cancel aborts them all.
	ctx    context.Context
	cancel context.CancelFunc
	// Share the same HTTP Client across all job parts, so that the we maximize re-use of
	// its internal connection pool
	httpClient *http.Client
	jobPartMgrs jobPartToJobPartMgr // The map of part #s to JobPartMgrs
	// partsDone keep the count of completed part of the Job.
	partsDone uint32
	//throughput  common.CountPerSecond // TODO: Set LastCheckedTime to now
	inMemoryTransitJobState InMemoryTransitJobState
	// list of transfer mentioned to include only then while resuming the job
	include map[string]int
	// list of transfer mentioned to exclude while resuming the job
	exclude map[string]int
	// finalPartOrdered records whether the part flagged IsFinalPart has been added yet.
	finalPartOrdered bool
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Progress returns (bytes transferred so far, total bytes to transfer).
func (jm *jobMgr) Progress() (uint64, uint64) {
	return atomic.LoadUint64(&jm.atomicNumberOfBytesCovered),
		atomic.LoadUint64(&jm.atomicTotalBytesToXfer)
}
//func (jm *jobMgr) Throughput() XferThroughput { return jm.throughput }
// JobID returns the JobID that this jobMgr manages
func (jm *jobMgr) JobID() common.JobID { return jm.jobID }
// JobPartMgr looks up a job's part
func (jm *jobMgr) JobPartMgr(partNumber PartNumber) (IJobPartMgr, bool) {
	return jm.jobPartMgrs.Get(partNumber)
}
// Add 1 to the active number of goroutine performing the transfer or executing the chunkFunc
// TODO: added for debugging purpose. remove later
func (jm *jobMgr) OccupyAConnection() {
	atomic.AddInt64(&jm.atomicCurrentConcurrentConnections, 1)
}
// Sub 1 from the active number of goroutine performing the transfer or executing the chunkFunc
// TODO: added for debugging purpose. remove later
func (jm *jobMgr) ReleaseAConnection() {
	atomic.AddInt64(&jm.atomicCurrentConcurrentConnections, -1)
}
// returns the number of goroutines actively performing the transfer / executing the chunkFunc
// TODO: added for debugging purpose. remove later
func (jm *jobMgr) ActiveConnections() int64 {
	return atomic.LoadInt64(&jm.atomicCurrentConcurrentConnections)
}
// GetPerfStrings returns strings that may be logged for performance diagnostic purposes
// The number and content of strings may change as we enhance our perf diagnostics
func (jm *jobMgr) GetPerfInfo() (displayStrings []string, constraint common.PerfConstraint) {
	atomicTransferDirection := jm.atomicTransferDirection.AtomicLoad()
	// get data appropriate to our current transfer direction
	chunkStateCounts := jm.chunkStatusLogger.GetCounts(atomicTransferDirection)
	// convert the counts to simple strings for consumption by callers
	// each entry is "<first letter of wait-reason>: <count>"; the last slot is a 'T' total
	const format = "%c: %2d"
	result := make([]string, len(chunkStateCounts)+1)
	total := int64(0)
	for i, c := range chunkStateCounts {
		result[i] = fmt.Sprintf(format, c.WaitReason.Name[0], c.Count)
		total += c.Count
	}
	result[len(result)-1] = fmt.Sprintf(format, 'T', total)
	con := jm.chunkStatusLogger.GetPrimaryPerfConstraint(atomicTransferDirection)
	// logging from here is a bit of a hack
	// TODO: can we find a better way to get this info into the log? The caller is at app level,
	//    not job level, so can't log it directly AFAICT.
	jm.logPerfInfo(result, con)
	return result, con
}
// logPerfInfo writes the periodic performance snapshot — the chunk-state count
// strings and the primary perf constraint — to the job log at Info level.
func (jm *jobMgr) logPerfInfo(displayStrings []string, constraint common.PerfConstraint) {
	states := strings.Join(displayStrings, ", ")
	jm.Log(pipeline.LogInfo, fmt.Sprintf("PERF: primary performance constraint is %s. States: %s", constraint, states))
}
// initializeJobPartPlanInfo func initializes the JobPartPlanInfo handler for given JobPartOrder
// AddJobPart creates the manager for one part of this job: it memory-maps the
// part's plan file, registers the manager in the part map, records whether this
// is the final part and the transfer direction, and optionally queues the part
// for scheduling by the jobs admin.
func (jm *jobMgr) AddJobPart(partNum PartNumber, planFile JobPartPlanFileName, sourceSAS string,
	destinationSAS string, scheduleTransfers bool) IJobPartMgr {
	jpm := &jobPartMgr{jobMgr: jm, filename: planFile, sourceSAS: sourceSAS,
		destinationSAS: destinationSAS, pacer: JobsAdmin.(*jobsAdmin).pacer,
		slicePool:        JobsAdmin.(*jobsAdmin).slicePool,
		cacheLimiter:     JobsAdmin.(*jobsAdmin).cacheLimiter,
		fileCountLimiter: JobsAdmin.(*jobsAdmin).fileCountLimiter}
	jpm.planMMF = jpm.filename.Map()
	jm.jobPartMgrs.Set(partNum, jpm)
	jm.finalPartOrdered = jpm.planMMF.Plan().IsFinalPart
	jm.setDirection(jpm.Plan().FromTo)
	if scheduleTransfers {
		// If the schedule transfer is set to true
		// Instead of the scheduling the Transfer for given JobPart
		// JobPart is put into the partChannel
		// from where it is picked up and scheduled
		//jpm.ScheduleTransfers(jm.ctx, make(map[string]int), make(map[string]int))
		JobsAdmin.QueueJobParts(jpm)
	}
	return jpm
}
// setDirection remembers which direction we are running in: upload, download,
// or service-to-service copy. It records the direction of the most
// recently-added job part — that's where the fromTo information lives — on the
// assumption that every part of a job runs in the same direction.
// TODO: Optimize this when it's necessary for delete.
func (jm *jobMgr) setDirection(fromTo common.FromTo) {
	srcIsLocal := fromTo.From() == common.ELocation.Local()
	dstIsLocal := fromTo.To() == common.ELocation.Local()
	if srcIsLocal && !dstIsLocal { // local -> remote
		jm.atomicTransferDirection.AtomicStore(common.ETransferDirection.Upload())
	}
	if !srcIsLocal && dstIsLocal { // remote -> local
		jm.atomicTransferDirection.AtomicStore(common.ETransferDirection.Download())
	}
	if fromTo.From().IsRemote() && fromTo.To().IsRemote() { // remote -> remote
		jm.atomicTransferDirection.AtomicStore(common.ETransferDirection.S2SCopy())
	}
	// Note: a local -> local combination stores nothing, as in the original logic.
}
// HttpClient returns the HTTP client shared by all of this job's parts
// (shared to maximize re-use of its connection pool).
func (jm *jobMgr) HttpClient() *http.Client {
	return jm.httpClient
}
// SetIncludeExclude sets the include / exclude list of transfers
// supplied with resume command to include or exclude mentioned transfers
func (jm *jobMgr) SetIncludeExclude(include, exclude map[string]int) {
	jm.include = include
	jm.exclude = exclude
}
// Returns the list of transfer mentioned to include / exclude
func (jm *jobMgr) IncludeExclude() (map[string]int, map[string]int) {
	return jm.include, jm.exclude
}
// ScheduleTransfers schedules this job part's transfers. It is called when a new job part is ordered & is also called to resume a paused Job
// ResumeTransfers re-initializes the job against the given app context and
// re-queues every known job part with the jobs admin for scheduling.
func (jm *jobMgr) ResumeTransfers(appCtx context.Context) {
	jm.reset(appCtx, "")
	// Since while creating the JobMgr, atomicAllTransfersScheduled is set to true
	// reset it to false while resuming it
	//jm.ResetAllTransfersScheduled()
	// Iterate with the write lock (readonly=false) while re-queuing each part.
	jm.jobPartMgrs.Iterate(false, func(p common.PartNumber, jpm IJobPartMgr) {
		JobsAdmin.QueueJobParts(jpm)
		//jpm.ScheduleTransfers(jm.ctx, includeTransfer, excludeTransfer)
	})
}
// AllTransfersScheduled returns whether Job has completely resumed or not
func (jm *jobMgr) AllTransfersScheduled() bool {
	return atomic.LoadInt32(&jm.atomicAllTransfersScheduled) == 1
}
// ConfirmAllTransfersScheduled sets the atomicAllTransfersScheduled to true
func (jm *jobMgr) ConfirmAllTransfersScheduled() {
	atomic.StoreInt32(&jm.atomicAllTransfersScheduled, 1)
}
// ResetAllTransfersScheduled sets the atomicAllTransfersScheduled back to false
func (jm *jobMgr) ResetAllTransfersScheduled() {
	atomic.StoreInt32(&jm.atomicAllTransfersScheduled, 0)
}
// ReportJobPartDone is called to report that a job part completed or failed.
// It atomically increments the count of finished parts. Once the final part
// has been ordered AND every part has reported in, it finalizes the overall
// job status held in part 0's plan (Cancelling -> Cancelled, or
// InProgress -> Completed) and flushes the chunk-status log.
// Returns the number of parts done so far.
func (jm *jobMgr) ReportJobPartDone() uint32 {
	shouldLog := jm.ShouldLog(pipeline.LogInfo)
	partsDone := atomic.AddUint32(&jm.partsDone, 1)
	// If the last part is still awaited or other parts are all still not complete,
	// JobPart 0 status is not changed.
	if partsDone != jm.jobPartMgrs.Count() || !jm.finalPartOrdered {
		if shouldLog {
			// Reworded: previous message ("is part of Job which %d total number
			// of parts done ") was garbled.
			jm.Log(pipeline.LogInfo, fmt.Sprintf("%d of the Job's parts are done so far", partsDone))
		}
		return partsDone
	}
	if shouldLog {
		jm.Log(pipeline.LogInfo, fmt.Sprintf("all parts of Job %s successfully completed, cancelled or paused", jm.jobID.String()))
	}
	// The job-wide status lives in part 0's plan; it must exist.
	jobPart0Mgr, ok := jm.jobPartMgrs.Get(0)
	if !ok {
		jm.Panic(fmt.Errorf("Failed to find Job %v, Part #0", jm.jobID))
	}
	switch part0Plan := jobPart0Mgr.Plan(); part0Plan.JobStatus() {
	case common.EJobStatus.Cancelling():
		part0Plan.SetJobStatus(common.EJobStatus.Cancelled())
		if shouldLog {
			jm.Log(pipeline.LogInfo, fmt.Sprintf("all parts of Job %v successfully cancelled; cleaning up the Job", jm.jobID))
		}
		//jm.jobsInfo.cleanUpJob(jm.jobID)
	case common.EJobStatus.InProgress():
		part0Plan.SetJobStatus((common.EJobStatus).Completed())
	}
	jm.chunkStatusLogger.FlushLog() // TODO: remove once we sort out what will be calling CloseLog (currently nothing)
	return partsDone
}
// getInMemoryTransitJobState returns the in-memory-only job state (e.g. credentials)
// that is deliberately NOT persisted to the JobPartPlan file.
func (jm *jobMgr) getInMemoryTransitJobState() InMemoryTransitJobState {
	return jm.inMemoryTransitJobState
}
// Note: InMemoryTransitJobState should only be set when request come from cmd(FE) module to STE module.
// And the state should no more be changed inside STE module.
func (jm *jobMgr) setInMemoryTransitJobState(state InMemoryTransitJobState) {
	jm.inMemoryTransitJobState = state
}
// Small accessors and delegations to the job's context and loggers.
func (jm *jobMgr) Context() context.Context                { return jm.ctx }
func (jm *jobMgr) Cancel()                                 { jm.cancel() }
func (jm *jobMgr) ShouldLog(level pipeline.LogLevel) bool  { return jm.logger.ShouldLog(level) }
func (jm *jobMgr) Log(level pipeline.LogLevel, msg string) { jm.logger.Log(level, msg) }
// PipelineLogInfo adapts the job logger into the options shape the pipeline package expects.
func (jm *jobMgr) PipelineLogInfo() pipeline.LogOptions {
	return pipeline.LogOptions{
		Log:       jm.Log,
		ShouldLog: func(level pipeline.LogLevel) bool { return level <= jm.logger.MinimumLogLevel() },
	}
}
func (jm *jobMgr) Panic(err error) { jm.logger.Panic(err) }
// CloseLog closes the job log and flushes any buffered chunk-status entries.
func (jm *jobMgr) CloseLog() {
	jm.logger.CloseLog()
	jm.chunkStatusLogger.FlushLog()
}
func (jm *jobMgr) ChunkStatusLogger() common.ChunkStatusLogger {
	return jm.chunkStatusLogger
}
// PartsDone returns the number of the Job's parts that are either completed or failed
//func (jm *jobMgr) PartsDone() uint32 { return atomic.LoadUint32(&jm.partsDone) }
// SetPartsDone sets the number of Job's parts that are done (completed or failed)
//func (jm *jobMgr) SetPartsDone(partsDone uint32) { atomic.StoreUint32(&jm.partsDone, partsDone) }
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// jobPartToJobPartMgr is an RWMutex-guarded map from part number to its manager.
// The NoCopy guard panics if the struct is copied by value (which would copy the lock).
type jobPartToJobPartMgr struct {
	nocopy common.NoCopy
	lock   sync.RWMutex
	m      map[PartNumber]IJobPartMgr
}
// newJobPartToJobPartMgr returns an empty, ready-to-use part map.
func newJobPartToJobPartMgr() jobPartToJobPartMgr {
	return jobPartToJobPartMgr{m: make(map[PartNumber]IJobPartMgr)}
}
// Count returns the number of job parts currently registered (thread-safe).
func (m *jobPartToJobPartMgr) Count() uint32 {
	m.nocopy.Check()
	m.lock.RLock()
	defer m.lock.RUnlock()
	return uint32(len(m.m))
}
// Set stores (or replaces) the manager for the given part number (thread-safe).
func (m *jobPartToJobPartMgr) Set(key common.PartNumber, value IJobPartMgr) {
	m.nocopy.Check()
	m.lock.Lock()
	defer m.lock.Unlock()
	m.m[key] = value
}
// Get looks up the manager for the given part number; ok reports whether it exists.
func (m *jobPartToJobPartMgr) Get(key common.PartNumber) (value IJobPartMgr, ok bool) {
	m.nocopy.Check()
	m.lock.RLock()
	defer m.lock.RUnlock()
	value, ok = m.m[key]
	return value, ok
}
// Delete removes the entry for the given part number, if present (thread-safe).
func (m *jobPartToJobPartMgr) Delete(key common.PartNumber) {
	m.nocopy.Check()
	m.lock.Lock()
	defer m.lock.Unlock()
	delete(m.m, key)
}
// We purposely disallow len
// Iterate invokes f once for every (part number, part manager) pair in the map.
// When readonly is true only the read lock is held, so concurrent readers may
// iterate simultaneously; either way, f must not mutate the map.
func (m *jobPartToJobPartMgr) Iterate(readonly bool, f func(k common.PartNumber, v IJobPartMgr)) {
	m.nocopy.Check()
	var guard sync.Locker = &m.lock
	if readonly {
		guard = m.lock.RLocker()
	}
	guard.Lock()
	defer guard.Unlock()
	for partNum, partMgr := range m.m {
		f(partNum, partMgr)
	}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ThroughputState struct holds the attribute to monitor the through of an existing JobOrder
//type XferThroughput struct {
// lastCheckedTime time.Time
// lastCheckedBytes int64
// currentBytes int64
//}
// getLastCheckedTime api returns the lastCheckedTime of ThroughputState instance in thread-safe manner
/*func (t *XferThroughput) LastCheckedTime() time.Time { return t.lastCheckedTime }
// updateLastCheckTime api updates the lastCheckedTime of ThroughputState instance in thread-safe manner
func (t *XferThroughput) SetLastCheckTime(currentTime time.Time) { t.lastCheckedTime = currentTime }
// getLastCheckedBytes api returns the lastCheckedBytes of ThroughputState instance in thread-safe manner
func (t *XferThroughput) LastCheckedBytes() int64 { return atomic.LoadInt64(&t.lastCheckedBytes) }
// updateLastCheckedBytes api updates the lastCheckedBytes of ThroughputState instance in thread-safe manner
func (t *XferThroughput) SetLastCheckedBytes(bytes int64) {
atomic.StoreInt64(&t.lastCheckedBytes, bytes)
}
// getCurrentBytes api returns the currentBytes of ThroughputState instance in thread-safe manner
func (t *XferThroughput) CurrentBytes() int64 { return atomic.LoadInt64(&t.currentBytes) }
// updateCurrentBytes api adds the value in currentBytes of ThroughputState instance in thread-safe manner
func (t *XferThroughput) SetCurrentBytes(bytes int64) int64 {
return atomic.AddInt64(&t.currentBytes, bytes)
}
*/
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import "github.com/Azure/azure-storage-file-go/azfile"
// provide an enumerator that lists a given blob resource (could be a blob or virtual dir)
// and schedule delete transfers to remove them
// TODO consider merging with newRemoveFileEnumerator
func newRemoveBlobEnumerator(cca *cookedCopyCmdArgs) (enumerator *copyEnumerator, err error) {
	sourceTraverser, err := newBlobTraverserForRemove(cca)
	if err != nil {
		return nil, err
	}
	// check if we are targeting a single blob
	_, isSingleBlob := sourceTraverser.getPropertiesIfSingleBlob()
	transferScheduler := newRemoveTransferProcessor(cca, NumOfFilesPerDispatchJobPart, isSingleBlob)
	includeFilters := buildIncludeFilters(cca.includePatterns)
	excludeFilters := buildExcludeFilters(cca.excludePatterns)
	// set up the filters in the right order (include first, then exclude)
	filters := append(includeFilters, excludeFilters...)
	// finalize dispatches the last job part; if no part was ever dispatched,
	// nothing matched and the user is told to check the recursive flag.
	finalize := func() error {
		jobInitiated, err := transferScheduler.dispatchFinalPart()
		if err != nil {
			return err
		}
		if !jobInitiated {
			glcm.Error("Nothing to delete. Please verify that recursive flag is set properly if targeting a directory.")
		}
		return nil
	}
	return newCopyEnumerator(sourceTraverser, filters, transferScheduler.scheduleCopyTransfer, finalize), nil
}
// provide an enumerator that lists a given Azure File resource (could be a file or dir)
// and schedule delete transfers to remove them
// note that for a directory to be removed, it has to be emptied first
func newRemoveFileEnumerator(cca *cookedCopyCmdArgs) (enumerator *copyEnumerator, err error) {
	sourceTraverser, err := newFileTraverserForRemove(cca)
	if err != nil {
		return nil, err
	}
	// check if we are targeting a single file
	_, isSingleFile := sourceTraverser.getPropertiesIfSingleFile()
	transferScheduler := newRemoveTransferProcessor(cca, NumOfFilesPerDispatchJobPart, isSingleFile)
	includeFilters := buildIncludeFilters(cca.includePatterns)
	excludeFilters := buildExcludeFilters(cca.excludePatterns)
	// set up the filters in the right order (include first, then exclude)
	filters := append(includeFilters, excludeFilters...)
	// finalize dispatches the last job part; if no part was ever dispatched,
	// nothing matched and the user is told to check the recursive flag.
	finalize := func() error {
		jobInitiated, err := transferScheduler.dispatchFinalPart()
		if err != nil {
			return err
		}
		if !jobInitiated {
			glcm.Error("Nothing to delete. Please verify that recursive flag is set properly if targeting a directory.")
		}
		return nil
	}
	return newCopyEnumerator(sourceTraverser, filters, transferScheduler.scheduleCopyTransfer, finalize), nil
}
// directoryStack is a simple LIFO stack of Azure File directory URLs,
// used for iterative (non-recursive-call) directory traversal.
type directoryStack []azfile.DirectoryURL
// Push places d on top of the stack.
func (s *directoryStack) Push(d azfile.DirectoryURL) {
	*s = append(*s, d)
}
// Pop removes and returns the top of the stack; the second return value is
// false when the stack is empty.
func (s *directoryStack) Pop() (*azfile.DirectoryURL, bool) {
	if len(*s) == 0 {
		return nil, false
	}
	top := len(*s) - 1
	popped := (*s)[top]
	*s = (*s)[:top]
	return &popped, true
}
<file_sep>package cmd
import (
"context"
"errors"
"fmt"
"net/url"
"os"
"path/filepath"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
)
// copyUploadEnumerator accumulates the job part order request(s) for a local->remote upload.
type copyUploadEnumerator common.CopyJobPartOrderRequest
// this function accepts the list of files/directories to transfer and processes them
func (e *copyUploadEnumerator) enumerate(cca *cookedCopyCmdArgs) error {
util := copyHandlerUtil{}
ctx := context.TODO() // Ensure correct context is used
// attempt to parse the destination url
destinationURL, err := url.Parse(cca.destination)
// the destination should have already been validated, it would be surprising if it cannot be parsed at this point
common.PanicIfErr(err)
// list the source files and directories
listOfFilesAndDirectories, err := filepath.Glob(cca.source)
if err != nil || len(listOfFilesAndDirectories) == 0 {
return fmt.Errorf("cannot find source to upload")
}
// when a single file is being uploaded, we need to treat this case differently, as the destinationURL might be a blob
if len(listOfFilesAndDirectories) == 1 {
f, err := os.Stat(listOfFilesAndDirectories[0])
if err != nil {
return errors.New("cannot find source to upload")
}
if f.Mode().IsRegular() {
// Check if the files are passed with include flag
// then source needs to be directory, if it is a file
// then error is returned
if len(e.Include) > 0 {
return fmt.Errorf("for the use of include flag, source needs to be a directory")
}
// append file name as blob name in case the given URL is a container
if (e.FromTo == common.EFromTo.LocalBlob() && util.urlIsContainerOrShare(destinationURL)) ||
(e.FromTo == common.EFromTo.LocalFile() && util.urlIsAzureFileDirectory(ctx, destinationURL)) {
destinationURL.Path = util.generateObjectPath(destinationURL.Path, f.Name())
}
// append file name as blob name in case the given URL is a blob FS directory.
if e.FromTo == common.EFromTo.LocalBlobFS() {
// Create blob FS pipeline.
p, err := createBlobFSPipeline(ctx, e.CredentialInfo)
if err != nil {
return err
}
if util.urlIsBFSFileSystemOrDirectory(ctx, destinationURL, p) {
destinationURL.Path = util.generateObjectPath(destinationURL.Path, f.Name())
}
}
cleanedSource := strings.Replace(listOfFilesAndDirectories[0], common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
err = e.addTransfer(common.CopyTransfer{
Source: cleanedSource,
Destination: destinationURL.String(),
LastModifiedTime: f.ModTime(),
SourceSize: f.Size(),
}, cca)
if err != nil {
return err
}
return e.dispatchFinalPart(cca)
}
}
// if the user specifies a virtual directory ex: /container_name/extra_path
// then we should extra_path as a prefix while uploading
// temporarily save the path of the container
cleanContainerPath := destinationURL.Path
// If the user has provided the listofFiles explicitly to copy, there is no
// need to glob the source and match the patterns.
// This feature is supported only for Storage Explorer and doesn't follow the symlinks.
if len(cca.listOfFilesToCopy) > 0 {
for _, file := range cca.listOfFilesToCopy {
tempDestinationURl := *destinationURL
parentSourcePath, _ := util.getRootPathWithoutWildCards(cca.source)
if len(parentSourcePath) > 0 && parentSourcePath[len(parentSourcePath)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
parentSourcePath = parentSourcePath[:len(parentSourcePath)-1]
}
filePath := fmt.Sprintf("%s%s%s", parentSourcePath, common.AZCOPY_PATH_SEPARATOR_STRING, file)
f, err := os.Stat(filePath)
if err != nil {
glcm.Info(fmt.Sprintf("Error getting the fileInfo for file %s. failed with error %s", filePath, err.Error()))
continue
}
if f.Mode().IsRegular() {
// replace the parent source path in the filePath to to ensure the correct path mentioned in the list of flags.
// For Example: Source = /home/user/dir list-of-files = dir2/1.txt;dir2/2.txt dst = https://container/dir2/1.txt
// and https://container/dir2/2.txt
relativePath := strings.Replace(filePath, parentSourcePath, "", 1)
if len(relativePath) > 0 && relativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
relativePath = relativePath[1:]
}
// If the file is a regular file, calculate the destination path and queue for transfer.
tempDestinationURl.Path = util.generateObjectPath(tempDestinationURl.Path, relativePath)
err = e.addTransfer(common.CopyTransfer{
Source: filePath,
Destination: tempDestinationURl.String(),
LastModifiedTime: f.ModTime(),
SourceSize: f.Size(),
}, cca)
if err != nil {
glcm.Info(fmt.Sprintf("error %s adding source %s and destination %s as a transfer", err.Error(), filePath, destinationURL))
}
continue
}
// If the last character of the filePath is a path separator, strip the path separator.
if len(filePath) > 0 && filePath[len(filePath)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
filePath = filePath[:len(filePath)-1]
}
if f.IsDir() && cca.recursive {
// If the file is a directory, walk through all the elements inside the directory and queue the elements for transfer.
filepath.Walk(filePath, func(pathToFile string, info os.FileInfo, err error) error {
if err != nil {
glcm.Info(fmt.Sprintf("Accessing %s failed with error %s", pathToFile, err.Error()))
return nil
}
if info.IsDir() {
return nil
} else if info.Mode().IsRegular() { // If the resource is file
// replace the OS path separator in pathToFile string with AZCOPY_PATH_SEPARATOR
// this replacement is done to handle the windows file paths where path separator "\\"
pathToFile = strings.Replace(pathToFile, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
// replace the OS path separator in fileOrDirectoryPath string with AZCOPY_PATH_SEPARATOR
// this replacement is done to handle the windows file paths where path separator "\\"
filePath = strings.Replace(filePath, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
// upload the files
// the path in the blob name started at the given fileOrDirectoryPath
// example: fileOrDirectoryPath = "/dir1/dir2/dir3" pathToFile = "/dir1/dir2/dir3/file1.txt" result = "dir3/file1.txt"
tempDestinationURl.Path = util.generateObjectPath(cleanContainerPath,
util.getRelativePath(filePath, pathToFile))
err = e.addTransfer(common.CopyTransfer{
Source: pathToFile,
Destination: tempDestinationURl.String(),
LastModifiedTime: info.ModTime(),
SourceSize: info.Size(),
}, cca)
if err != nil {
return err
}
}
return nil
})
}
}
if e.PartNum == 0 && len(e.Transfers) == 0 {
return errors.New("nothing can be uploaded, please use --recursive to upload directories")
}
return e.dispatchFinalPart(cca)
}
// Get the source path without the wildcards
// This is defined since the files mentioned with exclude flag
// & include flag are relative to the Source
// If the source has wildcards, then files are relative to the
// parent source path which is the path of last directory in the source
// without wildcards
// For Example: src = "/home/user/dir1" parentSourcePath = "/home/user/dir1"
// For Example: src = "/home/user/dir*" parentSourcePath = "/home/user"
// For Example: src = "/home/*" parentSourcePath = "/home"
parentSourcePath := cca.source
wcIndex := util.firstIndexOfWildCard(parentSourcePath)
if wcIndex != -1 {
parentSourcePath = parentSourcePath[:wcIndex]
pathSepIndex := strings.LastIndex(parentSourcePath, common.AZCOPY_PATH_SEPARATOR_STRING)
parentSourcePath = parentSourcePath[:pathSepIndex]
}
// walk through every file and directory
// upload every file
// upload directory recursively if recursive option is on
for _, fileOrDirectoryPath := range listOfFilesAndDirectories {
f, err := os.Stat(fileOrDirectoryPath)
if err == nil {
// directories are uploaded only if recursive is on
if f.IsDir() && cca.recursive {
// walk goes through the entire directory tree
filepath.Walk(fileOrDirectoryPath, func(pathToFile string, f os.FileInfo, err error) error {
if err != nil {
glcm.Info(fmt.Sprintf("Accessing %s failed with error %s", pathToFile, err.Error()))
return nil
}
if f.IsDir() {
// For Blob and Azure Files, empty directories are not uploaded
// For BlobFs, empty directories are to be uploaded as well
// If the directory is not empty, then uploading a file inside the directory path
// will create the parent directory of file, so transfer is not required to create
// a directory
// TODO: Currently not implemented the upload of empty directories for BlobFS
return nil
} else if f.Mode().IsRegular() { // If the resource is file
// replace the OS path separator in pathToFile string with AZCOPY_PATH_SEPARATOR
// this replacement is done to handle the windows file paths where path separator "\\"
pathToFile = strings.Replace(pathToFile, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
// replace the OS path separator in fileOrDirectoryPath string with AZCOPY_PATH_SEPARATOR
// this replacement is done to handle the windows file paths where path separator "\\"
fileOrDirectoryPath = strings.Replace(fileOrDirectoryPath, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
// check if the should be included or not
if !util.resourceShouldBeIncluded(parentSourcePath, e.Include, pathToFile) {
return nil
}
// Check if the file should be excluded or not.
if util.resourceShouldBeExcluded(parentSourcePath, e.Exclude, pathToFile) {
return nil
}
// upload the files
// the path in the blob name started at the given fileOrDirectoryPath
// example: fileOrDirectoryPath = "/dir1/dir2/dir3" pathToFile = "/dir1/dir2/dir3/file1.txt" result = "dir3/file1.txt"
destinationURL.Path = util.generateObjectPath(cleanContainerPath,
util.getRelativePath(fileOrDirectoryPath, pathToFile))
err = e.addTransfer(common.CopyTransfer{
Source: pathToFile,
Destination: destinationURL.String(),
LastModifiedTime: f.ModTime(),
SourceSize: f.Size(),
}, cca)
if err != nil {
return err
}
} else if f.Mode()&os.ModeSymlink != 0 {
// If follow symlink is set to false, then symlinks are not evaluated.
if !cca.followSymlinks {
return nil
}
// evaluate the symlinkPath.
evaluatedSymlinkPath, err := util.evaluateSymlinkPath(pathToFile)
if err != nil {
glcm.Info(fmt.Sprintf("error evaluating the symlink path %s", evaluatedSymlinkPath))
return nil
}
// If the path is a windows file system path, replace '\\' with '/'
// to maintain the consistency with other system paths.
if common.OS_PATH_SEPARATOR == "\\" {
pathToFile = strings.Replace(pathToFile, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
}
err = e.getSymlinkTransferList(evaluatedSymlinkPath, pathToFile, parentSourcePath, cleanContainerPath, destinationURL, cca)
if err != nil {
glcm.Info(fmt.Sprintf("error %s evaluating the symlinkPath %s", err.Error(), evaluatedSymlinkPath))
}
}
return nil
})
} else if f.Mode().IsRegular() {
// replace the OS path separator in fileOrDirectoryPath string with AZCOPY_PATH_SEPARATOR
// this replacement is done to handle the windows file paths where path separator "\\"
fileOrDirectoryPath = strings.Replace(fileOrDirectoryPath, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
// check if the should be included or not
if !util.resourceShouldBeIncluded(parentSourcePath, e.Include, fileOrDirectoryPath) {
continue
}
// Check if the file should be excluded or not.
if util.resourceShouldBeExcluded(parentSourcePath, e.Exclude, fileOrDirectoryPath) {
continue
}
// files are uploaded using their file name as blob name
destinationURL.Path = util.generateObjectPath(cleanContainerPath, f.Name())
err = e.addTransfer(common.CopyTransfer{
Source: fileOrDirectoryPath,
Destination: destinationURL.String(),
LastModifiedTime: f.ModTime(),
SourceSize: f.Size(),
}, cca)
if err != nil {
return err
}
}
} else {
glcm.Info(fmt.Sprintf("error %s accessing the filepath %s", err.Error(), fileOrDirectoryPath))
}
}
if e.PartNum == 0 && len(e.Transfers) == 0 {
return errors.New("nothing can be uploaded, please use --recursive to upload directories")
}
return e.dispatchFinalPart(cca)
}
// getSymlinkTransferList scans all the elements inside the symlinkPath and enumerates the transfers.
// If there exists a symlink inside the given symlinkPath, it recursively scans it and enumerates the transfers.
// The path of the files in the symlinkPath will be relative to the original path.
// Example 1: C:\MountedD is a symlink to D: and D: contains file1, file2.
//            The destination for file1, file2 remotely will be MountedD/file1, MountedD/file2.
// Example 2: If there exists a symlink inside D: "D:\MountedF" pointing to F: and F: contains
//            ffile1, ffile2, then the destination for ffile1, ffile2 remotely will be
//            MountedD/MountedF/ffile1 and MountedD/MountedF/ffile2.
func (e *copyUploadEnumerator) getSymlinkTransferList(symlinkPath, source, parentSource, cleanContainerPath string,
	destinationUrl *url.URL, cca *cookedCopyCmdArgs) error {
	util := copyHandlerUtil{}
	// replace the "\\" path separator with the "/" separator
	symlinkPath = strings.Replace(symlinkPath, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
	// Glob the evaluated symlinkPath and iterate through each file and sub-directory.
	listOfFilesDirs, err := filepath.Glob(symlinkPath)
	if err != nil {
		// Fix: pass the format string directly to fmt.Errorf; wrapping it in
		// fmt.Sprintf first was redundant and defeats go vet's format checking.
		return fmt.Errorf("found cycle in symlink path %s", symlinkPath)
	}
	for _, files := range listOfFilesDirs {
		// replace the windows path separator in the path with the "/" path separator
		files = strings.Replace(files, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
		fInfo, err := os.Stat(files)
		if err != nil {
			glcm.Info(fmt.Sprintf("error %s fetching the fileInfo for filePath %s", err.Error(), files))
		} else if fInfo.IsDir() {
			filepath.Walk(files, func(path string, fileInfo os.FileInfo, err error) error {
				if err != nil {
					glcm.Info(err.Error())
					return nil
				} else if fileInfo.IsDir() {
					// directories themselves are not uploaded; Walk visits their contents
					return nil
				} else if fileInfo.Mode().IsRegular() { // If the file is a regular file, i.e. not a directory and not a symlink.
					// replace the windows path separator in the path with the "/" path separator
					path = strings.Replace(path, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
					// strip the original symlink path from the filePath
					// For Example: C:\MountedD points to D:\ and path is D:\file1
					// relativePath = file1
					relativePath := strings.Replace(path, symlinkPath, "", 1)
					// If there exists a path separator at the start of the relative path, remove it
					if len(relativePath) > 0 && relativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
						relativePath = relativePath[1:]
					}
					var sourcePath = ""
					// concatenate the relative symlink path to the original source path
					// For Example: C:\MountedD points to D:\ and path is D:\file1
					// sourcePath = C:\MountedD\file1
					if len(source) > 0 && source[len(source)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
						sourcePath = fmt.Sprintf("%s%s", source, relativePath)
					} else {
						sourcePath = fmt.Sprintf("%s%s%s", source, common.AZCOPY_PATH_SEPARATOR_STRING, relativePath)
					}
					// check if the sourcePath needs to be included or not
					if !util.resourceShouldBeIncluded(parentSource, e.Include, sourcePath) {
						return nil
					}
					// check if the source has to be excluded or not
					if util.resourceShouldBeExcluded(parentSource, e.Exclude, sourcePath) {
						return nil
					}
					// create the transfer and add to the list
					destinationUrl.Path = util.generateObjectPath(cleanContainerPath,
						util.getRelativePath(parentSource, sourcePath))
					transfer := common.CopyTransfer{
						Source:           path,
						Destination:      destinationUrl.String(),
						LastModifiedTime: fileInfo.ModTime(),
						SourceSize:       fileInfo.Size(),
					}
					err = e.addTransfer(transfer, cca)
					if err != nil {
						glcm.Info(fmt.Sprintf("error %s adding the transfer source %s and destination %s", err.Error(), path, destinationUrl.String()))
					}
					return nil
				} else if fileInfo.Mode()&os.ModeSymlink != 0 { // If the file is a nested symlink
					// replace the windows path separator in the path with the "/" path separator
					path = strings.Replace(path, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
					// Evaluate the symlink path
					sLinkPath, err := util.evaluateSymlinkPath(path)
					if err != nil {
						glcm.Info(fmt.Sprintf("error %s evaluating the symlink path %s ", err.Error(), path))
						return nil
					}
					// strip the original symlink path and concatenate the relativePath to the original sourcePath
					// For Example: source = C:\MountedD sLinkPath = D:\MountedE
					// relativePath = MountedE , sourcePath = C:\MountedD\MountedE
					relativePath := strings.Replace(path, symlinkPath, "", 1)
					// If the first char of the relative path is the path separator, strip it
					if len(relativePath) > 0 && relativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
						relativePath = relativePath[1:]
					}
					var sourcePath = ""
					// concatenate the relative symlink path to the original source
					if len(source) > 0 && source[len(source)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
						sourcePath = fmt.Sprintf("%s%s", source, relativePath)
					} else {
						sourcePath = fmt.Sprintf("%s%s%s", source, common.AZCOPY_PATH_SEPARATOR_STRING, relativePath)
					}
					// recurse into the nested symlink target
					err = e.getSymlinkTransferList(sLinkPath, sourcePath,
						parentSource, cleanContainerPath, destinationUrl, cca)
					if err != nil {
						glcm.Info(fmt.Sprintf("error %s iterating through the symlink %s", err.Error(), sLinkPath))
					}
				}
				return nil
			})
		} else if fInfo.Mode().IsRegular() {
			// strip the original symlink path
			// NOTE(review): unlike the directory branch above, a leading path separator is NOT
			// stripped from relativePath here, and getRelativePath below is called with `source`
			// rather than `parentSource` — confirm whether this asymmetry is intentional.
			relativePath := strings.Replace(files, symlinkPath, "", 1)
			// concatenate the path to the parent source
			var sourcePath = ""
			if len(source) > 0 && source[len(source)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
				sourcePath = fmt.Sprintf("%s%s", source, relativePath)
			} else {
				sourcePath = fmt.Sprintf("%s%s%s", source, common.AZCOPY_PATH_SEPARATOR_STRING, relativePath)
			}
			// check if the sourcePath needs to be included or not
			if !util.resourceShouldBeIncluded(parentSource, e.Include, sourcePath) {
				continue
			}
			// check if the source has to be excluded or not
			if util.resourceShouldBeExcluded(parentSource, e.Exclude, sourcePath) {
				continue
			}
			// create the transfer and add to the list
			destinationUrl.Path = util.generateObjectPath(cleanContainerPath,
				util.getRelativePath(source, sourcePath))
			transfer := common.CopyTransfer{
				Source:           files,
				Destination:      destinationUrl.String(),
				LastModifiedTime: fInfo.ModTime(),
				SourceSize:       fInfo.Size(),
			}
			err = e.addTransfer(transfer, cca)
			if err != nil {
				glcm.Info(fmt.Sprintf("error %s adding the transfer source %s and destination %s", err.Error(), files, destinationUrl.String()))
			}
		} else {
			// neither a directory nor a regular file (e.g. device/socket) — skip it
			continue
		}
	}
	return nil
}
// addTransfer queues the given transfer onto the enumerator's underlying job part order request.
func (e *copyUploadEnumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	request := (*common.CopyJobPartOrderRequest)(e)
	return addTransfer(request, transfer, cca)
}
// dispatchFinalPart sends the last job part for this enumeration, marking the order as complete.
func (e *copyUploadEnumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	request := (*common.CopyJobPartOrderRequest)(e)
	return dispatchFinalPart(request, cca)
}
<file_sep>import utility as util
import unittest
class PageBlob_Upload_User_Scenarios(unittest.TestCase):
    # util_test_page_blob_upload_1mb verifies the azcopy upload of 1mb file
    # as a page blob.
    def util_test_page_blob_upload_1mb(self, use_oauth=False):
        """Upload a 1MB file as a page blob and validate the result.

        use_oauth: when True, authenticate with OAuth; otherwise use a SAS token.
        """
        # create the test file.
        file_name = "test_page_blob_1mb.vhd"
        file_path = util.create_test_file(file_name, 1024 * 1024)
        # execute azcopy upload.
        if not use_oauth:
            # SAS auth: the validation URL is the same SAS URL.
            dest = util.get_resource_sas(file_name)
            dest_validate = dest
        else:
            # OAuth auth: validation uses a separate (SAS-based) URL for the same blob.
            dest = util.get_resource_from_oauth_container(file_name)
            dest_validate = util.get_resource_from_oauth_container_validate(file_name)
        result = util.Command("copy").add_arguments(file_path).add_arguments(dest).add_flags("log-level", "info"). \
            add_flags("block-size-mb", "4").add_flags("blob-type","PageBlob").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute validator.
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(dest_validate).\
            add_flags("blob-type","PageBlob").execute_azcopy_verify()
        self.assertTrue(result)
# test_page_blob_upload_1mb_with_sas verifies the azcopy upload of 1mb file
# as a page blob with sas.
def test_page_blob_upload_1mb_with_sas(self):
self.util_test_page_blob_upload_1mb(False)
# test_page_blob_upload_1mb_with_oauth verifies the azcopy upload of 1mb file
# as a page blob with oauth.
def test_page_blob_upload_1mb_with_oauth(self):
self.util_test_page_blob_upload_1mb(True)
# test_page_range_for_complete_sparse_file verifies the number of Page ranges for
# complete empty file i.e each character is Null character.
def test_page_range_for_complete_sparse_file(self):
# create test file.
file_name = "sparse_file.vhd"
file_path = util.create_complete_sparse_file(file_name, 4 * 1024 * 1024)
# execute azcopy page blob upload.
destination_sas = util.get_resource_sas(file_name)
result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas).add_flags("log-level", "info"). \
add_flags("block-size-mb", "4").add_flags("blob-type","PageBlob").execute_azcopy_copy_command()
self.assertTrue(result)
# execute validator.
# no of page ranges should be 0 for the empty sparse file.
result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas).\
add_flags("blob-type","PageBlob").add_flags("verify-block-size", "true").\
add_flags("number-blocks-or-pages", "0").execute_azcopy_verify()
self.assertTrue(result)
# test_page_blob_upload_partial_sparse_file verifies the number of page ranges
# for PageBlob upload by azcopy.
def test_page_blob_upload_partial_sparse_file(self):
# create test file.
file_name = "test_partial_sparse_file.vhd"
file_path = util.create_partial_sparse_file(file_name, 16 * 1024 * 1024)
# execute azcopy pageblob upload.
destination_sas = util.get_resource_sas(file_name)
result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas).add_flags("log-level", "info"). \
add_flags("block-size-mb", "4").add_flags("blob-type","PageBlob").execute_azcopy_copy_command()
self.assertTrue(result)
# number of page range for partial sparse created above will be (size/2)
number_of_page_ranges = int((16 * 1024 * 1024 / (4 * 1024 * 1024)) / 2)
# execute validator to verify the number of page range for uploaded blob.
result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
add_flags("blob-type", "PageBlob").add_flags("verify-block-size", "true"). \
add_flags("number-blocks-or-pages", str(number_of_page_ranges)).execute_azcopy_verify()
self.assertTrue(result)
    def test_set_page_blob_tier(self):
        """Upload page blobs to a premium container with each access tier
        (P10, P20, P30, P4, P40, P50, P6) and verify the tier was applied.
        """
        # test for P10 Page Blob Access Tier
        filename = "test_page_P10_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P10").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P10"
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas).\
            add_flags("blob-type","PageBlob"). add_flags("blob-tier", "P10").execute_azcopy_verify()
        self.assertTrue(result)
        # test for P20 Page Blob Access Tier
        filename = "test_page_P20_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P20").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P20"
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas).\
            add_flags("blob-type","PageBlob") .add_flags("blob-tier", "P20").execute_azcopy_verify()
        self.assertTrue(result)
        # test for P30 Page Blob Access Tier
        filename = "test_page_P30_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P30").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P30"
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("blob-type", "PageBlob").add_flags("blob-tier", "P30").execute_azcopy_verify()
        self.assertTrue(result)
        # test for P4 Page Blob Access Tier
        filename = "test_page_P4_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P4").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P4"
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("blob-type", "PageBlob").add_flags("blob-tier", "P4").execute_azcopy_verify()
        self.assertTrue(result)
        # test for P40 Page Blob Access Tier
        filename = "test_page_P40_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P40").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P40"
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("blob-type", "PageBlob").add_flags("blob-tier", "P40").execute_azcopy_verify()
        self.assertTrue(result)
        # test for P50 Page Blob Access Tier
        filename = "test_page_P50_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P50").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P50"
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("blob-type", "PageBlob").add_flags("blob-tier", "P50").execute_azcopy_verify()
        self.assertTrue(result)
        # test for P6 Page Blob Access Tier
        filename = "test_page_P6_blob_tier.vhd"
        file_path = util.create_test_file(filename, 100 * 1024)
        destination_sas = util.get_resource_sas_from_premium_container_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("blob-type","PageBlob").add_flags("page-blob-tier", "P6").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute azcopy validate order.
        # added the expected blob-tier "P6" (comment previously said "P50" by mistake)
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("blob-type", "PageBlob").add_flags("blob-tier", "P6").execute_azcopy_verify()
self.assertTrue(result)<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"crypto/md5"
"errors"
"fmt"
"hash"
"os"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
)
// anyToRemote handles all kinds of sender operations - both uploads from local files, and S2S copies
func anyToRemote(jptm IJobPartTransferMgr, p pipeline.Pipeline, pacer *pacer, senderFactory senderFactory, sipf sourceInfoProviderFactory) {
	info := jptm.Info()
	srcSize := info.SourceSize
	// step 1. perform initial checks
	if jptm.WasCanceled() {
		jptm.ReportTransferDone()
		return
	}
	// step 2a. Create sender
	srcInfoProvider, err := sipf(jptm)
	if err != nil {
		jptm.LogSendError(info.Source, info.Destination, err.Error(), 0)
		jptm.SetStatus(common.ETransferStatus.Failed())
		jptm.ReportTransferDone()
		return
	}
	s, err := senderFactory(jptm, info.Destination, p, pacer, srcInfoProvider)
	if err != nil {
		jptm.LogSendError(info.Source, info.Destination, err.Error(), 0)
		jptm.SetStatus(common.ETransferStatus.Failed())
		jptm.ReportTransferDone()
		return
	}
	// step 2b. Read chunk size and count from the sender (since it may have applied its own defaults and/or calculations to produce these values)
	numChunks := s.NumChunks()
	if jptm.ShouldLog(pipeline.LogInfo) {
		jptm.LogTransferStart(info.Source, info.Destination, fmt.Sprintf("Specified chunk size %d", s.ChunkSize()))
	}
	if s.NumChunks() == 0 {
		panic("must always schedule one chunk, even if file is empty") // this keeps our code structure simpler, by using a dummy chunk for empty files
	}
	// step 3: Check overwrite
	// If the force Write flag is set to false
	// then check the file exists at the remote location
	// If it does, mark transfer as failed.
	if !jptm.IsForceWriteTrue() {
		exists, existenceErr := s.RemoteFileExists()
		if existenceErr != nil {
			jptm.LogSendError(info.Source, info.Destination, "Could not check file existence. "+existenceErr.Error(), 0)
			jptm.SetStatus(common.ETransferStatus.Failed()) // is a real failure, not just a FileAlreadyExists, in this case
			jptm.ReportTransferDone()
			return
		}
		if exists {
			jptm.LogSendError(info.Source, info.Destination, "File already exists", 0)
			jptm.SetStatus(common.ETransferStatus.FileAlreadyExistsFailure()) // TODO: question: is it OK to always use FileAlreadyExists here, instead of BlobAlreadyExists, even when saving to blob storage? I.e. do we really need a different error for blobs?
			jptm.ReportTransferDone()
			return
		}
	}
	// step 4: Open the local Source File (if any)
	var sourceFileFactory func() (common.CloseableReaderAt, error)
	srcFile := (common.CloseableReaderAt)(nil)
	if srcInfoProvider.IsLocal() {
		// the factory lets chunk readers re-open the file later (e.g. for retries)
		sourceFileFactory = func() (common.CloseableReaderAt, error) {
			return openSourceFile(info)
		}
		srcFile, err = sourceFileFactory()
		if err != nil {
			jptm.LogSendError(info.Source, info.Destination, "Couldn't open source-"+err.Error(), 0)
			jptm.SetStatus(common.ETransferStatus.Failed())
			jptm.ReportTransferDone()
			return
		}
		defer srcFile.Close() // we read all the chunks in this routine, so can close the file at the end
	}
	// Do LMT verification before transfer, when:
	// 1) Source is local, so getting the source file's LMT is free.
	// 2) Source is remote, i.e. S2S copy case. And source's size is larger than one chunk. So verification can possibly save transfer's cost.
	if copier, isS2SCopier := s.(s2sCopier); srcInfoProvider.IsLocal() ||
		(isS2SCopier && info.S2SSourceChangeValidation && srcSize > int64(copier.ChunkSize())) {
		lmt, err := srcInfoProvider.GetLastModifiedTime()
		if err != nil {
			jptm.LogSendError(info.Source, info.Destination, "Couldn't get source's last modified time-"+err.Error(), 0)
			jptm.SetStatus(common.ETransferStatus.Failed())
			jptm.ReportTransferDone()
			return
		}
		// NOTE(review): this compares time.Time values with != after UTC(); time.Time.Equal
		// is the conventional way to compare instants — confirm the struct comparison is intentional.
		if lmt.UTC() != jptm.LastModifiedTime().UTC() {
			jptm.LogSendError(info.Source, info.Destination, "File modified since transfer scheduled", 0)
			jptm.SetStatus(common.ETransferStatus.Failed())
			jptm.ReportTransferDone()
			return
		}
	}
	// *****
	// Error-handling rules change here.
	// ABOVE this point, we end the transfer using the code as shown above
	// BELOW this point, this routine always schedules the expected number
	// of chunks, even if it has seen a failure, and the
	// workers (the chunkfunc implementations) must use
	// jptm.FailActiveSend when there's an error)
	// TODO: are we comfortable with this approach?
	// DECISION: 16 Jan, 2019: for now, we are leaving in place the above rule than number of of completed chunks must
	// eventually reach numChunks, since we have no better short-term alternative.
	// ******
	// step 5: tell jptm what to expect, and how to clean up at the end
	jptm.SetNumberOfChunks(numChunks)
	jptm.SetActionAfterLastChunk(func() { epilogueWithCleanupSendToRemote(jptm, s, srcInfoProvider) })
	// Step 6: Go through the file and schedule chunk messages to send each chunk
	scheduleSendChunks(jptm, info.Source, srcFile, srcSize, s, sourceFileFactory, srcInfoProvider)
}
// openSourceFile returns a reader for the transfer's source: either the real file on
// disk, or a synthetic random-data "file" when the source is the test placeholder.
func openSourceFile(info TransferInfo) (common.CloseableReaderAt, error) {
	if !common.IsPlaceholderForRandomDataGenerator(info.Source) {
		return os.Open(info.Source)
	}
	// Generate a "file" of random data. Useful for testing when you want really big
	// files, but don't want to make them yourself.
	return common.NewRandomDataGenerator(info.SourceSize), nil
}
// Schedule all the send chunks.
// For upload, we force preload of each chunk to memory, and we wait (block)
// here if the amount of preloaded data gets excessive. That's OK to do,
// because if we already have that much data preloaded (and scheduled for sending in
// chunks) then we don't need to schedule any more chunks right now, so the blocking
// is harmless (and a good thing, to avoid excessive RAM usage).
// To take advantage of the good sequential read performance provided by many file systems,
// and to be able to compute an MD5 hash for the file, we work sequentially through the file here.
func scheduleSendChunks(jptm IJobPartTransferMgr, srcPath string, srcFile common.CloseableReaderAt, srcSize int64, s ISenderBase, sourceFileFactory common.ChunkReaderSourceFactory, srcInfoProvider ISourceInfoProvider) {
	// For generic send
	chunkSize := s.ChunkSize()
	numChunks := s.NumChunks()
	// For upload
	var md5Channel chan<- []byte
	var prefetchErr error
	var chunkReader common.SingleChunkReader
	ps := common.PrologueState{}
	// Hash the data as it is read; a null hasher keeps the code path uniform
	// when the user did not request MD5s.
	var md5Hasher hash.Hash
	if jptm.ShouldPutMd5() {
		md5Hasher = md5.New()
	} else {
		md5Hasher = common.NewNullHasher()
	}
	safeToUseHash := true
	if srcInfoProvider.IsLocal() {
		md5Channel = s.(uploader).Md5Channel()
		defer close(md5Channel) // closing without sending signals that no hash is coming
	}
	chunkIDCount := int32(0)
	// The isDummyChunkInEmptyFile condition guarantees that even a zero-byte file
	// schedules exactly one (dummy) chunk, as required by the jptm chunk accounting.
	for startIndex := int64(0); startIndex < srcSize || isDummyChunkInEmptyFile(startIndex, srcSize); startIndex += int64(chunkSize) {
		id := common.NewChunkID(srcPath, startIndex)
		adjustedChunkSize := int64(chunkSize)
		// compute actual size of the chunk (the final chunk may be shorter)
		if startIndex+int64(chunkSize) > srcSize {
			adjustedChunkSize = srcSize - startIndex
		}
		if srcInfoProvider.IsLocal() {
			// create reader and prefetch the data into it
			chunkReader = createPopulatedChunkReader(jptm, sourceFileFactory, id, adjustedChunkSize, srcFile)
			// Wait until we have enough RAM, and when we do, prefetch the data for this chunk.
			prefetchErr = chunkReader.BlockingPrefetch(srcFile, false)
			if prefetchErr == nil {
				chunkReader.WriteBufferTo(md5Hasher)
				ps = chunkReader.GetPrologueState()
			} else {
				safeToUseHash = false // because we've missed a chunk
			}
		}
		// If this is the very first chunk, do special init steps
		if startIndex == 0 {
			// Run prologue before first chunk is scheduled.
			// If file is not local, we'll get no leading bytes, but we still run the prologue in case
			// there's other initialization to do in the sender.
			s.Prologue(ps)
		}
		// schedule the chunk job/msg
		jptm.LogChunkStatus(id, common.EWaitReason.WorkerGR())
		isWholeFile := numChunks == 1
		var cf chunkFunc
		if srcInfoProvider.IsLocal() {
			if prefetchErr == nil {
				cf = s.(uploader).GenerateUploadFunc(id, chunkIDCount, chunkReader, isWholeFile)
			} else {
				_ = chunkReader.Close()
				// Our jptm logic currently requires us to schedule every chunk, even if we know there's an error,
				// so we schedule a func that will just fail with the given error
				cf = createSendToRemoteChunkFunc(jptm, id, func() { jptm.FailActiveSend("chunk data read", prefetchErr) })
			}
		} else {
			cf = s.(s2sCopier).GenerateCopyFunc(id, chunkIDCount, adjustedChunkSize, isWholeFile)
		}
		jptm.ScheduleChunks(cf)
		chunkIDCount++
	}
	// sanity check to verify the number of chunks scheduled
	if chunkIDCount != int32(numChunks) {
		panic(fmt.Errorf("difference in the number of chunk calculated %v and actual chunks scheduled %v for src %s of size %v", numChunks, chunkIDCount, srcPath, srcSize))
	}
	if srcInfoProvider.IsLocal() && safeToUseHash {
		md5Channel <- md5Hasher.Sum(nil) // hand the finished hash to the sender
	}
}
// createPopulatedChunkReader builds the reader for a single chunk.
// Each chunk reader also receives a factory for re-opening the source file, in case it
// needs to repeat its part of the file read later (when doing a retry).
// Note: the reader created here works with a single chunk only. (That's in contrast with
// downloads, where we have to use an object that encompasses the whole file, so that it
// can put the chunks back into order. We don't have that requirement here.)
func createPopulatedChunkReader(jptm IJobPartTransferMgr, sourceFileFactory common.ChunkReaderSourceFactory, id common.ChunkID, adjustedChunkSize int64, srcFile common.CloseableReaderAt) common.SingleChunkReader {
	return common.NewSingleChunkReader(
		jptm.Context(),
		sourceFileFactory,
		id,
		adjustedChunkSize,
		jptm.ChunkStatusLogger(),
		jptm,
		jptm.SlicePool(),
		jptm.CacheLimiter())
}
// isDummyChunkInEmptyFile reports whether this chunk is the placeholder chunk
// scheduled for a zero-length file (start index 0 in a file of size 0).
func isDummyChunkInEmptyFile(startIndex int64, fileSize int64) bool {
	if fileSize != 0 {
		return false
	}
	return startIndex == 0
}
// epilogueWithCleanupSendToRemote completes an upload or S2S copy transfer.
// It handles both success and failure: verifies the source was not modified
// during the transfer (where that check applies), runs the sender's epilogue,
// sets the final transfer status, logs the outcome, and reports the transfer done.
func epilogueWithCleanupSendToRemote(jptm IJobPartTransferMgr, s ISenderBase, sip ISourceInfoProvider) {
	if jptm.TransferStatus() > 0 {
		if _, isS2SCopier := s.(s2sCopier); sip.IsLocal() || (isS2SCopier && jptm.Info().S2SSourceChangeValidation) {
			// Check the source to see if it was changed during transfer. If it was, mark the transfer as failed.
			lmt, err := sip.GetLastModifiedTime()
			if err != nil {
				jptm.FailActiveSend("epilogueWithCleanupSendToRemote", err)
			} else if !lmt.UTC().Equal(jptm.LastModifiedTime().UTC()) {
				// Fix 1: compare time.Time values with Equal, not != — the != operator also
				// compares the monotonic-clock reading and the Location, so two identical
				// instants can spuriously compare unequal.
				// Fix 2: only compare when GetLastModifiedTime succeeded; previously a failed
				// lookup also compared the zero-value lmt and reported a second, misleading
				// "source modified during transfer" failure.
				jptm.FailActiveSend("epilogueWithCleanupSendToRemote", errors.New("source modified during transfer"))
			}
		}
	}

	s.Epilogue()

	// TODO: finalize and wrap in functions whether 0 is included or excluded in status comparisons
	if jptm.TransferStatus() == 0 {
		panic("think we're finished but status is notStarted")
	}

	// note that we do not really know whether the context was canceled because of an error, or because the user asked for it
	// if was an intentional cancel, the status is still "in progress", so we are still counting it as pending
	// we leave these transfer status alone
	// in case of errors, the status was already set, so we don't need to do anything here either
	//
	// it is entirely possible that all the chunks were finished, but then by the time we get to this line
	// the context is canceled. In this case, a completely transferred file would not be marked "completed".
	// it's definitely a case that we should be aware of, but given how rare it is, and how low the impact (the user can just resume), we don't have to do anything more to it atm.
	if jptm.TransferStatus() > 0 && !jptm.WasCanceled() {
		// We know all chunks are done (because this routine was called)
		// and we know the transfer didn't fail (because just checked its status above and made sure the context was not canceled),
		// so it must have succeeded. So make sure its not left "in progress" state
		jptm.SetStatus(common.ETransferStatus.Success())

		// Final logging
		if jptm.ShouldLog(pipeline.LogInfo) { // TODO: question: can we remove these ShouldLogs? Aren't they inside Log?
			if _, ok := s.(s2sCopier); ok {
				jptm.Log(pipeline.LogInfo, "COPY SUCCESSFUL")
			} else if _, ok := s.(uploader); ok {
				jptm.Log(pipeline.LogInfo, "UPLOAD SUCCESSFUL")
			} else {
				panic("invalid state: epilogueWithCleanupSendToRemote should be used by COPY and UPLOAD")
			}
		}
		if jptm.ShouldLog(pipeline.LogDebug) {
			jptm.Log(pipeline.LogDebug, "Finalizing Transfer")
		}
	} else {
		if jptm.ShouldLog(pipeline.LogDebug) {
			jptm.Log(pipeline.LogDebug, "Finalizing Transfer Cancellation/Failure")
		}
	}

	// successful or unsuccessful, it's definitely over
	jptm.ReportTransferDone()
}
<file_sep>import json
import os
import shutil
import time
import urllib
from collections import namedtuple
import utility as util
import unittest
# Temporary tests (mostly copy-pasted from blob tests) to guarantee simple sync scenarios still work
# TODO Replace with better tests in the future
class Blob_Sync_User_Scenario(unittest.TestCase):
    """End-to-end scenarios for the azcopy ``sync`` command between the local
    file system and Blob Storage, driven through the ``util`` helper module.
    """

    def test_sync_single_blob(self):
        """Upload a 1KB file, then sync it blob->local and local->blob."""
        # create file of size 1KB.
        filename = "test_1kb_blob_sync.txt"
        file_path = util.create_test_file(filename, 1024)
        blob_path = util.get_resource_sas(filename)

        # Upload 1KB file using azcopy.
        src = file_path
        dest = blob_path
        result = util.Command("cp").add_arguments(src).add_arguments(dest). \
            add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Verifying the uploaded blob.
        # the resource local path should be the first argument for the azcopy validator.
        # the resource sas should be the second argument for azcopy validator.
        resource_url = util.get_resource_sas(filename)
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(resource_url).execute_azcopy_verify()
        self.assertTrue(result)

        # Sync 1KB file to local using azcopy.
        src = blob_path
        dest = file_path
        result = util.Command("sync").add_arguments(src).add_arguments(dest). \
            add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Sync 1KB file to blob using azcopy.
        # reset local file lmt first (recreating the file bumps its last-modified time,
        # so the sync sees the local copy as newer and actually transfers it)
        util.create_test_file(filename, 1024)
        src = file_path
        dest = blob_path
        result = util.Command("sync").add_arguments(src).add_arguments(dest). \
            add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

    def test_sync_entire_directory(self):
        """Upload a directory tree of 20 files, then sync it blob->local and local->blob."""
        dir_name = "dir_sync_test"
        dir_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-directory inside directory
        sub_dir_name = os.path.join(dir_name, "sub_dir_sync_test")
        util.create_test_n_files(1024, 10, sub_dir_name)

        # upload the directory with 20 files
        # upload the directory
        # execute azcopy command
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator.
        vdir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_path).add_arguments(vdir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # sync to local
        src = vdir_sas
        dst = dir_path
        result = util.Command("sync").add_arguments(src).add_arguments(dst).add_flags("log-level", "info")\
            .execute_azcopy_copy_command()
        self.assertTrue(result)

        # sync back to blob after recreating the files (fresh lmt forces the transfer)
        util.create_test_n_files(1024, 10, sub_dir_name)
        src = dir_path
        dst = vdir_sas
        result = util.Command("sync").add_arguments(src).add_arguments(dst).add_flags("log-level", "info") \
            .execute_azcopy_copy_command()
        self.assertTrue(result)
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
chk "gopkg.in/check.v1"
)
// genericFilterSuite groups the include/exclude filter tests for gocheck.
type genericFilterSuite struct{}

// register the suite with the gocheck runner
var _ = chk.Suite(&genericFilterSuite{})
// TestIncludeFilter verifies that an include filter built from a semicolon-separated
// pattern list accepts matching names and rejects everything else.
func (s *genericFilterSuite) TestIncludeFilter(c *chk.C) {
	// build a single include filter from the raw pattern string
	raw := rawSyncCmdArgs{}
	filter := buildIncludeFilters(raw.parsePatterns("*.pdf;*.jpeg;exactName"))[0]

	// names that must be accepted
	for _, name := range []string{"bla.pdf", "fancy.jpeg", "socool.jpeg.pdf", "exactName"} {
		c.Assert(filter.doesPass(storedObject{name: name}), chk.Equals, true)
	}

	// names that must be rejected
	for _, name := range []string{"bla.pdff", "fancyjpeg", "socool.jpeg.pdf.wut", "eexactName"} {
		c.Assert(filter.doesPass(storedObject{name: name}), chk.Equals, false)
	}
}
// TestExcludeFilter verifies that exclude filters drop matching names and let
// every other name through to the processor.
func (s *genericFilterSuite) TestExcludeFilter(c *chk.C) {
	// build the exclude filters from the raw pattern string
	raw := rawSyncCmdArgs{}
	excludeFilterList := buildExcludeFilters(raw.parsePatterns("*.pdf;*.jpeg;exactName"))

	// run one name through the filters and assert how many records reached the processor
	expectRecords := func(name string, expected int) {
		dummyProcessor := &dummyProcessor{}
		err := processIfPassedFilters(excludeFilterList, storedObject{name: name}, dummyProcessor.process)
		c.Assert(err, chk.IsNil)
		c.Assert(len(dummyProcessor.record), chk.Equals, expected)
	}

	// names that match no exclude pattern must reach the processor
	for _, name := range []string{"bla.pdfe", "fancy.jjpeg", "socool.png", "eexactName"} {
		expectRecords(name, 1)
	}

	// names that match an exclude pattern must be dropped
	for _, name := range []string{"bla.pdf", "fancy.jpeg", "socool.jpeg.pdf", "exactName"} {
		expectRecords(name, 0)
	}
}
<file_sep># Change Log
## Version XX.XX.XX
### Bug fix
1. Added an error when attempting to use Azure Files without a SAS token (invalid auth configuration)
1. AzCopy v10 now outputs a sensible error & warning when attempting to authenticate a storage account business-to-business
1. `--log-level=none` now drops no logs, and has a listing in `--help`
## Version 10.1.2
### Breaking change
1. Jobs created with earlier releases cannot be resumed with this release. We recommend
you update to this release only when you have no partially-completed jobs that you want to resume.
### Bug fix
1. Files with `Content-Encoding: gzip` are now downloaded in compressed form. Previous versions tried to save a
decompressed version of the file. But they incorrectly truncated it at the original _compressed_ length, so the
downloaded file was not complete.
By changing AzCopy to save the compressed version, that problem is solved, and Content-MD5 checks now work for such files. (It is
assumed that the Content-MD5 hash is the hash of the _compressed_ file.)
### New features
1. Headers for Content-Disposition, Content-Language and Cache-Control can now be set when uploading
files to Blob Storage and to Azure Files. Run `azcopy copy --help` to see command line parameter
information, including those needed to set the new headers.
1. On-screen job summary is output to the log file at end of job, so that the log will include those summary statistics.
## Version 10.1.1
### Bug fixes
1. Fixed typo in local traverser (error handling in walk).
1. Fixed memory alignment issue for atomic functions on 32 bit system.
## Version 10.1.0 (GA)
### Breaking changes
1. The `--block-size` parameter has been replaced by `--block-size-mb`. The old parameter took a number of _bytes_; the
new one takes a number of Megabytes (MiB).
1. The following command line parameters have been renamed, for clarity
* `--output` is now `--output-type`
* `--md5-validation` is now called `--check-md5`
* `--s2s-source-change-validation` is now called `--s2s-detect-source-changed`
    * `--s2s-invalid-metadata-handle` is now called `--s2s-handle-invalid-metadata`
* `--quota` (in the `make` command) is now called `--quota-gb`. Note that the values were always in GB, the new name
simply clarifies that fact
### New features
1. AzCopy is now able to be configured to use older API versions. This enables (limited) support for Azure Stack.
1. Listing command now shows file sizes.
### Bug fixes
1. AzCopy v10 now works correctly with ADLS Gen 2 folders that contain spaces in their names.
1. When cancelling with CRTL-C, status of in-progress transfers is now correctly recorded.
1. For security, the Service-to-Service (S2S) feature will only work if both the source and destination connections are
HTTPS.
1. Use of the `--overwrite` parameter is clarified in the in-application help.
1. Fixed incorrect behavior with setting file descriptor limits on platforms including OS X and BSD.
1. On Linux and OS X, log files are now created with same file permissions as all other files created by AzCopy.
1. ThirdPartyNotice.txt is updated.
1. Load DLL in a more secure manner compatible with Go's sysdll registration.
1. Fixed support for relative paths and shorthands.
1. Fixed bug in pattern matching for blob download when recursive is off.
## Version 10.0.9 (Release Candidate)
### Breaking changes
1. For creating MD5 hashes when uploading, version 10.x now has the OPPOSITE default to version
AzCopy 8.x. Specifically, as of version 10.0.9, MD5 hashes are NOT created by default. To create
   Content-MD5 hashes when uploading, you must now specify `--put-md5` on the command line.
### New features
1. Can migrate data directly from Amazon Web Services (AWS). In this high-performance data path
the data is read directly from AWS by the Azure Storage service. It does not need to pass through
   the machine running AzCopy. The copy happens synchronously, so you can see its exact progress.
1. Can migrate data directly from Azure Files or Azure Blobs (any blob type) to Azure Blobs (any
blob type). In this high-performance data path the data is read directly from the source by the
Azure Storage service. It does not need to pass through the machine running AzCopy. The copy
   happens synchronously, so you can see its exact progress.
1. Sync command prompts with 4 options about deleting unneeded files from the target: Yes, No, All or
None. (Deletion only happens if the `--delete-destination` flag is specified).
1. Can download to /dev/null. This throws the data away - but is useful for testing raw network
   performance unconstrained by disk; and also for validating MD5 hashes in bulk (when run in a cloud
VM in the same region as the Storage account)
### Bug fixes
1. Fixed memory leak when downloading large files
1. Fixed performance when downloading a single large file
1. Fixed bug with "too many open files" on Linux
1. Fixed memory leak when uploading sparse files (files with big blocks of zeros) to Page Blobs and
Azure Files.
1. Fixed issue where application crashed after being throttled by Azure Storage Service. (The
primary fix here is for Page Blobs, but a secondary part of the fix also helps with Block Blobs.)
1. Fixed functionality and usability issues with `remove` command
1. Improved performance for short-duration jobs (e.g. those lasting less than a minute)
1. Prevent unnecessary error message that sometimes appeared when cancelling a job
1. Various improvements to the online help and error messages.
## Version 10.0.8:
1. Rewrote sync command to eliminate numerous bugs and improve usability (see wiki for details)
1. Implemented various improvements to memory management
1. Added MD5 validation support (available options: NoCheck, LogOnly, FailIfDifferent, FailIfDifferentOrMissing)
1. Added last modified time checks for source to guarantee transfer integrity
1. Formalized outputs in JSON and elevated the output flag to the root level
1. Eliminated outputs to STDERR (for new version notifications), which were causing problems for certain CI systems
1. Improved log format for Windows
1. Optimized plan file sizes
1. Improved command line parameter names as follows (to be consistent with naming pattern of other parameters):
1. fromTo -> from-to
1. blobType -> blob-type
1. excludedBlobType -> excluded-blob-type
1. outputRaw (in "list" command) -> output
1. stdIn-enable (reserved for internal use) -> stdin-enable
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"github.com/Azure/azure-storage-azcopy/common"
"sync/atomic"
"time"
)
// pacerConsumer is used by callers whose activity must be controlled to a certain pace
type pacerConsumer interface {
	// RequestRightToSend blocks until the caller may send bytesToSend bytes,
	// or returns the context's error if ctx is done first.
	RequestRightToSend(ctx context.Context, bytesToSend int64) error
	// Close stops the pacer and releases its resources.
	Close() error
}

const (
	// How long to sleep in the loop that puts tokens into the bucket
	bucketFillSleepDuration = time.Duration(float32(time.Second) * 0.1)
	// How long to sleep when reading from the bucket and finding there's not enough tokens
	bucketDrainSleepDuration = time.Duration(float32(time.Second) * 0.5)
	// Controls the max amount by which the contents of the token bucket can build up, unused.
	maxSecondsToOverpopulateBucket = 5 // suitable for coarse grained, but not for fine-grained pacing
)
// tokenBucketPacer allows us to control the pace of an activity, using a basic token bucket algorithm.
// The target rate is fixed, but can be modified at any time through SetTargetBytesPerSecond
type tokenBucketPacer struct {
	// Both atomic* fields are accessed only via sync/atomic. They are kept at the
	// start of the struct, which keeps them 64-bit aligned on 32-bit platforms as
	// required by sync/atomic.
	atomicTokenBucket          int64 // current token balance, in bytes
	atomicTargetBytesPerSecond int64 // target pace; read each refill cycle
	expectedBytesPerRequest    int64 // typical request size; seeds the bucket and floors its cap
	done                       chan struct{} // closed by Close to stop pacerBody
}
// newTokenBucketPacer creates a pacer targeting bytesPerSecond and starts its
// background refill goroutine, which runs until ctx is done (or Close is called).
func newTokenBucketPacer(ctx context.Context, bytesPerSecond int64, expectedBytesPerRequest uint32) *tokenBucketPacer {
	pacer := &tokenBucketPacer{
		// seed the bucket immediately with enough tokens to satisfy one request
		atomicTokenBucket:          int64(expectedBytesPerRequest),
		atomicTargetBytesPerSecond: bytesPerSecond,
		expectedBytesPerRequest:    int64(expectedBytesPerRequest),
		done:                       make(chan struct{}),
	}

	// the pacer runs in a separate goroutine for as long as the ctx lasts
	go pacer.pacerBody(ctx)

	return pacer
}
// RequestRightToSend function is called by goroutines to request right to send a certain amount of bytes.
// It controls their rate by blocking until they are allowed to proceed
func (p *tokenBucketPacer) RequestRightToSend(ctx context.Context, bytesToSend int64) error {
	// Optimistically withdraw the tokens; a negative balance means we overdrew.
	for atomic.AddInt64(&p.atomicTokenBucket, -bytesToSend) < 0 {
		// by taking our desired count we've moved below zero, which means our allocation is not available
		// right now, so put back what we asked for, and wait
		atomic.AddInt64(&p.atomicTokenBucket, bytesToSend)
		select {
		case <-ctx.Done():
			// the caller's context ended before tokens became available
			return ctx.Err()
		case <-time.After(bucketDrainSleepDuration):
			// keep looping
		}
	}
	return nil
}
// Close stops the pacer's background refill goroutine. It always returns nil.
// NOTE: it must be called at most once — closing an already-closed channel panics.
func (p *tokenBucketPacer) Close() error {
	close(p.done)
	return nil
}
// pacerBody runs in its own goroutine, periodically topping up the token bucket
// at the current target rate, until ctx is done or Close is called.
func (p *tokenBucketPacer) pacerBody(ctx context.Context) {
	lastTime := time.Now()
	for {
		select {
		case <-ctx.Done(): // TODO: review use of context here. Alternative is just to insist that user calls Close when done
			return
		case <-p.done:
			return
		default:
		}
		currentTarget := atomic.LoadInt64(&p.atomicTargetBytesPerSecond)
		time.Sleep(bucketFillSleepDuration)
		// release tokens in proportion to the wall-clock time that actually elapsed,
		// rather than the nominal sleep duration, so the achieved rate stays accurate
		elapsedSeconds := time.Since(lastTime).Seconds()
		bytesToRelease := int64(float64(currentTarget) * elapsedSeconds)
		newTokenCount := atomic.AddInt64(&p.atomicTokenBucket, bytesToRelease)

		// If the backlog of unsent bytes is now too great, then trim it back down.
		// Why don't we want a big backlog? Because it limits our ability to accurately control the speed.
		maxAllowedUnsentBytes := int64(float32(currentTarget) * maxSecondsToOverpopulateBucket)
		if maxAllowedUnsentBytes < p.expectedBytesPerRequest {
			maxAllowedUnsentBytes = p.expectedBytesPerRequest // just in case we are very coarse grained at a very slow speed
		}
		if newTokenCount > maxAllowedUnsentBytes {
			// clamp atomically, since consumers may have drained tokens since the add above
			common.AtomicMorphInt64(&p.atomicTokenBucket, func(currentVal int64) (newVal int64, _ interface{}) {
				newVal = currentVal
				if currentVal > maxAllowedUnsentBytes {
					newVal = maxAllowedUnsentBytes
				}
				return
			})
		}

		lastTime = time.Now()
	}
}
// targetBytesPerSecond atomically reads the current target pace.
func (p *tokenBucketPacer) targetBytesPerSecond() int64 {
	return atomic.LoadInt64(&p.atomicTargetBytesPerSecond)
}
// setTargetBytesPerSecond atomically updates the target pace; the new value
// takes effect on the next refill cycle of pacerBody.
func (p *tokenBucketPacer) setTargetBytesPerSecond(value int64) {
	atomic.StoreInt64(&p.atomicTargetBytesPerSecond, value)
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// allow us to iterate through a path pointing to the blob endpoint
type blobTraverser struct {
	rawURL    *url.URL          // source URL: may be a single blob, a virtual directory, or a container
	p         pipeline.Pipeline // pipeline used for all service calls
	ctx       context.Context
	recursive bool // when false, only immediate children of the path are enumerated

	// a generic function to notify that a new stored object has been enumerated
	incrementEnumerationCounter func()
}
// getPropertiesIfSingleBlob fetches the properties of t.rawURL treated as a blob.
// It returns (props, true) when the URL resolves to a real single blob, and
// (nil, false) when the lookup fails or the blob is a folder marker.
func (t *blobTraverser) getPropertiesIfSingleBlob() (*azblob.BlobGetPropertiesResponse, bool) {
	props, err := azblob.NewBlobURL(*t.rawURL, t.p).GetProperties(t.ctx, azblob.BlobAccessConditions{})
	// a failed lookup, or a blob whose metadata marks it as a folder, is not a single blob
	if err != nil || gCopyUtil.doesBlobRepresentAFolder(props.NewMetadata()) {
		return nil, false
	}
	return props, true
}
// traverse enumerates the blob(s) at t.rawURL, applies the given filters, and
// invokes processor for every storedObject that passes. If the URL points at a
// single blob, only that blob is processed; otherwise the container is listed
// flat, page by page, using the blob name as the search prefix.
func (t *blobTraverser) traverse(processor objectProcessor, filters []objectFilter) (err error) {
	blobUrlParts := azblob.NewBlobURLParts(*t.rawURL)
	util := copyHandlerUtil{}

	// check if the url points to a single blob
	blobProperties, isBlob := t.getPropertiesIfSingleBlob()
	if isBlob {
		storedObject := newStoredObject(
			getObjectNameOnly(blobUrlParts.BlobName),
			"", // relative path makes no sense when the full path already points to the file
			blobProperties.LastModified(),
			blobProperties.ContentLength(),
			blobProperties.ContentMD5(),
			blobProperties.BlobType(),
		)

		if t.incrementEnumerationCounter != nil {
			t.incrementEnumerationCounter()
		}

		return processIfPassedFilters(filters, storedObject, processor)
	}

	// get the container URL so that we can list the blobs
	containerRawURL := copyHandlerUtil{}.getContainerUrl(blobUrlParts)
	containerURL := azblob.NewContainerURL(containerRawURL, t.p)

	// get the search prefix to aid in the listing
	// example: for a url like https://test.blob.core.windows.net/test/foo/bar/bla
	// the search prefix would be foo/bar/bla
	searchPrefix := blobUrlParts.BlobName

	// append a slash if it is not already present
	// example: foo/bar/bla becomes foo/bar/bla/ so that we only list children of the virtual directory
	if searchPrefix != "" && !strings.HasSuffix(searchPrefix, common.AZCOPY_PATH_SEPARATOR_STRING) {
		searchPrefix += common.AZCOPY_PATH_SEPARATOR_STRING
	}

	// page through the listing until the service returns an empty marker
	for marker := (azblob.Marker{}); marker.NotDone(); {
		// look for all blobs that start with the prefix
		// TODO optimize for the case where recursive is off
		listBlob, err := containerURL.ListBlobsFlatSegment(t.ctx, marker,
			azblob.ListBlobsSegmentOptions{Prefix: searchPrefix, Details: azblob.BlobListingDetails{Metadata: true}})
		if err != nil {
			return fmt.Errorf("cannot list blobs. Failed with error %s", err.Error())
		}

		// process the blobs returned in this result segment
		for _, blobInfo := range listBlob.Segment.BlobItems {
			// if the blob represents a hdi folder, then skip it
			if util.doesBlobRepresentAFolder(blobInfo.Metadata) {
				continue
			}

			relativePath := strings.TrimPrefix(blobInfo.Name, searchPrefix)

			// if recursive is off, skip blobs below the immediate level
			// (their relative path contains a path separator)
			if !t.recursive && strings.Contains(relativePath, common.AZCOPY_PATH_SEPARATOR_STRING) {
				continue
			}

			storedObject := newStoredObject(
				getObjectNameOnly(blobInfo.Name),
				relativePath,
				blobInfo.Properties.LastModified,
				*blobInfo.Properties.ContentLength,
				blobInfo.Properties.ContentMD5,
				blobInfo.Properties.BlobType,
			)

			if t.incrementEnumerationCounter != nil {
				t.incrementEnumerationCounter()
			}

			processErr := processIfPassedFilters(filters, storedObject, processor)
			if processErr != nil {
				return processErr
			}
		}

		marker = listBlob.NextMarker
	}

	return
}
// newBlobTraverser constructs a blobTraverser over the given blob-endpoint URL.
// incrementEnumerationCounter may be nil when no enumeration accounting is needed.
func newBlobTraverser(rawURL *url.URL, p pipeline.Pipeline, ctx context.Context, recursive bool, incrementEnumerationCounter func()) (t *blobTraverser) {
	t = &blobTraverser{
		rawURL:                      rawURL,
		p:                           p,
		ctx:                         ctx,
		recursive:                   recursive,
		incrementEnumerationCounter: incrementEnumerationCounter,
	}
	return
}
<file_sep>module github.com/Azure/azure-storage-azcopy
require (
github.com/Azure/azure-pipeline-go v0.1.9
github.com/Azure/azure-storage-blob-go v0.6.0
github.com/Azure/azure-storage-file-go v0.0.0-20190108093629-d93e19c84c2a
github.com/Azure/go-autorest v10.15.2+incompatible
github.com/JeffreyRichter/enum v0.0.0-20180725232043-2567042f9cda
github.com/danieljoos/wincred v1.0.1
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/go-ini/ini v1.41.0 // indirect
github.com/inconshreveable/mousetrap v1.0.0
github.com/jiacfan/keychain v0.0.0-20180920053336-f2c902a3d807
github.com/jiacfan/keyctl v0.0.0-20160328205232-988d05162bc5
github.com/kr/pretty v0.1.0
github.com/kr/text v0.1.0
github.com/minio/minio-go v6.0.12+incompatible
github.com/mitchellh/go-homedir v1.0.0 // indirect
github.com/spf13/cobra v0.0.3
github.com/spf13/pflag v1.0.2
github.com/stretchr/testify v1.3.0 // indirect
golang.org/x/crypto v0.0.0-20190103213133-ff983b9c42bc // indirect
golang.org/x/net v0.0.0-20190110200230-915654e7eabc // indirect
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4
golang.org/x/sys v0.0.0-20190109145017-48ac38b7c8cb // indirect
golang.org/x/text v0.3.0 // indirect
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127
)
<file_sep>package cmd
import (
"context"
"encoding/base64"
"errors"
"fmt"
"net/url"
"net/http"
"time"
"strings"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
)
type copyDownloadBlobFSEnumerator common.CopyJobPartOrderRequest
// enumerate inspects the ADLS Gen2 source (single file, directory, or whole
// filesystem), queues one download transfer per file, and dispatches the
// job's final part. Three paths are handled: a single-file source, an
// explicit list-of-files, and a full directory/filesystem listing.
func (e *copyDownloadBlobFSEnumerator) enumerate(cca *cookedCopyCmdArgs) error {
	util := copyHandlerUtil{}
	ctx := context.Background()

	// create blob FS pipeline.
	p, err := createBlobFSPipeline(ctx, e.CredentialInfo)
	if err != nil {
		return err
	}

	// attempt to parse the source url
	sourceURL, err := url.Parse(cca.source)
	if err != nil {
		return errors.New("cannot parse source URL")
	}

	// parse the given source URL into fsUrlParts, which separates the filesystem name and directory/file path
	fsUrlParts := azbfs.NewBfsURLParts(*sourceURL)

	// we do not know if the source is a file or a directory
	// we assume it is a directory and get its properties
	directoryURL := azbfs.NewDirectoryURL(*sourceURL, p)
	props, err := directoryURL.GetProperties(ctx)

	// Case-1: If the source URL is actually a file
	// then we should short-circuit and simply download that file
	if err == nil && strings.EqualFold(props.XMsResourceType(), "file") {
		var destination = ""
		// if the destination is an existing directory, then put the file under it
		// otherwise assume the user has provided a specific path for the destination file
		if util.isPathALocalDirectory(cca.destination) {
			destination = util.generateLocalPath(cca.destination, util.getFileNameFromPath(fsUrlParts.DirectoryOrFilePath))
		} else {
			destination = cca.destination
		}

		fileSize := props.ContentLength()

		// Queue the transfer
		e.addTransfer(common.CopyTransfer{
			Source:           cca.source,
			Destination:      destination,
			LastModifiedTime: e.parseLmt(props.LastModified()),
			SourceSize:       fileSize,
			ContentMD5:       props.ContentMD5(),
		}, cca)

		return e.dispatchFinalPart(cca)
	}

	// the source is not a single file; abort only if the properties error is fatal
	if err != nil {
		if isFatal := handleSingleFileValidationErrorForADLSGen2(err); isFatal {
			return err
		}
	}

	glcm.Info(infoCopyFromDirectoryListOfFiles)

	// Case-2: Source is a filesystem or directory
	// In this case, the destination should be a directory.
	if !gCopyUtil.isPathALocalDirectory(cca.destination) && !strings.EqualFold(cca.destination, common.Dev_Null) {
		return fmt.Errorf("the destination must be an existing directory in this download scenario")
	}

	srcADLSGen2PathURLPartExtension := adlsGen2PathURLPartsExtension{fsUrlParts}
	parentSourcePath := srcADLSGen2PathURLPartExtension.getParentSourcePath()

	// The case when user provide list of files to copy. It is used by internal integration.
	if len(cca.listOfFilesToCopy) > 0 {
		for _, fileOrDir := range cca.listOfFilesToCopy {
			tempURLPartsExtension := srcADLSGen2PathURLPartExtension
			// strip any trailing path separator from the parent path before joining
			if len(parentSourcePath) > 0 && parentSourcePath[len(parentSourcePath)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
				parentSourcePath = parentSourcePath[0 : len(parentSourcePath)-1]
			}

			// Try to see if this is a file path, and download the file if it is.
			// Create the path using the given source and files mentioned with listOfFile flag.
			// For Example:
			// 1. source = "https://sdksampleperftest.dfs.core.windows.net/bigdata" file = "file1.txt" blobPath= "file1.txt"
			// 2. source = "https://sdksampleperftest.dfs.core.windows.net/bigdata/dir-1" file = "file1.txt" blobPath= "dir-1/file1.txt"
			filePath := fmt.Sprintf("%s%s%s", parentSourcePath, common.AZCOPY_PATH_SEPARATOR_STRING, fileOrDir)
			if len(filePath) > 0 && filePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
				filePath = filePath[1:]
			}
			tempURLPartsExtension.DirectoryOrFilePath = filePath
			fileURL := azbfs.NewFileURL(tempURLPartsExtension.URL(), p)
			if fileProperties, err := fileURL.GetProperties(ctx); err == nil && strings.EqualFold(fileProperties.XMsResourceType(), "file") {
				// file exists
				fileSize := fileProperties.ContentLength()

				// assembling the file relative path
				fileRelativePath := fileOrDir
				// ensure there is no additional AZCOPY_PATH_SEPARATOR_CHAR at the start of file name
				if len(fileRelativePath) > 0 && fileRelativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
					fileRelativePath = fileRelativePath[1:]
				}
				// check for the special character in blob relative path and get path without special character.
				fileRelativePath = util.blobPathWOSpecialCharacters(fileRelativePath)

				srcURL := tempURLPartsExtension.createADLSGen2PathURLFromFileSystem(filePath)
				e.addTransfer(common.CopyTransfer{
					Source:           srcURL.String(),
					Destination:      util.generateLocalPath(cca.destination, fileRelativePath),
					LastModifiedTime: e.parseLmt(fileProperties.LastModified()),
					SourceSize:       fileSize,
					ContentMD5:       fileProperties.ContentMD5(),
				}, cca)
				continue
			}
			// not a file; directories are only followed when --recursive is set
			if !cca.recursive {
				glcm.Info(fmt.Sprintf("error fetching properties of %s. Either it is a directory or getting the file properties failed. For directories try using the recursive flag.", filePath))
				continue
			}

			// Try to see if this is a directory, and download the directory if it is.
			dirURL := azbfs.NewDirectoryURL(tempURLPartsExtension.URL(), p)
			err := enumerateFilesInADLSGen2Directory(
				ctx,
				dirURL,
				func(fileItem azbfs.Path) bool { // filter always return true in this case
					return true
				},
				func(fileItem azbfs.Path) error {
					relativePath := strings.Replace(*fileItem.Name, parentSourcePath, "", 1)
					if len(relativePath) > 0 && relativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
						relativePath = relativePath[1:]
					}
					relativePath = util.blobPathWOSpecialCharacters(relativePath)

					return e.addTransfer(common.CopyTransfer{
						Source:           dirURL.FileSystemURL().NewDirectoryURL(*fileItem.Name).String(), // This point to file
						Destination:      util.generateLocalPath(cca.destination, relativePath),
						LastModifiedTime: e.parseLmt(*fileItem.LastModified),
						SourceSize:       *fileItem.ContentLength,
						// NOTE(review): this passes the top-level directoryURL rather than the
						// dirURL being enumerated — confirm which URL getContentMd5 expects here.
						ContentMD5: getContentMd5(ctx, directoryURL, fileItem, cca.md5ValidationOption),
					}, cca)
				},
			)
			if err != nil {
				glcm.Info(fmt.Sprintf("cannot list files inside directory %s mentioned", filePath))
				continue
			}
		}
		// If there are no transfer to queue up, exit with message
		if len(e.Transfers) == 0 {
			glcm.Error(fmt.Sprintf("no transfer queued for copying data from %s to %s", cca.source, cca.destination))
			return nil
		}
		// dispatch the JobPart as Final Part of the Job
		err = e.dispatchFinalPart(cca)
		if err != nil {
			return err
		}
		return nil
	}

	// Following is original code path, which handles the case when list of files is not specified
	// if downloading entire file system, then create a local directory with the file system's name
	if fsUrlParts.DirectoryOrFilePath == "" {
		cca.destination = util.generateLocalPath(cca.destination, fsUrlParts.FileSystemName)
	}

	// initialize an empty continuation marker
	continuationMarker := ""

	// list out the directory and download its files
	// loop will continue unless the continuationMarker received in the response is empty
	for {
		dListResp, err := directoryURL.ListDirectorySegment(ctx, &continuationMarker, true)
		if err != nil {
			return fmt.Errorf("error listing the files inside the given source url %s: %s", directoryURL.String(), err.Error())
		}

		// get only the files inside the given path
		// TODO: currently empty directories are not created, consider creating them
		for _, path := range dListResp.Files() {
			// Queue the transfer
			e.addTransfer(common.CopyTransfer{
				Source:           directoryURL.FileSystemURL().NewDirectoryURL(*path.Name).String(),
				Destination:      util.generateLocalPath(cca.destination, util.getRelativePath(fsUrlParts.DirectoryOrFilePath, *path.Name)),
				LastModifiedTime: e.parseLmt(*path.LastModified),
				SourceSize:       *path.ContentLength,
				ContentMD5:       getContentMd5(ctx, directoryURL, path, cca.md5ValidationOption),
			}, cca)
		}

		// update the continuation token for the next list operation
		continuationMarker = dListResp.XMsContinuation()

		// determine whether listing should be done
		if continuationMarker == "" {
			break
		}
	}

	// dispatch the JobPart as Final Part of the Job
	err = e.dispatchFinalPart(cca)
	if err != nil {
		return err
	}
	return nil
}
// getContentMd5 obtains the MD5 hash of the given file for later download
// validation. It returns nil when MD5 checking is disabled, or when the hash
// cannot be obtained; a missing MD5 is dealt with at validation time, not here.
func getContentMd5(ctx context.Context, directoryURL azbfs.DirectoryURL, file azbfs.Path, md5ValidationOption common.HashValidationOption) []byte {
	// No point fetching the hash if the user opted out of checking it.
	if md5ValidationOption == common.EHashValidationOption.NoCheck() {
		return nil
	}

	// Prefer the value the service already returned in the PathList response,
	// converting it from its base64 wire format.
	if file.ContentMD5Base64 != nil {
		if decoded, err := base64.StdEncoding.DecodeString(*file.ContentMD5Base64); err == nil {
			return decoded
		}
		// Treat an undecodable value the same as "no MD5 available"; if
		// validation is set to fail on missing hashes, the transfer will
		// fail later at MD5-check time.
		return nil
	}

	// Interim fallback: make an extra round trip for the file's properties.
	// TODO: remove this in a future release, once the Service is known to
	// always return MD5s in the PathListResponse (as at mid-Feb 2019 it does
	// not, so this code is still required).
	props, err := directoryURL.FileSystemURL().NewDirectoryURL(*file.Name).GetProperties(ctx)
	if err != nil {
		return nil // act as if there was no content MD5
	}
	return props.ContentMD5()
}
// parseLmt converts an HTTP-format last-modified-time string into a time.Time.
// When the string is empty or fails to parse, the current time is returned
// instead, so callers always receive a usable timestamp.
func (e *copyDownloadBlobFSEnumerator) parseLmt(lastModifiedTime string) time.Time {
	if lastModifiedTime != "" {
		if parsed, err := time.Parse(http.TimeFormat, lastModifiedTime); err == nil {
			return parsed
		}
	}
	// fall back to "now" for absent or unparsable values
	return time.Now()
}
// addTransfer queues a single transfer onto this enumerator's job part order.
func (e *copyDownloadBlobFSEnumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	// if we are downloading to dev null, we must point to devNull itself, rather than some file under it
	// (the per-file Destination is cleared so only the DestinationRoot, i.e. devNull, is used)
	if strings.EqualFold(e.DestinationRoot, common.Dev_Null) {
		transfer.Destination = ""
	}
	return addTransfer((*common.CopyJobPartOrderRequest)(e), transfer, cca)
}
// dispatchFinalPart sends the accumulated transfers as the final part of the
// job, delegating to the package-level dispatchFinalPart helper.
func (e *copyDownloadBlobFSEnumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	return dispatchFinalPart((*common.CopyJobPartOrderRequest)(e), cca)
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common
// GetBlocksRoundedUp returns the number of blocks of the given block size
// needed to hold size bytes, rounded up to cover any partial final block.
// Note: the result is truncated to uint16, so it is only meaningful while
// size/blockSize fits in 16 bits.
func GetBlocksRoundedUp(size uint64, blockSize uint64) uint16 {
	return uint16(size/blockSize) + Iffuint16((size%blockSize) == 0, 0, 1)
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// inline if functions
// Each Iff* helper is a typed ternary: it returns trueVal when test is true,
// otherwise falseVal. (Both arguments are always evaluated by the caller.)

func IffError(test bool, trueVal, falseVal error) error {
	if test {
		return trueVal
	}
	return falseVal
}

func IffString(test bool, trueVal, falseVal string) string {
	if test {
		return trueVal
	}
	return falseVal
}

// return type was declared as byte (an alias of uint8); spelled uint8 here
// for consistency with the parameter types — no behavioral change.
func IffUint8(test bool, trueVal, falseVal uint8) uint8 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffint8(test bool, trueVal, falseVal int8) int8 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffuint16(test bool, trueVal, falseVal uint16) uint16 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffint16(test bool, trueVal, falseVal int16) int16 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffuint32(test bool, trueVal, falseVal uint32) uint32 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffint32(test bool, trueVal, falseVal int32) int32 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffuint64(test bool, trueVal, falseVal uint64) uint64 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffint64(test bool, trueVal, falseVal int64) int64 {
	if test {
		return trueVal
	}
	return falseVal
}

func Iffloat64(test bool, trueVal, falseVal float64) float64 {
	if test {
		return trueVal
	}
	return falseVal
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"sync/atomic"
)
// s2sPacer is currently only used to calculate average transfer speed for
// service-to-service transfers; it only accumulates a byte count.
type s2sPacer struct {
	p *pacer // underlying pacer whose bytesTransferred counter is updated
}
// newS2SPacer creates an S2S pacer for speed calculation, wrapping the given
// pacer. Panics when p is nil, since the wrapped counter is mandatory.
func newS2SPacer(p *pacer) *s2sPacer {
	if p == nil {
		panic("p must not be nil")
	}
	return &s2sPacer{p: p}
}
// Done records n bytes as transferred, atomically adding them to the wrapped
// pacer's running total (used for average-speed calculation).
func (s2sp *s2sPacer) Done(n int64) {
	// n is already int64; the previous int64(n) conversion was redundant
	atomic.AddInt64(&s2sp.p.bytesTransferred, n)
}
<file_sep>[[constraint]]
name = "github.com/Azure/azure-pipeline-go"
version = "0.1.9"
[[constraint]]
name = "github.com/Azure/azure-storage-file-go"
version = "0.4.1"
[[constraint]]
name = "github.com/Azure/go-autorest"
version = "11.3.1"
[[constraint]]
branch = "master"
name = "github.com/JeffreyRichter/enum"
[[constraint]]
name = "github.com/danieljoos/wincred"
version = "1.0.1"
[[constraint]]
name = "github.com/jiacfan/keychain"
version = "0.2.0"
[[constraint]]
name = "github.com/jiacfan/keyctl"
version = "0.2.0"
[[constraint]]
name = "github.com/minio/minio-go"
version = "6.0.12"
[[constraint]]
name = "github.com/spf13/cobra"
version = "0.0.3"
[[constraint]]
branch = "master"
name = "golang.org/x/sync"
[[constraint]]
branch = "v1"
name = "gopkg.in/check.v1"
[prune]
go-tests = true
unused-packages = true
[[constraint]]
name = "github.com/dgrijalva/jwt-go"
version = "3.2.0"
[[constraint]]
name = "github.com/go-ini/ini"
version = "1.42.0"
[[constraint]]
name = "github.com/inconshreveable/mousetrap"
version = "1.0.0"
[[constraint]]
name = "github.com/kr/pretty"
version = "0.1.0"
[[constraint]]
name = "github.com/stretchr/testify"
version = "1.3.0"
[[constraint]]
name = "github.com/Azure/azure-storage-blob-go"
version = "v0.6.0"
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common
// EnvironmentVariable describes a named environment variable that AzCopy
// reads, together with an optional default value and a user-facing description.
type EnvironmentVariable struct {
	Name         string // the environment variable's name, e.g. "AZCOPY_LOG_LOCATION"
	DefaultValue string // value assumed when the variable is unset (may be empty)
	Description  string // user-facing help text; empty for internal-only variables
}
// VisibleEnvironmentVariables lists the publicly documented environment
// variables. This array needs to be updated when a new public environment
// variable is added.
var VisibleEnvironmentVariables = []EnvironmentVariable{
	EEnvironmentVariable.ConcurrencyValue(),
	EEnvironmentVariable.LogLocation(),
	EEnvironmentVariable.AWSAccessKeyID(),
	EEnvironmentVariable.AWSSecretAccessKey(),
	EEnvironmentVariable.ShowPerfStates(),
	EEnvironmentVariable.PacePageBlobs(),
	EEnvironmentVariable.DefaultServiceApiVersion(),
}

// EEnvironmentVariable is the pseudo-enum through which the known environment
// variable definitions are accessed, e.g. EEnvironmentVariable.LogLocation().
var EEnvironmentVariable = EnvironmentVariable{}
// ConcurrencyValue is the variable controlling how many transfer goroutines run.
func (EnvironmentVariable) ConcurrencyValue() EnvironmentVariable {
	return EnvironmentVariable{
		Name:        "AZCOPY_CONCURRENCY_VALUE",
		Description: "Overrides how many Go Routines work on transfers. By default, this number is determined based on the number of logical cores on the machine.",
	}
}

// LogLocation is the variable overriding where log files are written.
func (EnvironmentVariable) LogLocation() EnvironmentVariable {
	return EnvironmentVariable{
		Name:        "AZCOPY_LOG_LOCATION",
		Description: "Overrides where the log files are stored, to avoid filling up a disk.",
	}
}

// AccountName carries a storage account name (no public description).
func (EnvironmentVariable) AccountName() EnvironmentVariable {
	return EnvironmentVariable{Name: "ACCOUNT_NAME"}
}

// AccountKey carries a storage account key (no public description).
func (EnvironmentVariable) AccountKey() EnvironmentVariable {
	return EnvironmentVariable{Name: "ACCOUNT_KEY"}
}

// ProfileCPU enables CPU profiling output (internal/diagnostic use).
func (EnvironmentVariable) ProfileCPU() EnvironmentVariable {
	return EnvironmentVariable{Name: "AZCOPY_PROFILE_CPU"}
}

// ProfileMemory enables memory profiling output (internal/diagnostic use).
func (EnvironmentVariable) ProfileMemory() EnvironmentVariable {
	return EnvironmentVariable{Name: "AZCOPY_PROFILE_MEM"}
}
// PacePageBlobs toggles automatic page-blob throughput pacing.
func (EnvironmentVariable) PacePageBlobs() EnvironmentVariable {
	return EnvironmentVariable{
		Name:        "AZCOPY_PACE_PAGE_BLOBS",
		Description: "Should throughput for page blobs automatically be adjusted to match Service limits? Default is true. Set to 'false' to disable",
	}
}

// ShowPerfStates enables chunk-state counts in on-screen output.
func (EnvironmentVariable) ShowPerfStates() EnvironmentVariable {
	return EnvironmentVariable{
		Name:        "AZCOPY_SHOW_PERF_STATES",
		Description: "If set, to anything, on-screen output will include counts of chunks by state",
	}
}

// AWSAccessKeyID supplies the S3 access key for service-to-service copies.
func (EnvironmentVariable) AWSAccessKeyID() EnvironmentVariable {
	return EnvironmentVariable{
		Name:        "AWS_ACCESS_KEY_ID",
		Description: "The AWS access key ID for S3 source used in service to service copy.",
	}
}

// AWSSecretAccessKey supplies the S3 secret key for service-to-service copies.
func (EnvironmentVariable) AWSSecretAccessKey() EnvironmentVariable {
	return EnvironmentVariable{
		Name:        "AWS_SECRET_ACCESS_KEY",
		Description: "The AWS secret access key for S3 source used in service to service copy.",
	}
}
// AwsSessionToken is temporarily internally reserved, and not exposed to users.
func (EnvironmentVariable) AwsSessionToken() EnvironmentVariable {
	return EnvironmentVariable{Name: "AWS_SESSION_TOKEN"}
}

// OAuthTokenInfo is only used for internal integration.
func (EnvironmentVariable) OAuthTokenInfo() EnvironmentVariable {
	return EnvironmentVariable{Name: "AZCOPY_OAUTH_TOKEN_INFO"}
}

// CredentialType is only used for internal integration.
func (EnvironmentVariable) CredentialType() EnvironmentVariable {
	return EnvironmentVariable{Name: "AZCOPY_CRED_TYPE"}
}
// DefaultServiceApiVersion overrides the storage service API version, e.g.
// for custom environments such as Azure Stack. Note it has a DefaultValue.
func (EnvironmentVariable) DefaultServiceApiVersion() EnvironmentVariable {
	return EnvironmentVariable{
		Name:         "AZCOPY_DEFAULT_SERVICE_API_VERSION",
		DefaultValue: "2018-03-28",
		Description:  "Overrides the service API version so that AzCopy could accommodate custom environments such as Azure Stack.",
	}
}
<file_sep>package cmd
import (
"context"
"errors"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// copyDownloadBlobEnumerator enumerates blobs at the source and queues the
// corresponding download transfers into its job part order.
type copyDownloadBlobEnumerator common.CopyJobPartOrderRequest

// enumerate discovers the blob(s) referred to by cca.source and queues one
// download transfer per blob, handling three cases in order:
//  1. the source is a single existing blob,
//  2. an explicit list of files (relative to the source) was supplied,
//  3. the source is a container / virtual directory / wildcard pattern.
// The accumulated transfers are dispatched as the final part of the job.
func (e *copyDownloadBlobEnumerator) enumerate(cca *cookedCopyCmdArgs) error {
	util := copyHandlerUtil{}
	ctx := context.WithValue(context.Background(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)
	// Create Pipeline to Get the Blob Properties or List Blob Segment
	p, err := createBlobPipeline(ctx, e.CredentialInfo)
	if err != nil {
		return err
	}
	// attempt to parse the source url
	sourceUrl, err := url.Parse(cca.source)
	if err != nil {
		return errors.New("cannot parse source URL")
	}
	// append the sas at the end of query params.
	sourceUrl = util.appendQueryParamToUrl(sourceUrl, cca.sourceSAS)
	// get the blob parts
	blobUrlParts := azblob.NewBlobURLParts(*sourceUrl)
	// First Check if source blob exists
	// This check is in place to avoid listing of the blobs and matching the given blob against it
	// For example given source is https://<container>/a?<query-params> and there exists other blobs aa and aab
	// Listing the blobs with prefix /a will list other blob as well
	blobUrl := azblob.NewBlobURL(*sourceUrl, p)
	srcBlobURLPartsExtension := blobURLPartsExtension{BlobURLParts: blobUrlParts}
	if srcBlobURLPartsExtension.isBlobSyntactically() {
		blobProperties, err := blobUrl.GetProperties(ctx, azblob.BlobAccessConditions{})
		// If the source blob exists, then queue transfer and return
		// Example: https://<container>/<blob>?<query-params>
		if err == nil {
			// For a single blob, destination provided can be either a directory or file.
			// If the destination is directory, then name of blob is preserved
			// If the destination is file, then blob will be downloaded as the given file name
			// Example1: Downloading https://<container>/a?<query-params> to directory C:\\Users\\User1
			// will download the blob as C:\\Users\\User1\\a
			// Example2: Downloading https://<container>/a?<query-params> to directory C:\\Users\\User1\\b
			// (b is not a directory) will download blob as C:\\Users\\User1\\b
			var blobLocalPath string
			if util.isPathALocalDirectory(cca.destination) {
				blobNameFromUrl := util.blobNameFromUrl(blobUrlParts)
				// check for special characters and get blobName without special character.
				blobNameFromUrl = util.blobPathWOSpecialCharacters(blobNameFromUrl)
				blobLocalPath = util.generateLocalPath(cca.destination, blobNameFromUrl)
			} else {
				blobLocalPath = cca.destination
			}
			// Add the transfer to CopyJobPartOrderRequest
			e.addTransfer(common.CopyTransfer{
				Source:           util.stripSASFromBlobUrl(*sourceUrl).String(),
				Destination:      blobLocalPath,
				LastModifiedTime: blobProperties.LastModified(),
				SourceSize:       blobProperties.ContentLength(),
				ContentMD5:       blobProperties.ContentMD5(),
				BlobType:         blobProperties.BlobType(),
			}, cca)
			// only one transfer for this Job, dispatch the JobPart
			err := e.dispatchFinalPart(cca)
			if err != nil {
				return err
			}
			return nil
		} else {
			if isFatal := handleSingleFileValidationErrorForBlob(err); isFatal {
				return err
			}
		}
	}
	glcm.Info(infoCopyFromContainerDirectoryListOfFiles)
	// Since the given source url doesn't represent an existing blob
	// it is either a container or a virtual directory, so it need to be
	// downloaded to an existing directory
	// Check if the given destination path is a directory or not.
	if !util.isPathALocalDirectory(cca.destination) && !strings.EqualFold(cca.destination, common.Dev_Null) {
		return errors.New("the destination must be an existing directory in this download scenario")
	}
	literalContainerUrl := util.getContainerUrl(blobUrlParts)
	containerUrl := azblob.NewContainerURL(literalContainerUrl, p)
	// Get the source path without the wildcards
	// This is defined since the files mentioned with exclude flag
	// & include flag are relative to the Source
	// If the source has wildcards, then files are relative to the
	// parent source path which is the path of last directory in the source
	// without wildcards
	// For Example: src = "/home/user/dir1" parentSourcePath = "/home/user/dir1"
	// For Example: src = "/home/user/dir*" parentSourcePath = "/home/user"
	// For Example: src = "/home/*" parentSourcePath = "/home"
	parentSourcePath := blobUrlParts.BlobName
	wcIndex := util.firstIndexOfWildCard(parentSourcePath)
	if wcIndex != -1 {
		parentSourcePath = parentSourcePath[:wcIndex]
		pathSepIndex := strings.LastIndex(parentSourcePath, "/")
		if pathSepIndex == -1 {
			parentSourcePath = ""
		} else {
			parentSourcePath = parentSourcePath[:pathSepIndex]
		}
	}
	// If the user has provided us with a list of files to be copied explicitly
	// then there is no need list using the source and then perform pattern matching.
	if len(cca.listOfFilesToCopy) > 0 {
		for _, blob := range cca.listOfFilesToCopy {
			// copy the blobParts in the temporary blobPart since for each blob mentioned in the listOfFilesToCopy flag
			// blobParts will be modified.
			tempBlobUrlParts := blobUrlParts
			if len(parentSourcePath) > 0 && parentSourcePath[len(parentSourcePath)-1] == common.AZCOPY_PATH_SEPARATOR_CHAR {
				parentSourcePath = parentSourcePath[0 : len(parentSourcePath)-1]
			}
			// Create the blobPath using the given source and blobs mentioned with listOfFile flag.
			// For Example:
			// 1. source = "https://sdksampleperftest.blob.core.windows.net/bigdata" blob = "file1.txt" blobPath= "file1.txt"
			// 2. source = "https://sdksampleperftest.blob.core.windows.net/bigdata/dir-1" blob = "file1.txt" blobPath= "dir-1/file1.txt"
			blobPath := fmt.Sprintf("%s%s%s", parentSourcePath, common.AZCOPY_PATH_SEPARATOR_STRING, blob)
			if len(blobPath) > 0 && blobPath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
				blobPath = blobPath[1:]
			}
			tempBlobUrlParts.BlobName = blobPath
			blobURL := azblob.NewBlobURL(tempBlobUrlParts.URL(), p)
			blobProperties, err := blobURL.GetProperties(ctx, azblob.BlobAccessConditions{})
			if err == nil {
				// If the blob represents a folder as per the conditions mentioned in the
				// api doesBlobRepresentAFolder, then skip the blob.
				if util.doesBlobRepresentAFolder(blobProperties.NewMetadata()) {
					continue
				}
				blobRelativePath := strings.Replace(blobPath, parentSourcePath, "", 1)
				if len(blobRelativePath) > 0 && blobRelativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
					blobRelativePath = blobRelativePath[1:]
				}
				// check for the special character in blob relative path and get path without special character.
				blobRelativePath = util.blobPathWOSpecialCharacters(blobRelativePath)
				e.addTransfer(common.CopyTransfer{
					Source:           util.stripSASFromBlobUrl(util.createBlobUrlFromContainer(blobUrlParts, blobPath)).String(),
					Destination:      util.generateLocalPath(cca.destination, blobRelativePath),
					LastModifiedTime: blobProperties.LastModified(),
					SourceSize:       blobProperties.ContentLength(),
					ContentMD5:       blobProperties.ContentMD5(),
					BlobType:         blobProperties.BlobType(),
				}, cca)
				continue
			}
			if !cca.recursive {
				glcm.Info(fmt.Sprintf("error fetching properties of %s. Either it is a directory or getting the blob properties failed. For virtual directories try using the recursive flag", blobPath))
				continue
			}
			// Since the given blob in the listOFFiles flag is not a blob, it can be a virtual directory
			// If the virtual directory doesn't have a path separator at the end of it, then we should append it.
			// This is done to avoid listing blobs which shares the common prefix i.e the virtual directory name.
			// For Example:
			// 1. source = "https://sdksampleperftest.blob.core.windows.net/bigdata" blob="100k". In this case, it is
			// a possibility that we have blobs https://sdksampleperftest.blob.core.windows.net/bigdata/100K and
			// https://sdksampleperftest.blob.core.windows.net/bigdata/100K/f1.txt. So we need to list the blob
			// https://sdksampleperftest.blob.core.windows.net/bigdata/100K/f1.txt
			searchPrefix := tempBlobUrlParts.BlobName
			if len(searchPrefix) > 0 && searchPrefix[len(searchPrefix)-1] != common.AZCOPY_PATH_SEPARATOR_CHAR {
				searchPrefix += common.AZCOPY_PATH_SEPARATOR_STRING
			}
			for marker := (azblob.Marker{}); marker.NotDone(); {
				// look for all blobs that start with the prefix, so that if a blob is under the virtual directory, it will show up
				listBlob, err := containerUrl.ListBlobsFlatSegment(ctx, marker,
					azblob.ListBlobsSegmentOptions{Details: azblob.BlobListingDetails{Metadata: true}, Prefix: searchPrefix})
				if err != nil {
					glcm.Info(fmt.Sprintf("cannot list blobs inside directory %s mentioned.", searchPrefix))
					// BUGFIX: this was 'continue', which re-ran the loop with the
					// same (unchanged) marker and therefore spun forever on a
					// persistent listing error. 'break' abandons this directory
					// and moves on to the next entry in the list of files.
					break
				}
				// If there was no blob listed inside the directory mentioned in the listOfFilesToCopy flag,
				// report to the user and continue to the next blob mentioned.
				if !listBlob.NextMarker.NotDone() && len(listBlob.Segment.BlobItems) == 0 {
					glcm.Info(fmt.Sprintf("cannot list blobs inside directory %s mentioned.", searchPrefix))
					break
				}
				for _, blobInfo := range listBlob.Segment.BlobItems {
					// If the blob represents a folder as per the conditions mentioned in the
					// api doesBlobRepresentAFolder, then skip the blob.
					if util.doesBlobRepresentAFolder(blobInfo.Metadata) {
						continue
					}
					blobRelativePath := strings.Replace(blobInfo.Name, parentSourcePath, "", 1)
					if len(blobRelativePath) > 0 && blobRelativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
						blobRelativePath = blobRelativePath[1:]
					}
					//blobRelativePath := util.getRelativePath(parentSourcePath, blobInfo.Name)
					// check for the special character in blob relative path and get path without special character.
					blobRelativePath = util.blobPathWOSpecialCharacters(blobRelativePath)
					e.addTransfer(common.CopyTransfer{
						Source:           util.stripSASFromBlobUrl(util.createBlobUrlFromContainer(blobUrlParts, blobInfo.Name)).String(),
						Destination:      util.generateLocalPath(cca.destination, blobRelativePath),
						LastModifiedTime: blobInfo.Properties.LastModified,
						SourceSize:       *blobInfo.Properties.ContentLength,
						ContentMD5:       blobInfo.Properties.ContentMD5,
						BlobType:         blobInfo.Properties.BlobType,
					}, cca)
				}
				marker = listBlob.NextMarker
			}
		}
		// If there are no transfer to queue up, exit with message
		if len(e.Transfers) == 0 {
			glcm.Error(fmt.Sprintf("no transfer queued for copying data from %s to %s", cca.source, cca.destination))
			return nil
		}
		// dispatch the JobPart as Final Part of the Job
		err = e.dispatchFinalPart(cca)
		if err != nil {
			return err
		}
		return nil
	}
	// searchPrefix is the used in listing blob inside a container
	// all the blob listed should have the searchPrefix as the prefix
	// blobNamePattern represents the regular expression which the blobName should Match
	searchPrefix, blobNamePattern, isWildcardSearch := srcBlobURLPartsExtension.searchPrefixFromBlobURL()
	// If blobNamePattern is "*", means that all the contents inside the given source url recursively needs to be downloaded
	// It means that source url provided is either a container or a virtual directory
	// All the blobs inside a container or virtual directory will be downloaded only when the recursive flag is set to true
	if blobNamePattern == "*" && !cca.recursive && !isWildcardSearch {
		return fmt.Errorf("cannot download the entire container / virtual directory. Please use --recursive flag")
	}
	// if downloading entire container, then create a local directory with the container's name
	if blobUrlParts.BlobName == "" {
		cca.destination = util.generateLocalPath(cca.destination, blobUrlParts.ContainerName)
	}
	// perform a list blob with search prefix
	for marker := (azblob.Marker{}); marker.NotDone(); {
		// look for all blobs that start with the prefix, so that if a blob is under the virtual directory, it will show up
		listBlob, err := containerUrl.ListBlobsFlatSegment(ctx, marker,
			azblob.ListBlobsSegmentOptions{Details: azblob.BlobListingDetails{Metadata: true}, Prefix: searchPrefix})
		if err != nil {
			return fmt.Errorf("cannot list blobs for download. Failed with error %s", err.Error())
		}
		// Process the blobs returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, blobInfo := range listBlob.Segment.BlobItems {
			// If the blob represents a folder as per the conditions mentioned in the
			// api doesBlobRepresentAFolder, then skip the blob.
			if util.doesBlobRepresentAFolder(blobInfo.Metadata) {
				continue
			}
			// If the blobName doesn't matches the blob name pattern, then blob is not included
			// queued for transfer
			if !util.matchBlobNameAgainstPattern(blobNamePattern, blobInfo.Name, cca.recursive) {
				continue
			}
			// Check the blob should be included or not
			if !util.resourceShouldBeIncluded(parentSourcePath, e.Include, blobInfo.Name) {
				continue
			}
			// Check the blob should be excluded or not
			if util.resourceShouldBeExcluded(parentSourcePath, e.Exclude, blobInfo.Name) {
				continue
			}
			// If wildcard exists in the source, searchPrefix is the source string till the first wildcard index
			// In case of wildcards in source string, there is no need to create the last virtal directory in the searchPrefix
			// locally.
			// blobRelativePath will be as follow
			// source = https://<container>/<vd-1>/*?<signature> blobName = /vd-1/dir/1.txt
			// blobRelativePath = dir/1.txt
			// source = https://<container>/<vd-1>/dir/*.txt?<signature> blobName = /vd-1/dir/1.txt
			// blobRelativePath = 1.txt
			// source = https://<container>/<vd-1>/dir/*/*.txt?<signature> blobName = /vd-1/dir/dir1/1.txt
			// blobRelativePath = dir1/1.txt
			var blobRelativePath = ""
			if util.firstIndexOfWildCard(blobUrlParts.BlobName) != -1 {
				blobRelativePath = strings.Replace(blobInfo.Name, searchPrefix[:strings.LastIndex(searchPrefix, common.AZCOPY_PATH_SEPARATOR_STRING)+1], "", 1)
			} else {
				blobRelativePath = util.getRelativePath(searchPrefix, blobInfo.Name)
			}
			// check for the special character in blob relative path and get path without special character.
			blobRelativePath = util.blobPathWOSpecialCharacters(blobRelativePath)
			e.addTransfer(common.CopyTransfer{
				Source:           util.stripSASFromBlobUrl(util.createBlobUrlFromContainer(blobUrlParts, blobInfo.Name)).String(),
				Destination:      util.generateLocalPath(cca.destination, blobRelativePath),
				LastModifiedTime: blobInfo.Properties.LastModified,
				SourceSize:       *blobInfo.Properties.ContentLength,
				ContentMD5:       blobInfo.Properties.ContentMD5,
				BlobType:         blobInfo.Properties.BlobType,
			}, cca)
		}
		marker = listBlob.NextMarker
	}
	// If part number is 0 && number of transfer queued is 0
	// it means that no job part has been dispatched and there are no
	// transfer in Job to dispatch a JobPart.
	if e.PartNum == 0 && len(e.Transfers) == 0 {
		return fmt.Errorf("no transfer queued to download. Please verify the source / destination")
	}
	// dispatch the JobPart as Final Part of the Job
	err = e.dispatchFinalPart(cca)
	if err != nil {
		return err
	}
	return nil
}
// addTransfer queues a single transfer onto this enumerator's job part order.
func (e *copyDownloadBlobEnumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	// if we are downloading to dev null, we must point to devNull itself, rather than some file under it
	// (the per-file Destination is cleared so only the DestinationRoot, i.e. devNull, is used)
	if strings.EqualFold(e.DestinationRoot, common.Dev_Null) {
		transfer.Destination = ""
	}
	return addTransfer((*common.CopyJobPartOrderRequest)(e), transfer, cca)
}
// dispatchFinalPart sends the accumulated transfers as the final part of the
// job, delegating to the package-level dispatchFinalPart helper.
func (e *copyDownloadBlobEnumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	return dispatchFinalPart((*common.CopyJobPartOrderRequest)(e), cca)
}
<file_sep>package azbfs_test
import (
"context"
"os"
"github.com/Azure/azure-storage-azcopy/azbfs"
chk "gopkg.in/check.v1"
"net/http"
"net/url"
)
// FileSystemURLSuite groups tests for azbfs.FileSystemURL.
type FileSystemURLSuite struct{}

// register the suite with gopkg.in/check.v1
var _ = chk.Suite(&FileSystemURLSuite{})

// delFileSystem deletes the given file system and asserts the service
// acknowledged the deletion with 202 Accepted.
func delFileSystem(c *chk.C, fs azbfs.FileSystemURL) {
	resp, err := fs.Delete(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(resp.Response().StatusCode, chk.Equals, http.StatusAccepted)
}
// TestFileSystemCreateRootDirectoryURL verifies that NewRootDirectoryURL
// yields the bare file-system URL, with no extra path segment appended.
func (s *FileSystemURLSuite) TestFileSystemCreateRootDirectoryURL(c *chk.C) {
	fsu := getBfsServiceURL()
	testURL := fsu.NewFileSystemURL(fileSystemPrefix).NewRootDirectoryURL()
	correctURL := "https://" + os.Getenv("ACCOUNT_NAME") + ".dfs.core.windows.net/" + fileSystemPrefix
	temp := testURL.URL()
	c.Assert(temp.String(), chk.Equals, correctURL)
}
// TestFileSystemCreateDirectoryURL verifies that NewDirectoryURL appends the
// directory name to the file-system URL, and that URL() and String() agree.
func (s *FileSystemURLSuite) TestFileSystemCreateDirectoryURL(c *chk.C) {
	fsu := getBfsServiceURL()
	testURL := fsu.NewFileSystemURL(fileSystemPrefix).NewDirectoryURL(directoryPrefix)
	correctURL := "https://" + os.Getenv("ACCOUNT_NAME") + ".dfs.core.windows.net/" + fileSystemPrefix + "/" + directoryPrefix
	temp := testURL.URL()
	c.Assert(temp.String(), chk.Equals, correctURL)
	c.Assert(testURL.String(), chk.Equals, correctURL)
}
// TestFileSystemNewFileSystemURLNegative verifies that constructing a
// FileSystemURL with a nil pipeline panics with the expected message.
func (s *FileSystemURLSuite) TestFileSystemNewFileSystemURLNegative(c *chk.C) {
	c.Assert(func() { azbfs.NewFileSystemURL(url.URL{}, nil) }, chk.Panics, "p can't be nil")
}
// TestFileSystemCreateDelete creates a file system, verifies its properties
// can be fetched, and deletes it on cleanup via the deferred delFileSystem.
func (s *FileSystemURLSuite) TestFileSystemCreateDelete(c *chk.C) {
	fsu := getBfsServiceURL()
	fileSystemURL, _ := getFileSystemURL(c, fsu)
	_, err := fileSystemURL.Create(ctx)
	defer delFileSystem(c, fileSystemURL)
	c.Assert(err, chk.IsNil)
	// Test get properties
	resp, err := fileSystemURL.GetProperties(ctx)
	// Check err before touching resp: if the call failed, resp may be nil and
	// dereferencing it would panic instead of reporting a clean test failure.
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, http.StatusOK)
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"bytes"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// blockBlobUploader uploads local data to an Azure block blob. It layers an
// MD5 channel on top of blockBlobSenderBase so the hash computed while the
// file is read can be attached to the blob as Content-MD5.
type blockBlobUploader struct {
	blockBlobSenderBase
	md5Channel chan []byte // receives the whole-file MD5 once reading completes
}
// newBlockBlobUploader constructs a blockBlobUploader on top of the shared
// block-blob sender base; no access tier is applied for uploads.
func newBlockBlobUploader(jptm IJobPartTransferMgr, destination string, p pipeline.Pipeline, pacer *pacer, sip ISourceInfoProvider) (ISenderBase, error) {
	base, err := newBlockBlobSenderBase(jptm, destination, p, pacer, sip, azblob.AccessTierNone)
	if err != nil {
		return nil, err
	}
	uploader := &blockBlobUploader{
		blockBlobSenderBase: *base,
		md5Channel:          newMd5Channel(),
	}
	return uploader, nil
}
// Md5Channel exposes the send side of the MD5 channel; the chunk reader
// writes the file's hash here when it finishes reading the source.
func (u *blockBlobUploader) Md5Channel() chan<- []byte {
	return u.md5Channel
}
// GenerateUploadFunc returns a chunk-func for blob uploads. A chunk that is
// the whole file is sent with a single Put Blob (so no block-list commit is
// needed); any other chunk is staged as a block and the epilogue is flagged
// to commit the block list.
func (u *blockBlobUploader) GenerateUploadFunc(id common.ChunkID, blockIndex int32, reader common.SingleChunkReader, chunkIsWholeFile bool) chunkFunc {
	if !chunkIsWholeFile {
		setPutListNeed(&u.atomicPutListIndicator, putListNeeded)
		return u.generatePutBlock(id, blockIndex, reader)
	}
	// Whole-file chunks must be the only chunk of the transfer.
	if blockIndex > 0 {
		panic("chunk cannot be whole file where there is more than one chunk")
	}
	setPutListNeed(&u.atomicPutListIndicator, putListNotNeeded)
	return u.generatePutWholeBlob(id, blockIndex, reader)
}
// generatePutBlock generates a func to upload the block of src data from given startIndex till the given chunkSize.
// The returned chunkFunc records the block ID at this chunk's index (so the
// final Put Block List preserves order) and stages the block to the service.
func (u *blockBlobUploader) generatePutBlock(id common.ChunkID, blockIndex int32, reader common.SingleChunkReader) chunkFunc {
	return createSendToRemoteChunkFunc(u.jptm, id, func() {
		// step 1: generate block ID
		encodedBlockID := u.generateEncodedBlockID()
		// step 2: save the block ID into the list of block IDs
		u.setBlockID(blockIndex, encodedBlockID)
		// step 3: put block to remote
		u.jptm.LogChunkStatus(id, common.EWaitReason.Body())
		// wrap the reader so the body upload is rate-limited by the pacer
		body := newLiteRequestBodyPacer(reader, u.pacer)
		_, err := u.destBlockBlobURL.StageBlock(u.jptm.Context(), encodedBlockID, body, azblob.LeaseAccessConditions{}, nil)
		if err != nil {
			u.jptm.FailActiveUpload("Staging block", err)
			return
		}
	})
}
// generates PUT Blob (for a blob that fits in a single put request)
func (u *blockBlobUploader) generatePutWholeBlob(id common.ChunkID, blockIndex int32, reader common.SingleChunkReader) chunkFunc {
	return createSendToRemoteChunkFunc(u.jptm, id, func() {
		jptm := u.jptm
		// Upload the blob
		jptm.LogChunkStatus(id, common.EWaitReason.Body())
		var err error
		if jptm.Info().SourceSize == 0 {
			// Empty source: send a zero-length body. No MD5 is read from the
			// channel here — there is no content to hash.
			_, err = u.destBlockBlobURL.Upload(jptm.Context(), bytes.NewReader(nil), u.headersToApply, u.metadataToApply, azblob.BlobAccessConditions{})
		} else {
			// File with content
			// Get the MD5 that was computed as we read the file
			md5Hash, ok := <-u.md5Channel
			if !ok {
				// channel closed without delivering a hash: the read side failed
				jptm.FailActiveUpload("Getting hash", errNoHash)
				return
			}
			u.headersToApply.ContentMD5 = md5Hash
			// Upload the file; the body read is rate-limited by the pacer
			body := newLiteRequestBodyPacer(reader, u.pacer)
			_, err = u.destBlockBlobURL.Upload(jptm.Context(), body, u.headersToApply, u.metadataToApply, azblob.BlobAccessConditions{})
		}
		// if the put blob is a failure, update the transfer status to failed
		if err != nil {
			jptm.FailActiveUpload("Uploading blob", err)
			return
		}
	})
}
// Epilogue attaches the computed Content-MD5 (when a block-list commit is
// still pending and the transfer has not already failed), then delegates the
// actual Put Block List / cleanup to the embedded sender base.
func (u *blockBlobUploader) Epilogue() {
	jptm := u.jptm
	shouldPutBlockList := getPutListNeed(&u.atomicPutListIndicator)
	// TransferStatus() > 0 indicates the transfer is still considered
	// successful; only then is the MD5 worth waiting for and attaching.
	if jptm.TransferStatus() > 0 && shouldPutBlockList == putListNeeded {
		md5Hash, ok := <-u.md5Channel
		if ok {
			u.headersToApply.ContentMD5 = md5Hash
		} else {
			jptm.FailActiveSend("Getting hash", errNoHash)
			// don't return, since need cleanup below
		}
	}
	u.blockBlobSenderBase.Epilogue()
}
<file_sep>package cmd
import (
"context"
"crypto/md5"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
"strings"
"time"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/spf13/cobra"
)
// TestBlobCommand represents the struct to get command
// for validating azcopy operations.
// defaultServiceApiVersion is the default value of service api version that is set as value to the ServiceAPIVersionOverride in every Job's context.
const defaultServiceApiVersion = "2017-04-17"
// todo check the number of contents uploaded while verifying.
type TestBlobCommand struct {
	// object is the resource which needs to be validated against a resource on container.
	Object string
	// Subject is the remote resource against which object needs to be validated.
	Subject string
	// IsObjectDirectory defines if the object is a directory or not.
	// If the object is directory, then validation goes through another path.
	IsObjectDirectory bool
	// Metadata of the blob to be validated, in "key1=value1;key2=value2" form.
	MetaData string
	// NoGuessMimeType represent the azcopy NoGuessMimeType flag set while uploading the blob.
	NoGuessMimeType bool
	// Represents the flag to determine whether number of blocks or pages needs
	// to be verified or not.
	// todo always set this to true
	VerifyBlockOrPageSize bool
	// BlobType of the resource to be validated ("BlockBlob", "PageBlob" or "AppendBlob").
	BlobType string
	// access tier for block blobs
	BlobTier string
	// Number of Blocks or Pages Expected from the blob.
	NumberOfBlocksOrPages uint64
	// todo : NumberOfBlocksOrPages can be an array with offset : end url.
	// todo consecutive page ranges get squashed.
	// PreserveLastModifiedTime represents the azcopy PreserveLastModifiedTime flag while downloading the blob.
	PreserveLastModifiedTime bool
	// Properties of the blob expected to match the remote blob's headers.
	ContentType        string
	ContentEncoding    string
	ContentDisposition string
	ContentLanguage    string
	CacheControl       string
	CheckContentMD5    bool
	CheckContentType   bool
}
// initializes the testblob command, its aliases and description.
// also adds the possible flags that can be supplied with testBlob command.
func init() {
	cmdInput := TestBlobCommand{}
	testBlobCmd := &cobra.Command{
		Use:     "testBlob",
		Aliases: []string{"tBlob"},
		Short:   "tests the blob created using AZCopy v2",
		// Args captures the two positional arguments before Run executes.
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 2 {
				return fmt.Errorf("invalid arguments for test blob command")
			}
			// first argument is the resource name.
			cmdInput.Object = args[0]
			// second argument is the test directory.
			cmdInput.Subject = args[1]
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			verifyBlob(cmdInput)
		},
	}
	rootCmd.AddCommand(testBlobCmd)
	// add flags.
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.MetaData, "metadata", "", "metadata expected from the blob in the container")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.ContentType, "content-type", "", "content type expected from the blob in the container")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.ContentEncoding, "content-encoding", "", "Validate content encoding.")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.ContentDisposition, "content-disposition", "", "Validate content disposition.")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.ContentLanguage, "content-language", "", "Validate content language.")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.CacheControl, "cache-control", "", "Validate cache control.")
	testBlobCmd.PersistentFlags().BoolVar(&cmdInput.CheckContentMD5, "check-content-md5", false, "Validate content MD5.")
	testBlobCmd.PersistentFlags().BoolVar(&cmdInput.IsObjectDirectory, "is-object-dir", false, "set the type of object to verify against the subject")
	testBlobCmd.PersistentFlags().Uint64Var(&cmdInput.NumberOfBlocksOrPages, "number-blocks-or-pages", 0, "Use this block size to verify the number of blocks uploaded")
	testBlobCmd.PersistentFlags().BoolVar(&cmdInput.VerifyBlockOrPageSize, "verify-block-size", false, "this flag verify the block size by determining the number of blocks")
	testBlobCmd.PersistentFlags().BoolVar(&cmdInput.NoGuessMimeType, "no-guess-mime-type", false, "This sets the content-type based on the extension of the file.")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.BlobType, "blob-type", "BlockBlob", "Upload to Azure Storage using this blob type.")
	testBlobCmd.PersistentFlags().StringVar(&cmdInput.BlobTier, "blob-tier", string(azblob.AccessTierNone), "access tier type for the block blob")
	testBlobCmd.PersistentFlags().BoolVar(&cmdInput.PreserveLastModifiedTime, "preserve-last-modified-time", false, "Only available when destination is file system.")
	testBlobCmd.PersistentFlags().BoolVar(&cmdInput.CheckContentType, "check-content-type", false, "Validate content type.")
}
// verifyBlob dispatches validation by the blob type given on the command
// line; block blobs additionally branch on directory vs. single-file mode.
func verifyBlob(testBlobCmd TestBlobCommand) {
	switch testBlobCmd.BlobType {
	case "PageBlob":
		verifySinglePageBlobUpload(testBlobCmd)
	case "AppendBlob":
		verifySingleAppendBlob(testBlobCmd)
	default:
		// anything else is treated as a block blob
		if testBlobCmd.IsObjectDirectory {
			verifyBlockBlobDirUpload(testBlobCmd)
		} else {
			verifySingleBlockBlob(testBlobCmd)
		}
	}
}
// verifyBlockBlobDirUpload verifies the directory recursively uploaded to the container.
// It lists every blob under the "<dirname>/" prefix and compares each blob's
// MD5 against the MD5 of the corresponding file under the local directory.
// Exits the process with status 1 on the first mismatch or error.
func verifyBlockBlobDirUpload(testBlobCmd TestBlobCommand) {
	// parse the subject url.
	sasUrl, err := url.Parse(testBlobCmd.Subject)
	if err != nil {
		fmt.Println("error parsing the container sas ", testBlobCmd.Subject)
		os.Exit(1)
	}
	// strip any blob path so the URL addresses the container itself.
	containerName := strings.SplitAfterN(sasUrl.Path[1:], "/", 2)[0]
	sasUrl.Path = "/" + containerName
	// Create Pipeline to Get the Blob Properties or List Blob Segment
	p := ste.NewBlobPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	},
		ste.XferRetryOptions{
			Policy:        0,
			MaxTries:      ste.UploadMaxTries,
			TryTimeout:    10 * time.Minute,
			RetryDelay:    ste.UploadRetryDelay,
			MaxRetryDelay: ste.UploadMaxRetryDelay},
		nil,
		ste.NewAzcopyHTTPClient())
	containerUrl := azblob.NewContainerURL(*sasUrl, p)
	// pin the service API version for every request made by this test.
	testCtx := context.WithValue(context.Background(), ste.ServiceAPIVersionOverride, defaultServiceApiVersion)
	// perform a list blob with search prefix "dirname/"
	dirName := strings.Split(testBlobCmd.Object, "/")
	searchPrefix := dirName[len(dirName)-1] + "/"
	for marker := (azblob.Marker{}); marker.NotDone(); {
		// look for all blobs that start with the prefix, so that if a blob is under the virtual directory, it will show up
		listBlob, err := containerUrl.ListBlobsFlatSegment(testCtx, marker, azblob.ListBlobsSegmentOptions{Prefix: searchPrefix})
		if err != nil {
			fmt.Println("error listing blobs inside the container. Please check the container sas")
			os.Exit(1)
		}
		// Process the blobs returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, blobInfo := range listBlob.Segment.BlobItems {
			// get the blob
			size := blobInfo.Properties.ContentLength
			get, err := containerUrl.NewBlobURL(blobInfo.Name).Download(testCtx,
				0, *size, azblob.BlobAccessConditions{}, false)
			if err != nil {
				fmt.Println(fmt.Sprintf("error downloading the blob %s", blobInfo.Name))
				os.Exit(1)
			}
			// read all bytes.
			blobBytesDownloaded, err := ioutil.ReadAll(get.Body(azblob.RetryReaderOptions{}))
			if err != nil {
				fmt.Println(fmt.Sprintf("error reading the body of blob %s downloaded and failed with error %s", blobInfo.Name, err.Error()))
				os.Exit(1)
			}
			// remove the search prefix from the blob name
			blobName := strings.Replace(blobInfo.Name, searchPrefix, "", 1)
			// blob path on local disk.
			objectLocalPath := testBlobCmd.Object + string(os.PathSeparator) + blobName
			// opening the file locally and memory mapping it.
			sFileInfo, err := os.Stat(objectLocalPath)
			if err != nil {
				fmt.Println("error geting the subject blob file info on local disk ")
				os.Exit(1)
			}
			sFile, err := os.Open(objectLocalPath)
			if err != nil {
				fmt.Println("error opening file ", sFile)
				os.Exit(1)
			}
			sMap, err := NewMMF(sFile, false, 0, int64(sFileInfo.Size()))
			if err != nil {
				fmt.Println("error memory mapping the file ", sFileInfo.Name())
				// previously this fell through and hashed a nil mapping,
				// producing a bogus comparison; fail fast instead.
				sFile.Close()
				os.Exit(1)
			}
			// calculating the md5 of blob on container.
			actualMd5 := md5.Sum(blobBytesDownloaded)
			// calculating md5 of resource locally.
			expectedMd5 := md5.Sum(sMap)
			// release the mapping and the file handle before the next
			// iteration (or exit); both leaked on every iteration before.
			sMap.Unmap()
			sFile.Close()
			if actualMd5 != expectedMd5 {
				fmt.Println("the upload blob md5 is not equal to the md5 of actual blob on disk for blob ", blobInfo.Name)
				os.Exit(1)
			}
		}
		marker = listBlob.NextMarker
	}
}
// validateMetadata compares the metadata provided while uploading
// (a string in "key1=value1;key2=value2;key3=value3" form) against the
// metadata actually present on the blob. Returns true when both sides
// contain exactly the same key/value pairs, or when both are empty.
func validateMetadata(expectedMetaDataString string, actualMetaData azblob.Metadata) bool {
	if len(expectedMetaDataString) == 0 {
		// nothing was expected: the blob must carry no metadata either.
		return len(actualMetaData) == 0
	}
	expectedMetaData := azblob.Metadata{}
	// split the metadata string to get individual "key=value" pairs.
	for _, pair := range strings.Split(expectedMetaDataString, ";") {
		// SplitN(…, 2) keeps any '=' inside the value intact; plain Split
		// truncated such values, and a pair with no '=' at all used to
		// panic with an index-out-of-range here.
		keyValue := strings.SplitN(pair, "=", 2)
		if len(keyValue) != 2 {
			fmt.Println("invalid metadata pair (expected key=value): ", pair)
			return false
		}
		expectedMetaData[keyValue[0]] = keyValue[1]
	}
	// if number of metadata provided while uploading
	// doesn't match the metadata with blob on the container
	if len(expectedMetaData) != len(actualMetaData) {
		fmt.Println("number of user given key value pair of the actual metadata differs from key value pair of expected metaData")
		return false
	}
	// iterating through each key value pair of actual metaData and comparing the key value pair in expected metadata
	for key, value := range actualMetaData {
		if expectedMetaData[key] != value {
			fmt.Println(fmt.Sprintf("value of user given key %s is %s in actual data while it is %s in expected metadata", key, value, expectedMetaData[key]))
			return false
		}
	}
	return true
}
// verifySinglePageBlobUpload verifies the pageblob uploaded or downloaded
// against the blob locally.
// It checks (in order): access tier, content MD5, content type, metadata,
// the standard content headers, optionally the Content-MD5 property, and
// optionally the number of page ranges. Exits the process on any mismatch.
func verifySinglePageBlobUpload(testBlobCmd TestBlobCommand) {
	fileInfo, err := os.Stat(testBlobCmd.Object)
	if err != nil {
		fmt.Println("error opening the destination blob on local disk ")
		os.Exit(1)
	}
	file, err := os.Open(testBlobCmd.Object)
	if err != nil {
		// NOTE(review): does not exit on open failure; a nil file would only
		// surface later when the file is memory mapped — confirm intended.
		fmt.Println("error opening the file ", testBlobCmd.Object)
	}
	// getting the shared access signature of the resource.
	sourceURL, err := url.Parse(testBlobCmd.Subject)
	if err != nil {
		fmt.Println(fmt.Sprintf("Error parsing the blob url source %s", testBlobCmd.Object))
		os.Exit(1)
	}
	// creating the page blob url of the resource on container.
	// Create Pipeline to Get the Blob Properties or List Blob Segment
	p := ste.NewBlobPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	},
		ste.XferRetryOptions{
			Policy:        0,
			MaxTries:      ste.UploadMaxTries,
			TryTimeout:    10 * time.Minute,
			RetryDelay:    ste.UploadRetryDelay,
			MaxRetryDelay: ste.UploadMaxRetryDelay},
		nil,
		ste.NewAzcopyHTTPClient())
	// pin the service API version used for all requests in this test.
	testCtx := context.WithValue(context.Background(), ste.ServiceAPIVersionOverride, defaultServiceApiVersion)
	pageBlobUrl := azblob.NewPageBlobURL(*sourceURL, p)
	// get the blob properties and check the blob tier.
	if azblob.AccessTierType(testBlobCmd.BlobTier) != azblob.AccessTierNone {
		blobProperties, err := pageBlobUrl.GetProperties(testCtx, azblob.BlobAccessConditions{})
		if err != nil {
			fmt.Println(fmt.Sprintf("error getting the properties of the blob. failed with error %s", err.Error()))
			os.Exit(1)
		}
		// If the blob tier does not match the expected blob tier.
		if !strings.EqualFold(blobProperties.AccessTier(), testBlobCmd.BlobTier) {
			fmt.Println(fmt.Sprintf("Access blob tier type %s does not match the expected %s tier type", blobProperties.AccessTier(), testBlobCmd.BlobTier))
			os.Exit(1)
		}
		// Closing the blobProperties response body.
		// drain before close so the connection can be reused.
		if blobProperties.Response() != nil {
			io.Copy(ioutil.Discard, blobProperties.Response().Body)
			blobProperties.Response().Body.Close()
		}
	}
	// download the full blob (offset 0, count = local file size).
	get, err := pageBlobUrl.Download(testCtx, 0, fileInfo.Size(), azblob.BlobAccessConditions{}, false)
	if err != nil {
		fmt.Println("unable to get blob properties ", err.Error())
		os.Exit(1)
	}
	// reading all the bytes downloaded.
	blobBytesDownloaded, err := ioutil.ReadAll(get.Body(azblob.RetryReaderOptions{}))
	if get.Response().Body != nil {
		get.Response().Body.Close()
	}
	if err != nil {
		fmt.Println("error reading the byes from response and failed with error ", err.Error())
		os.Exit(1)
	}
	// when MIME guessing is disabled the expected type is exactly what the
	// user supplied; otherwise it is sniffed from the file content below.
	expectedContentType := ""
	if testBlobCmd.NoGuessMimeType {
		expectedContentType = testBlobCmd.ContentType
	}
	if len(blobBytesDownloaded) != 0 {
		// memory mapping the resource on local path.
		mmap, err := NewMMF(file, false, 0, fileInfo.Size())
		if err != nil {
			fmt.Println("error mapping the destination blob file ", err.Error())
			os.Exit(1)
		}
		// calculating and verify the md5 of the resource
		// both locally and on the container.
		actualMd5 := md5.Sum(mmap)
		expectedMd5 := md5.Sum(blobBytesDownloaded)
		if actualMd5 != expectedMd5 {
			fmt.Println("the uploaded blob's md5 doesn't matches the actual blob's md5 for blob ", testBlobCmd.Object)
			os.Exit(1)
		}
		if !testBlobCmd.NoGuessMimeType {
			// sniff the content type from the local bytes, mirroring azcopy
			expectedContentType = http.DetectContentType(mmap)
		}
		mmap.Unmap()
	}
	// verify the content-type
	if testBlobCmd.CheckContentType && !validateString(expectedContentType, get.ContentType()) {
		fmt.Printf(
			"mismatch content type between actual and user given blob content type, expected %q, actually %q\n",
			expectedContentType,
			get.ContentType())
		os.Exit(1)
	}
	// verify the user given metadata supplied while uploading the blob against the metadata actually present in the blob
	if !validateMetadata(testBlobCmd.MetaData, get.NewMetadata()) {
		fmt.Println("meta data does not match between the actual and uploaded blob.")
		os.Exit(1)
	}
	// verify the content-encoding
	if !validateString(testBlobCmd.ContentEncoding, get.ContentEncoding()) {
		fmt.Println("mismatch ContentEncoding between actual and user given blob")
		os.Exit(1)
	}
	if !validateString(testBlobCmd.CacheControl, get.CacheControl()) {
		fmt.Println("mismatch CacheControl between actual and user given blob")
		os.Exit(1)
	}
	if !validateString(testBlobCmd.ContentDisposition, get.ContentDisposition()) {
		fmt.Println("mismatch ContentDisposition between actual and user given blob")
		os.Exit(1)
	}
	if !validateString(testBlobCmd.ContentLanguage, get.ContentLanguage()) {
		fmt.Println("mismatch ContentLanguage between actual and user given blob")
		os.Exit(1)
	}
	if testBlobCmd.CheckContentMD5 && (get.ContentMD5() == nil || len(get.ContentMD5()) == 0) {
		fmt.Println("ContentMD5 should not be empty")
		os.Exit(1)
	}
	file.Close()
	// verify the number of pageranges.
	// this verifies the page-size and azcopy pageblob implementation.
	if testBlobCmd.VerifyBlockOrPageSize {
		numberOfPages := int(testBlobCmd.NumberOfBlocksOrPages)
		// offset 0, count 0 — presumably requests all page ranges of the
		// blob; verify against the azblob GetPageRanges documentation.
		resp, err := pageBlobUrl.GetPageRanges(testCtx, 0, 0, azblob.BlobAccessConditions{})
		if err != nil {
			fmt.Println("error getting the block blob list ", err.Error())
			os.Exit(1)
		}
		if numberOfPages != (len(resp.PageRange)) {
			fmt.Println("number of blocks to be uploaded is different from the number of expected to be uploaded")
			os.Exit(1)
		}
	}
}
// verifySingleBlockBlob verifies the blockblob uploaded or downloaded
// against the blob locally.
// todo close the file as soon as possible.
// Checks (in order): access tier (exiting early for Archive, which cannot be
// read), content MD5, metadata, content type, content encoding, last-modified
// time, and optionally the committed block count. Exits the process on any
// mismatch.
func verifySingleBlockBlob(testBlobCmd TestBlobCommand) {
	// opening the resource on local path in test directory.
	objectLocalPath := testBlobCmd.Object
	fileInfo, err := os.Stat(objectLocalPath)
	if err != nil {
		fmt.Println("error opening the destination blob on local disk ")
		os.Exit(1)
	}
	file, err := os.Open(objectLocalPath)
	if err != nil {
		// NOTE(review): does not exit on open failure; a nil file would only
		// surface later when the file is memory mapped — confirm intended.
		fmt.Println("error opening the file ", objectLocalPath)
	}
	// getting the shared access signature of the resource.
	sourceSas := testBlobCmd.Subject
	fmt.Println("source sas ", sourceSas)
	sourceURL, err := url.Parse(sourceSas)
	if err != nil {
		fmt.Println(fmt.Sprintf("Error parsing the blob url source %s", testBlobCmd.Object))
		os.Exit(1)
	}
	// creating the blockblob url of the resource on container.
	// Create Pipeline to Get the Blob Properties or List Blob Segment
	p := ste.NewBlobPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	},
		ste.XferRetryOptions{
			Policy:        0,
			MaxTries:      ste.UploadMaxTries,
			TryTimeout:    10 * time.Minute,
			RetryDelay:    ste.UploadRetryDelay,
			MaxRetryDelay: ste.UploadMaxRetryDelay},
		nil,
		ste.NewAzcopyHTTPClient())
	// pin the service API version used for all requests in this test.
	testCtx := context.WithValue(context.Background(), ste.ServiceAPIVersionOverride, defaultServiceApiVersion)
	blobUrl := azblob.NewBlobURL(*sourceURL, p)
	// check for access tier type
	// get the blob properties and get the Access Tier Type.
	if azblob.AccessTierType(testBlobCmd.BlobTier) != azblob.AccessTierNone {
		blobProperties, err := blobUrl.GetProperties(testCtx, azblob.BlobAccessConditions{})
		if err != nil {
			fmt.Println(fmt.Sprintf("error getting the blob properties. Failed with error %s", err.Error()))
			os.Exit(1)
		}
		// Match the Access Tier Type with Expected Tier Type.
		if !strings.EqualFold(blobProperties.AccessTier(), testBlobCmd.BlobTier) {
			fmt.Println(fmt.Sprintf("block blob access tier %s does not matches the expected tier %s", blobProperties.AccessTier(), testBlobCmd.BlobTier))
			os.Exit(1)
		}
		// Closing the blobProperties response.
		// drain before close so the connection can be reused.
		if blobProperties.Response() != nil {
			io.Copy(ioutil.Discard, blobProperties.Response().Body)
			blobProperties.Response().Body.Close()
		}
		// If the access tier type of blob is set to Archive, then the blob is offline and reading the blob is not allowed,
		// so exit the test.
		if azblob.AccessTierType(testBlobCmd.BlobTier) == azblob.AccessTierArchive {
			os.Exit(0)
		}
	}
	// download the full blob (offset 0, count = local file size).
	get, err := blobUrl.Download(testCtx, 0, fileInfo.Size(), azblob.BlobAccessConditions{}, false)
	if err != nil {
		fmt.Println("unable to get blob properties ", err.Error())
		os.Exit(1)
	}
	// reading all the blob bytes.
	blobBytesDownloaded, err := ioutil.ReadAll(get.Body(azblob.RetryReaderOptions{}))
	if get.Response().Body != nil {
		get.Response().Body.Close()
	}
	if err != nil {
		fmt.Println("error reading the byes from response and failed with error ", err.Error())
		os.Exit(1)
	}
	if fileInfo.Size() == 0 {
		// If the fileSize is 0 and the len of downloaded bytes is not 0
		// validation fails
		if len(blobBytesDownloaded) != 0 {
			fmt.Println(fmt.Sprintf("validation failed since the actual file size %d differs from the downloaded file size %d", fileInfo.Size(), len(blobBytesDownloaded)))
			os.Exit(1)
		}
		// If both the actual and downloaded file size is 0,
		// validation is successful, no need to match the md5
		os.Exit(0)
	}
	// memory mapping the resource on local path.
	mmap, err := NewMMF(file, false, 0, fileInfo.Size())
	if err != nil {
		fmt.Println("error mapping the destination blob file ", err.Error())
		os.Exit(1)
	}
	// calculating and verify the md5 of the resource
	// both locally and on the container.
	actualMd5 := md5.Sum(mmap)
	expectedMd5 := md5.Sum(blobBytesDownloaded)
	if actualMd5 != expectedMd5 {
		fmt.Println("the uploaded blob's md5 doesn't matches the actual blob's md5")
		os.Exit(1)
	}
	// verify the user given metadata supplied while uploading the blob against the metadata actually present in the blob
	if !validateMetadata(testBlobCmd.MetaData, get.NewMetadata()) {
		fmt.Println("meta data does not match between the actual and uploaded blob.")
		os.Exit(1)
	}
	// verify the content-type
	expectedContentType := ""
	if testBlobCmd.NoGuessMimeType {
		expectedContentType = testBlobCmd.ContentType
	} else {
		// sniff the content type from the local bytes, mirroring azcopy
		expectedContentType = http.DetectContentType(mmap)
	}
	if testBlobCmd.CheckContentType && !validateString(expectedContentType, get.ContentType()) {
		fmt.Printf(
			"mismatch content type between actual and user given blob content type, expected %q, actually %q\n",
			expectedContentType,
			get.ContentType())
		os.Exit(1)
	}
	// verify the content-encoding
	if !validateString(testBlobCmd.ContentEncoding, get.ContentEncoding()) {
		fmt.Println("mismatch content encoding between actual and user given blob content encoding")
		os.Exit(1)
	}
	if testBlobCmd.PreserveLastModifiedTime {
		// compare at one-second resolution via Unix timestamps
		if fileInfo.ModTime().Unix() != get.LastModified().Unix() {
			fmt.Println("modified time of downloaded and actual blob does not match")
			os.Exit(1)
		}
	}
	// unmap and closing the memory map file.
	mmap.Unmap()
	err = file.Close()
	if err != nil {
		fmt.Println(fmt.Sprintf("error closing the file %s and failed with error %s. Error could be while validating the blob.", file.Name(), err.Error()))
		os.Exit(1)
	}
	// verify the block size
	if testBlobCmd.VerifyBlockOrPageSize {
		blockBlobUrl := azblob.NewBlockBlobURL(*sourceURL, p)
		numberOfBlocks := int(testBlobCmd.NumberOfBlocksOrPages)
		// BlockListNone here appears to return all blocks; only committed
		// blocks are counted below — see todo.
		resp, err := blockBlobUrl.GetBlockList(testCtx, azblob.BlockListNone, azblob.LeaseAccessConditions{})
		if err != nil {
			fmt.Println("error getting the block blob list")
			os.Exit(1)
		}
		// todo only commited blocks
		if numberOfBlocks != (len(resp.CommittedBlocks)) {
			fmt.Println("number of blocks to be uploaded is different from the number of expected to be uploaded")
			os.Exit(1)
		}
	}
}
// verifySingleAppendBlob verifies an append blob uploaded or downloaded
// against the local file: access tier, content MD5, metadata, content type,
// and the standard content headers. Exits the process on any mismatch.
func verifySingleAppendBlob(testBlobCmd TestBlobCommand) {
	fileInfo, err := os.Stat(testBlobCmd.Object)
	if err != nil {
		fmt.Println("error opening the destination blob on local disk ")
		os.Exit(1)
	}
	file, err := os.Open(testBlobCmd.Object)
	if err != nil {
		// NOTE(review): does not exit on open failure; a nil file would only
		// surface later when the file is memory mapped — confirm intended.
		fmt.Println("error opening the file ", testBlobCmd.Object)
	}
	// getting the shared access signature of the resource.
	sourceURL, err := url.Parse(testBlobCmd.Subject)
	if err != nil {
		fmt.Println(fmt.Sprintf("Error parsing the blob url source %s", testBlobCmd.Object))
		os.Exit(1)
	}
	// Create Pipeline to Get the Blob Properties or List Blob Segment
	p := ste.NewBlobPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{
		Telemetry: azblob.TelemetryOptions{
			Value: common.UserAgent,
		},
	},
		ste.XferRetryOptions{
			Policy:        0,
			MaxTries:      ste.UploadMaxTries,
			TryTimeout:    10 * time.Minute,
			RetryDelay:    ste.UploadRetryDelay,
			MaxRetryDelay: ste.UploadMaxRetryDelay},
		nil,
		ste.NewAzcopyHTTPClient())
	// pin the service API version used for all requests in this test.
	testCtx := context.WithValue(context.Background(), ste.ServiceAPIVersionOverride, defaultServiceApiVersion)
	appendBlobURL := azblob.NewAppendBlobURL(*sourceURL, p)
	// get the blob properties and check the blob tier.
	if azblob.AccessTierType(testBlobCmd.BlobTier) != azblob.AccessTierNone {
		blobProperties, err := appendBlobURL.GetProperties(testCtx, azblob.BlobAccessConditions{})
		if err != nil {
			fmt.Println(fmt.Sprintf("error getting the properties of the blob. failed with error %s", err.Error()))
			os.Exit(1)
		}
		// If the blob tier does not match the expected blob tier.
		if !strings.EqualFold(blobProperties.AccessTier(), testBlobCmd.BlobTier) {
			fmt.Println(fmt.Sprintf("Access blob tier type %s does not match the expected %s tier type", blobProperties.AccessTier(), testBlobCmd.BlobTier))
			os.Exit(1)
		}
		// Closing the blobProperties response body.
		// drain before close so the connection can be reused.
		if blobProperties.Response() != nil {
			io.Copy(ioutil.Discard, blobProperties.Response().Body)
			blobProperties.Response().Body.Close()
		}
	}
	// download the full blob (offset 0, count = local file size).
	get, err := appendBlobURL.Download(testCtx, 0, fileInfo.Size(), azblob.BlobAccessConditions{}, false)
	if err != nil {
		fmt.Println("unable to get blob properties ", err.Error())
		os.Exit(1)
	}
	// reading all the bytes downloaded.
	blobBytesDownloaded, err := ioutil.ReadAll(get.Body(azblob.RetryReaderOptions{}))
	if get.Response().Body != nil {
		get.Response().Body.Close()
	}
	if err != nil {
		fmt.Println("error reading the byes from response and failed with error ", err.Error())
		os.Exit(1)
	}
	// verify the content-type
	// when MIME guessing is disabled the expected type is exactly what the
	// user supplied; otherwise it is sniffed from the file content below.
	expectedContentType := ""
	if testBlobCmd.NoGuessMimeType {
		expectedContentType = testBlobCmd.ContentType
	}
	if len(blobBytesDownloaded) != 0 {
		// memory mapping the resource on local path.
		mmap, err := NewMMF(file, false, 0, fileInfo.Size())
		if err != nil {
			fmt.Println("error mapping the destination blob file ", err.Error())
			os.Exit(1)
		}
		// calculating and verify the md5 of the resource
		// both locally and on the container.
		actualMd5 := md5.Sum(mmap)
		expectedMd5 := md5.Sum(blobBytesDownloaded)
		if actualMd5 != expectedMd5 {
			fmt.Println("the uploaded blob's md5 doesn't matches the actual blob's md5 for blob ", testBlobCmd.Object)
			os.Exit(1)
		}
		if !testBlobCmd.NoGuessMimeType {
			expectedContentType = http.DetectContentType(mmap)
		}
		mmap.Unmap()
	}
	// verify the user given metadata supplied while uploading the blob against the metadata actually present in the blob
	if !validateMetadata(testBlobCmd.MetaData, get.NewMetadata()) {
		fmt.Println("meta data does not match between the actual and uploaded blob.")
		os.Exit(1)
	}
	if testBlobCmd.CheckContentType && !validateString(expectedContentType, get.ContentType()) {
		fmt.Printf(
			"mismatch content type between actual and user given blob content type, expected %q, actually %q\n",
			expectedContentType,
			get.ContentType())
		os.Exit(1)
	}
	// verify the content-encoding
	if !validateString(testBlobCmd.ContentEncoding, get.ContentEncoding()) {
		fmt.Println("mismatch ContentEncoding between actual and user given blob")
		os.Exit(1)
	}
	if !validateString(testBlobCmd.CacheControl, get.CacheControl()) {
		fmt.Println("mismatch CacheControl between actual and user given blob")
		os.Exit(1)
	}
	if !validateString(testBlobCmd.ContentDisposition, get.ContentDisposition()) {
		fmt.Println("mismatch ContentDisposition between actual and user given blob")
		os.Exit(1)
	}
	if !validateString(testBlobCmd.ContentLanguage, get.ContentLanguage()) {
		fmt.Println("mismatch ContentLanguage between actual and user given blob")
		os.Exit(1)
	}
	if testBlobCmd.CheckContentMD5 && (get.ContentMD5() == nil || len(get.ContentMD5()) == 0) {
		fmt.Println("ContentMD5 should not be empty")
		os.Exit(1)
	}
	file.Close()
}
<file_sep>package cmd
import "github.com/Azure/azure-storage-azcopy/common"
// ===================================== ROOT COMMAND ===================================== //
// Short/long descriptions shown by `azcopy --help`.
const rootCmdShortDescription = "AzCopy is a command line tool that moves data into/out of Azure Storage."
// The long description embeds the compiled-in version from the common package.
const rootCmdLongDescription = "AzCopy " + common.AzcopyVersion +
	`
Project URL: github.com/Azure/azure-storage-azcopy
AzCopy is a command line tool that moves data into/out of Azure Storage.
To report issues or to learn more about the tool, go to github.com/Azure/azure-storage-azcopy
The general format of the commands is: 'azcopy [command] [arguments] --[flag-name]=[flag-value]'.
`
// ===================================== COPY COMMAND ===================================== //
// Help text for `azcopy copy` / `azcopy cp`.
const copyCmdShortDescription = "Copies source data to a destination location"
const copyCmdLongDescription = `
Copies source data to a destination location. The supported directions are:
- local <-> Azure Blob (SAS or OAuth authentication)
- local <-> Azure File (Share/directory SAS authentication)
- local <-> ADLS Gen 2 (OAuth or SharedKey authentication)
- Azure Blob (SAS or public) <-> Azure Blob (SAS or OAuth authentication)
- Azure File (SAS) -> Azure Block Blob (SAS or OAuth authentication)
- AWS S3 (Access Key) -> Azure Block Blob (SAS or OAuth authentication)
Please refer to the examples for more information.
Advanced:
Please note that AzCopy automatically detects the Content Type of the files when uploading from the local disk, based on the file extension or content (if no extension is specified).
The built-in lookup table is small but on Unix it is augmented by the local system's mime.types file(s) if available under one or more of these names:
- /etc/mime.types
- /etc/apache2/mime.types
- /etc/apache/mime.types
On Windows, MIME types are extracted from the registry. This feature can be turned off with the help of a flag. Please refer to the flag section.
`
// Worked examples shown by `azcopy copy --help`, covering upload, download,
// blob-to-blob S2S, and S3-to-blob S2S scenarios.
const copyCmdExample = `Upload a single file using OAuth authentication. Please use 'azcopy login' command first if you aren't logged in yet:
- azcopy cp "/path/to/file.txt" "https://[account].blob.core.windows.net/[container]/[path/to/blob]"
Same as above, but this time also compute MD5 hash of the file content and save it as the blob's Content-MD5 property.
- azcopy cp "/path/to/file.txt" "https://[account].blob.core.windows.net/[container]/[path/to/blob]" --put-md5
Upload a single file with a SAS:
- azcopy cp "/path/to/file.txt" "https://[account].blob.core.windows.net/[container]/[path/to/blob]?[SAS]"
Upload a single file with a SAS using piping (block blobs only):
- cat "/path/to/file.txt" | azcopy cp "https://[account].blob.core.windows.net/[container]/[path/to/blob]?[SAS]"
Upload an entire directory with a SAS:
- azcopy cp "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true
or
- azcopy cp "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true --put-md5
Upload a set of files with a SAS using wildcards:
- azcopy cp "/path/*foo/*bar/*.pdf" "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]"
Upload files and directories with a SAS using wildcards:
- azcopy cp "/path/*foo/*bar*" "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true
Download a single file using OAuth authentication. Please use 'azcopy login' command first if you aren't logged in yet:
- azcopy cp "https://[account].blob.core.windows.net/[container]/[path/to/blob]" "/path/to/file.txt"
Download a single file with a SAS:
- azcopy cp "https://[account].blob.core.windows.net/[container]/[path/to/blob]?[SAS]" "/path/to/file.txt"
Download a single file with a SAS using piping (block blobs only):
- azcopy cp "https://[account].blob.core.windows.net/[container]/[path/to/blob]?[SAS]" > "/path/to/file.txt"
Download an entire directory with a SAS:
- azcopy cp "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" "/path/to/dir" --recursive=true
Download a set of files with a SAS using wildcards:
- azcopy cp "https://[account].blob.core.windows.net/[container]/foo*?[SAS]" "/path/to/dir"
Download files and directories with a SAS using wildcards:
- azcopy cp "https://[account].blob.core.windows.net/[container]/foo*?[SAS]" "/path/to/dir" --recursive=true
Copy a single blob with SAS to another blob with SAS:
- azcopy cp "https://[srcaccount].blob.core.windows.net/[container]/[path/to/blob]?[SAS]" "https://[destaccount].blob.core.windows.net/[container]/[path/to/blob]?[SAS]"
Copy a single blob with SAS to another blob with OAuth token. Please use 'azcopy login' command first if you aren't logged in yet. Note that the OAuth token is used to access the destination storage account:
- azcopy cp "https://[srcaccount].blob.core.windows.net/[container]/[path/to/blob]?[SAS]" "https://[destaccount].blob.core.windows.net/[container]/[path/to/blob]"
Copy an entire directory from blob virtual directory with SAS to another blob virtual directory with SAS:
- azcopy cp "https://[srcaccount].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" "https://[destaccount].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true
Copy an entire account data from blob account with SAS to another blob account with SAS:
- azcopy cp "https://[srcaccount].blob.core.windows.net?[SAS]" "https://[destaccount].blob.core.windows.net?[SAS]" --recursive=true
Copy a single object from S3 with access key to blob with SAS:
- Set environment variable AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for S3 source.
- azcopy cp "https://s3.amazonaws.com/[bucket]/[object]" "https://[destaccount].blob.core.windows.net/[container]/[path/to/blob]?[SAS]"
Copy an entire directory from S3 with access key to blob virtual directory with SAS:
- Set environment variable AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for S3 source.
- azcopy cp "https://s3.amazonaws.com/[bucket]/[folder]" "https://[destaccount].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true
- Please refer to https://docs.aws.amazon.com/AmazonS3/latest/user-guide/using-folders.html for what [folder] means for S3.
Copy all buckets in S3 service with access key to blob account with SAS:
- Set environment variable AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for S3 source.
- azcopy cp "https://s3.amazonaws.com/" "https://[destaccount].blob.core.windows.net?[SAS]" --recursive=true
Copy all buckets in a S3 region with access key to blob account with SAS:
- Set environment variable AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for S3 source.
- azcopy cp "https://s3-[region].amazonaws.com/" "https://[destaccount].blob.core.windows.net?[SAS]" --recursive=true
`
// ===================================== ENV COMMAND ===================================== //
// Help text for `azcopy env`.
const envCmdShortDescription = "Shows the environment variables that can configure AzCopy's behavior"
const envCmdLongDescription = `Shows the environment variables that can configure AzCopy's behavior.`
// ===================================== JOBS COMMAND ===================================== //
// Help text for `azcopy jobs` and its sub-commands (list/show/resume).
const jobsCmdShortDescription = "Sub-commands related to managing jobs"
const jobsCmdLongDescription = "Sub-commands related to managing jobs."
const jobsCmdExample = "azcopy jobs show [jobID]"
// Help text for `azcopy jobs list`.
const listJobsCmdShortDescription = "Display information on all jobs"
const listJobsCmdLongDescription = `
Display information on all jobs.`
// Help text for `azcopy jobs show`.
const showJobsCmdShortDescription = "Show detailed information for the given job ID"
const showJobsCmdLongDescription = `
Show detailed information for the given job ID: if only the job ID is supplied without a flag, then the progress summary of the job is returned.
If the with-status flag is set, then the list of transfers in the job with the given value will be shown.`
// Help text for `azcopy jobs resume`.
const resumeJobsCmdShortDescription = "Resume the existing job with the given job ID"
const resumeJobsCmdLongDescription = `
Resume the existing job with the given job ID.`
// ===================================== LIST COMMAND ===================================== //
// Help text for `azcopy list`.
const listCmdShortDescription = "List the entities in a given resource"
const listCmdLongDescription = `List the entities in a given resource. Only Blob containers are supported at the moment.`
const listCmdExample = "azcopy list [containerURL]"
// ===================================== LOGIN COMMAND ===================================== //
const loginCmdShortDescription = "Log in to Azure Active Directory to access Azure Storage resources."
const loginCmdLongDescription = `Log in to Azure Active Directory to access Azure Storage resources.
Note that, to be authorized to your Azure Storage account, you must assign your user 'Storage Blob Data Contributor' role on the Storage account.
This command will cache encrypted login information for current user using the OS built-in mechanisms.
Please refer to the examples for more information.`
const loginCmdExample = `Log in interactively with default AAD tenant ID set to common:
- azcopy login
Log in interactively with a specified tenant ID:
- azcopy login --tenant-id "[TenantID]"
Log in using a VM's system-assigned identity:
- azcopy login --identity
Log in using a VM's user-assigned identity with a Client ID of the service identity:
- azcopy login --identity --identity-client-id "[ServiceIdentityClientID]"
Log in using a VM's user-assigned identity with an Object ID of the service identity:
- azcopy login --identity --identity-object-id "[ServiceIdentityObjectID]"
Log in using a VM's user-assigned identity with a Resource ID of the service identity:
- azcopy login --identity --identity-resource-id "/subscriptions/<subscriptionId>/resourcegroups/myRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myID"
`
// ===================================== LOGOUT COMMAND ===================================== //
const logoutCmdShortDescription = "Log out to terminate access to Azure Storage resources."
const logoutCmdLongDescription = `Log out to terminate access to Azure Storage resources.
This command will remove all the cached login information for the current user.`
// ===================================== MAKE COMMAND ===================================== //
// Help text for `azcopy make`.
const makeCmdShortDescription = "Create a container/share/filesystem"
const makeCmdLongDescription = `Create a container/share/filesystem represented by the given resource URL.`
const makeCmdExample = `
- azcopy make "https://[account-name].[blob,file,dfs].core.windows.net/[top-level-resource-name]"
`
// ===================================== REMOVE COMMAND ===================================== //
// Help text for `azcopy remove` (alias `rm`).
const removeCmdShortDescription = "Delete blobs or files from Azure Storage"
const removeCmdLongDescription = `Delete blobs or files from Azure Storage.`
const removeCmdExample = `
Remove a single blob with SAS:
- azcopy rm "https://[account].blob.core.windows.net/[container]/[path/to/blob]?[SAS]"
Remove an entire virtual directory with a SAS:
- azcopy rm "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true
Remove only the top blobs inside a virtual directory but not its sub-directories:
- azcopy rm "https://[account].blob.core.windows.net/[container]/[path/to/virtual/dir]" --recursive=false
Remove a subset of blobs in a virtual directory (ex: only jpg and pdf files, or if the blob name is "exactName"):
- azcopy rm "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true --include="*.jpg;*.pdf;exactName"
Remove an entire virtual directory but exclude certain blobs from the scope (ex: every blob that starts with foo or ends with bar):
- azcopy rm "https://[account].blob.core.windows.net/[container]/[path/to/directory]?[SAS]" --recursive=true --exclude="foo*;*bar"
`
// ===================================== SYNC COMMAND ===================================== //
const syncCmdShortDescription = "Replicate source to the destination location"
const syncCmdLongDescription = `
Replicate a source to a destination location. The last modified times are used for comparison, the file is skipped if the last modified time in the destination is more recent. The supported pairs are:
- local <-> Azure Blob (either SAS or OAuth authentication can be used)
Please note that the sync command differs from the copy command in several ways:
0. The recursive flag is on by default.
1. The source and destination should not contain patterns(such as * or ?).
2. The include/exclude flags can be a list of patterns matching to the file names. Please refer to the example section for illustration.
3. If there are files/blobs at the destination that are not present at the source, the user will be prompted to delete them. This prompt can be silenced by using the corresponding flags to automatically answer the deletion question.
Advanced:
Please note that AzCopy automatically detects the Content Type of the files when uploading from the local disk, based on the file extension or content (if no extension is specified).
The built-in lookup table is small but on Unix it is augmented by the local system's mime.types file(s) if available under one or more of these names:
- /etc/mime.types
- /etc/apache2/mime.types
- /etc/apache/mime.types
On Windows, MIME types are extracted from the registry.
`
const syncCmdExample = `
Sync a single file:
- azcopy sync "/path/to/file.txt" "https://[account].blob.core.windows.net/[container]/[path/to/blob]"
Same as above, but this time also compute MD5 hash of the file content and save it as the blob's Content-MD5 property.
- azcopy sync "/path/to/file.txt" "https://[account].blob.core.windows.net/[container]/[path/to/blob]" --put-md5
Sync an entire directory including its sub-directories (note that recursive is by default on):
- azcopy sync "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/virtual/dir]"
or
- azcopy sync "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/virtual/dir]" --put-md5
Sync only the top files inside a directory but not its sub-directories:
- azcopy sync "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/virtual/dir]" --recursive=false
Sync a subset of files in a directory (ex: only jpg and pdf files, or if the file name is "exactName"):
- azcopy sync "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/virtual/dir]" --include="*.jpg;*.pdf;exactName"
Sync an entire directory but exclude certain files from the scope (ex: every file that starts with foo or ends with bar):
- azcopy sync "/path/to/dir" "https://[account].blob.core.windows.net/[container]/[path/to/virtual/dir]" --exclude="foo*;*bar"
Note: if include/exclude flags are used together, only files matching the include patterns would be looked at, but those matching the exclude patterns would be always be ignored.
`
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"github.com/Azure/azure-storage-azcopy/common"
chk "gopkg.in/check.v1"
"path/filepath"
"strings"
)
// regular blob->local file download
// Exercises `azcopy cp <blobURL> <localPath>` for a single blob, iterating over
// names with plain ASCII, unicode, and percent-encoded characters. Each blob is
// downloaded twice: once targeting an explicit file path and once targeting the
// parent directory (where the destination name must come from the blob name).
func (s *cmdIntegrationSuite) TestDownloadSingleBlobToFile(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	for _, blobName := range []string{"singleblobisbest", "打麻将.txt", "%4509%4254$85140&"} {
		// set up the container with a single blob
		blobList := []string{blobName}
		scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
		c.Assert(containerURL, chk.NotNil)

		// set up the destination as a single file
		dstDirName := scenarioHelper{}.generateLocalDirectory(c)
		dstFileName := "whatever"
		scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, blobList)

		// set up interceptor
		mockedRPC := interceptor{}
		Rpc = mockedRPC.intercept
		mockedRPC.init()

		// construct the raw input to simulate user input
		rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, blobList[0])
		raw := getDefaultCopyRawInput(rawBlobURLWithSAS.String(), filepath.Join(dstDirName, dstFileName))

		// the file was created after the blob, so no sync should happen
		runCopyAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)
			validateDownloadTransfersAreScheduled(c, "", "", []string{""}, mockedRPC)
		})

		// clean the RPC for the next test
		mockedRPC.reset()

		// now target the destination directory, the result should be the same
		raw = getDefaultCopyRawInput(rawBlobURLWithSAS.String(), dstDirName)

		// the file was created after the blob, so no sync should happen
		runCopyAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)

			// verify explicitly since the source and destination names will be different:
			// the source is "" since the given URL points to the blob itself
			// the destination should be the blob name, since the given local path points to the parent dir
			c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
			c.Assert(mockedRPC.transfers[0].Source, chk.Equals, "")
			c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, common.AZCOPY_PATH_SEPARATOR_STRING+blobName)
		})
	}
}
// regular container->directory download
// Verifies that a recursive container download schedules one transfer per blob,
// and that the same command without --recursive fails and schedules nothing.
func (s *cmdIntegrationSuite) TestDownloadBlobContainer(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultCopyRawInput(rawContainerURLWithSAS.String(), dstDirName)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobList))

		// validate that the right transfers were sent
		validateDownloadTransfersAreScheduled(c, common.AZCOPY_PATH_SEPARATOR_STRING, common.AZCOPY_PATH_SEPARATOR_STRING+containerName+common.AZCOPY_PATH_SEPARATOR_STRING, blobList, mockedRPC)
	})

	// turn off recursive, this time nothing should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// regular vdir->dir download
// Same shape as TestDownloadBlobContainer, but the source is a virtual
// directory inside the container: the vdir prefix must be shaved off the
// scheduled destination paths.
func (s *cmdIntegrationSuite) TestDownloadBlobVirtualDirectory(c *chk.C) {
	bsu := getBSU()
	vdirName := "vdir1"

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, vdirName+common.AZCOPY_PATH_SEPARATOR_STRING)
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, vdirName)
	raw := getDefaultCopyRawInput(rawContainerURLWithSAS.String(), dstDirName)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobList))

		// validate that the right transfers were sent
		expectedTransfers := scenarioHelper{}.shaveOffPrefix(blobList, vdirName+common.AZCOPY_PATH_SEPARATOR_STRING)
		validateDownloadTransfersAreScheduled(c, common.AZCOPY_PATH_SEPARATOR_STRING,
			common.AZCOPY_PATH_SEPARATOR_STRING+vdirName+common.AZCOPY_PATH_SEPARATOR_STRING, expectedTransfers, mockedRPC)
	})

	// turn off recursive, this time nothing should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// blobs(from pattern)->directory download
// TODO the current pattern matching behavior is inconsistent with the posix filesystem
// update test after re-writing copy enumerators
// Downloads with a `*.pdf` pattern appended to the container URL: recursively
// only the matching blobs (at any depth) should transfer; non-recursively only
// the single top-level match should.
func (s *cmdIntegrationSuite) TestDownloadBlobContainerWithPattern(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobsToIgnore := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobsToIgnore), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to include
	blobsToInclude := []string{"important.pdf", "includeSub/amazing.pdf", "includeSub/wow/amazing.pdf"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToInclude)

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	rawContainerURLWithSAS.Path += "/*.pdf"
	raw := getDefaultCopyRawInput(rawContainerURLWithSAS.String(), dstDirName)
	raw.recursive = true

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobsToInclude))

		// validate that the right transfers were sent
		validateDownloadTransfersAreScheduled(c, common.AZCOPY_PATH_SEPARATOR_STRING, common.AZCOPY_PATH_SEPARATOR_STRING,
			blobsToInclude, mockedRPC)
	})

	// turn off recursive, this time nothing should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// only the top pdf should be included
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Source, chk.Equals, mockedRPC.transfers[0].Destination)
		c.Assert(strings.HasSuffix(mockedRPC.transfers[0].Source, ".pdf"), chk.Equals, true)
		c.Assert(strings.Contains(mockedRPC.transfers[0].Source[1:], common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
	})
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"github.com/Azure/azure-storage-azcopy/common"
"time"
)
// the interceptor gathers/saves the job part orders for validation
// It stands in for the real Rpc function in tests (assign its intercept
// method to the package-level Rpc variable).
type interceptor struct {
	transfers   []common.CopyTransfer // every transfer seen across intercepted job part orders
	lastRequest interface{}           // the most recent raw request object received
}
// intercept mimics the RPC dispatcher: it records CopyJobPartOrder requests
// instead of scheduling real work, fakes a completed-job summary for
// ListSyncJobSummary, and panics for unimplemented commands.
func (i *interceptor) intercept(cmd common.RpcCmd, request interface{}, response interface{}) {
	switch cmd {
	case common.ERpcCmd.CopyJobPartOrder():
		// cache the transfers
		copyRequest := *request.(*common.CopyJobPartOrderRequest)
		i.transfers = append(i.transfers, copyRequest.Transfers...)
		i.lastRequest = request

		// mock the result
		*(response.(*common.CopyJobPartOrderResponse)) = common.CopyJobPartOrderResponse{JobStarted: true}
	case common.ERpcCmd.ListSyncJobSummary():
		copyRequest := *request.(*common.CopyJobPartOrderRequest)

		// fake the result saying that job is already completed
		// doing so relies on the mockedLifecycleManager not quitting the application
		*(response.(*common.ListSyncJobSummaryResponse)) = common.ListSyncJobSummaryResponse{
			Timestamp:          time.Now().UTC(),
			JobID:              copyRequest.JobID,
			ErrorMsg:           "",
			JobStatus:          common.EJobStatus.Completed(),
			CompleteJobOrdered: true,
			FailedTransfers:    []common.TransferDetail{},
		}
	// NOTE(review): Go switch cases do not fall through by default, so the
	// empty cases below are silent no-ops; only GetJobFromTo (which has an
	// explicit fallthrough) reaches the panic. Confirm this asymmetry is
	// intentional rather than a missing comma-separated case list.
	case common.ERpcCmd.ListJobs():
	case common.ERpcCmd.ListJobSummary():
	case common.ERpcCmd.ListJobTransfers():
	case common.ERpcCmd.PauseJob():
	case common.ERpcCmd.CancelJob():
	case common.ERpcCmd.ResumeJob():
	case common.ERpcCmd.GetJobFromTo():
		fallthrough
	default:
		panic("RPC mock not implemented")
	}
}
// init installs the mocked lifecycle manager so that the command under test
// cannot terminate the test process (e.g. via Exit) or print to the console.
func (i *interceptor) init() {
	// mock out the lifecycle manager so that it can no longer terminate the application
	glcm = mockedLifecycleManager{}
}
// reset discards everything recorded so far, readying the interceptor for the
// next test case. The transfers slice is re-initialized to an empty (non-nil)
// slice, matching the previous behavior.
func (i *interceptor) reset() {
	i.lastRequest = nil
	i.transfers = []common.CopyTransfer{}
}
// this lifecycle manager substitute does not perform any action
// Every method is a no-op (Prompt and GetEnvironmentVariable return ""), so
// commands under test produce no console output and never exit the process.
type mockedLifecycleManager struct{}

func (mockedLifecycleManager) Progress(common.OutputBuilder) {}
func (mockedLifecycleManager) Init(common.OutputBuilder) {}
func (mockedLifecycleManager) Info(string) {}
func (mockedLifecycleManager) Prompt(string) string { return "" }
func (mockedLifecycleManager) Exit(common.OutputBuilder, common.ExitCode) {}
func (mockedLifecycleManager) Error(string) {}
func (mockedLifecycleManager) SurrenderControl() {}
func (mockedLifecycleManager) InitiateProgressReporting(common.WorkController, bool) {}
func (mockedLifecycleManager) GetEnvironmentVariable(common.EnvironmentVariable) string { return "" }
func (mockedLifecycleManager) SetOutputFormat(common.OutputFormat) {}
// dummyProcessor records every storedObject handed to it, letting enumeration
// logic be validated without scheduling real transfers.
type dummyProcessor struct {
	record []storedObject
}

// process appends the given storedObject to the record; it never returns an error.
func (d *dummyProcessor) process(storedObject storedObject) (err error) {
	d.record = append(d.record, storedObject)
	return
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"fmt"
"net/url"
"strings"
"time"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
chk "gopkg.in/check.v1"
)
// Additional S2S migration cases, besides E2E smoke testing cases for S3/blob/file source contained in test_service_to_service_copy.py
// Default flag values used when building raw copy-command input for S2S tests.
const (
	defaultLogVerbosityForCopy       = "WARNING"
	defaultOutputFormatForCopy       = "text"
	defaultBlobTypeForCopy           = "None"
	defaultBlockBlobTierForCopy      = "None"
	defaultPageBlobTierForCopy       = "None"
	defaultS2SPreserveProperties     = true
	defaultS2SPreserveAccessTier     = true
	defaultS2SGetPropertiesInBackend = true
	defaultS2SSourceChangeValidation = true
	debugMode                        = false // keep the debugMode temporarily, as merging happens frequently, and this might be useful for solving potential issue.
)

// NOTE(review): "Invalide" is a typo in this identifier; left unchanged because
// the variable is package-level and may be referenced from other files.
var defaultS2SInvalideMetadataHandleOption = common.DefaultInvalidMetadataHandleOption
// getDefaultRawCopyInput builds a rawCopyCmdArgs for the given source and
// destination, populated with the test-suite default flag values above
// (recursive on, WARNING verbosity, text output, S2S properties preserved).
func getDefaultRawCopyInput(src, dst string) rawCopyCmdArgs {
	return rawCopyCmdArgs{
		src:                            src,
		dst:                            dst,
		recursive:                      true,
		logVerbosity:                   defaultLogVerbosityForCopy,
		output:                         defaultOutputFormatForCopy,
		blobType:                       defaultBlobTypeForCopy,
		blockBlobTier:                  defaultBlockBlobTierForCopy,
		pageBlobTier:                   defaultPageBlobTierForCopy,
		md5ValidationOption:            common.DefaultHashValidationOption.String(),
		s2sGetPropertiesInBackend:      defaultS2SGetPropertiesInBackend,
		s2sPreserveAccessTier:          defaultS2SPreserveAccessTier,
		s2sPreserveProperties:          defaultS2SPreserveProperties,
		s2sSourceChangeValidation:      defaultS2SSourceChangeValidation,
		s2sInvalidMetadataHandleOption: defaultS2SInvalideMetadataHandleOption.String(),
	}
}
// validateS2STransfersAreScheduled checks that the mocked RPC received exactly
// the expected set of S2S transfers: the count matches, each transfer's source
// and destination agree once their respective directory prefixes are stripped,
// and every stripped destination path appears in expectedTransfers.
func validateS2STransfersAreScheduled(c *chk.C, srcDirName string, dstDirName string, expectedTransfers []string, mockedRPC interceptor) {
	// validate that the right number of transfers were scheduled
	c.Assert(len(mockedRPC.transfers), chk.Equals, len(expectedTransfers))

	if debugMode {
		fmt.Println("expectedTransfers: ")
		printTransfers(expectedTransfers)
		fmt.Println("srcDirName: ", srcDirName)
		fmt.Println("dstDirName: ", dstDirName)
	}

	// validate that the right transfers were sent
	lookupMap := scenarioHelper{}.convertListToMap(expectedTransfers)
	for _, transfer := range mockedRPC.transfers {
		if debugMode {
			fmt.Println("transfer.Source: ", transfer.Source)
			fmt.Println("transfer.Destination: ", transfer.Destination)
		}

		srcRelativeFilePath, _ := url.PathUnescape(transfer.Source)
		dstRelativeFilePath, _ := url.PathUnescape(transfer.Destination)
		unescapedSrcDir, _ := url.PathUnescape(srcDirName)
		unescapedDstDir, _ := url.PathUnescape(dstDirName)

		srcRelativeFilePath = strings.Replace(srcRelativeFilePath, unescapedSrcDir, "", 1)
		// BUG FIX: this previously stripped the destination prefix from
		// srcRelativeFilePath instead of dstRelativeFilePath, which made the
		// equality assertion below compare the source path with itself.
		dstRelativeFilePath = strings.Replace(dstRelativeFilePath, unescapedDstDir, "", 1)

		if debugMode {
			fmt.Println("srcRelativeFilePath: ", srcRelativeFilePath)
			fmt.Println("dstRelativeFilePath: ", dstRelativeFilePath)
		}

		// the relative paths should be equal
		c.Assert(srcRelativeFilePath, chk.Equals, dstRelativeFilePath)

		// look up the transfer is expected
		_, dstExist := lookupMap[dstRelativeFilePath]
		c.Assert(dstExist, chk.Equals, true)
	}
}
// printTransfers writes each transfer path on its own line (debug aid used
// when debugMode is enabled).
func printTransfers(ts []string) {
	for idx := 0; idx < len(ts); idx++ {
		fmt.Println(ts[idx])
	}
}
// TestS2SCopyFromS3ToBlobWithBucketNameNeedBeResolved verifies that an S3
// bucket whose name is invalid for an Azure container (dashes adjacent to dots,
// triple dashes) gets resolved to a valid container name during S2S copy, and
// that all objects are scheduled under the resolved container.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ToBlobWithBucketNameNeedBeResolved(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)

	// invalidPrefix violates Azure container naming; resolvedPrefix is the
	// name the resolver is expected to produce from it.
	invalidPrefix := "invalid---bucketname.for---azure"
	resolvedPrefix := "invalid-3-bucketname-for-3-azure"

	// Generate source bucket
	bucketName := generateBucketNameWithCustomizedPrefix(invalidPrefix)
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)

	objectList := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName, "", false)
	c.Assert(len(objectList), chk.Not(chk.Equals), 0)

	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawSrcS3BucketURL := scenarioHelper{}.getRawS3BucketURL(c, "", bucketName) // Use default region
	rawDstBlobServiceURLWithSAS := scenarioHelper{}.getRawBlobServiceURLWithSAS(c)
	raw := getDefaultRawCopyInput(rawSrcS3BucketURL.String(), rawDstBlobServiceURLWithSAS.String())

	// bucket should be resolved, and objects should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(objectList))

		// Check container with resolved name has been created
		resolvedBucketName := strings.Replace(bucketName, invalidPrefix, resolvedPrefix, 1)
		blobServiceURL := scenarioHelper{}.getBlobServiceURL(c)
		containerURL := blobServiceURL.NewContainerURL(resolvedBucketName)
		c.Assert(scenarioHelper{}.containerExists(containerURL), chk.Equals, true)
		defer deleteContainer(c, containerURL)

		// Check correct entry are scheduled.
		// Example:
		// sourceURL pass to azcopy: https://s3.amazonaws.com/invalid---bucketname.for---azures2scopyfroms3toblobwithbucketna
		// destURL pass to azcopy: https://jiacstgcanary01.blob.core.windows.net
		// transfer.Source by design be scheduled: /tops3objects2scopyfroms3toblobwithbucketnameneedberesolved4243293354900
		// transfer.Destination by design be scheduled: /invalid-3-bucketname-for-3-azures2scopyfroms3toblobwithbucketna/tops3objects2scopyfroms3toblobwithbucketnameneedberesolved4243293354900
		// Nothing should be replaced during matching for source, and resolved bucket name should be replaced for destination.
		validateS2STransfersAreScheduled(c, "", common.AZCOPY_PATH_SEPARATOR_STRING+resolvedBucketName, objectList, mockedRPC)
	})
}
// TestS2SCopyFromS3ToBlobWithWildcardInSrcAndBucketNameNeedBeResolved verifies that when the
// source S3 URL contains a wildcard and the matched bucket has a name that is invalid for
// Azure, the bucket name is resolved to a valid container name on the destination side.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ToBlobWithWildcardInSrcAndBucketNameNeedBeResolved(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	invalidPrefix := "invalid----bucketname.for-azure"
	resolvedPrefix := "invalid-4-bucketname-for-azure"
	// Generate a source bucket whose name needs resolution.
	bucketName := generateBucketNameWithCustomizedPrefix(invalidPrefix)
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)
	objectList := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName, "", false)
	c.Assert(len(objectList), chk.Not(chk.Equals), 0)
	// Set up the RPC interceptor so scheduled transfers can be captured.
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// Construct the raw user input; the invalid prefix in the source URL is replaced by a wildcard.
	rawSrcS3BucketURL := scenarioHelper{}.getRawS3BucketURL(c, "", bucketName) // Use default region
	rawDstBlobServiceURLWithSAS := scenarioHelper{}.getRawBlobServiceURLWithSAS(c)
	srcWithWildcard := strings.Replace(rawSrcS3BucketURL.String(), invalidPrefix, "invalid*", 1)
	raw := getDefaultRawCopyInput(srcWithWildcard, rawDstBlobServiceURLWithSAS.String())
	fmt.Println(raw.src) // NOTE(review): looks like leftover debug output — consider removing
	// The bucket should be resolved, and objects should be scheduled for transfer.
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// Validate that the right number of transfers were scheduled.
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(objectList))
		// A container with the resolved name must have been created.
		resolvedBucketName := strings.Replace(bucketName, invalidPrefix, resolvedPrefix, 1)
		blobServiceURL := scenarioHelper{}.getBlobServiceURL(c)
		containerURL := blobServiceURL.NewContainerURL(resolvedBucketName)
		c.Assert(scenarioHelper{}.containerExists(containerURL), chk.Equals, true)
		defer deleteContainer(c, containerURL)
		// Check the correct entries are scheduled.
		// Example:
		// sourceURL passed to azcopy: https://s3.amazonaws.com/invalid*s2scopyfroms3toblobwithwildcardi
		// destURL passed to azcopy: https://jiacstgcanary01.blob.core.windows.net
		// transfer.Source by design be scheduled: /invalid----bucketname.for-azures2scopyfroms3toblobwithwildcardi/sub1/sub3/sub5/s3objects2scopyfroms3toblobwithwildcardinsrcandbucketnameneedberesolved435110281300
		// transfer.Destination by design be scheduled: /invalid-4-bucketname-for-azures2scopyfroms3toblobwithwildcardi/sub1/sub3/sub5/s3objects2scopyfroms3toblobwithwildcardinsrcandbucketnameneedberesolved435110281300
		// The original bucket name is stripped during matching for the source, and the
		// resolved bucket name is stripped for the destination.
		validateS2STransfersAreScheduled(c, common.AZCOPY_PATH_SEPARATOR_STRING+bucketName, common.AZCOPY_PATH_SEPARATOR_STRING+resolvedBucketName, objectList, mockedRPC)
	})
}
// This is negative because generateBucketNameWithCustomizedPrefix will return a bucket name with length 63,
// and resolving logic will resolve -- to -2- which means the length to be 64. This exceeds valid container name, so error will be returned.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ToBlobWithBucketNameNeedBeResolvedNegative(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	invalidPrefix := "invalid.bucketname--for.azure"
	// resolvedPrefix := "invalid-bucketname-2-for-azure"
	// Generate source bucket
	bucketName := generateBucketNameWithCustomizedPrefix(invalidPrefix)
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)
	objectList := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName, "", false)
	c.Assert(len(objectList), chk.Not(chk.Equals), 0)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcS3BucketURL := scenarioHelper{}.getRawS3BucketURL(c, "", bucketName) // Use default region
	rawDstBlobServiceURLWithSAS := scenarioHelper{}.getRawBlobServiceURLWithSAS(c)
	raw := getDefaultRawCopyInput(rawSrcS3BucketURL.String(), rawDstBlobServiceURLWithSAS.String())
	// bucket resolution should FAIL: the resolved name would exceed the container-name
	// length limit, so an error is expected instead of scheduled transfers
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)
		c.Assert(strings.Contains(err.Error(), "the source bucket has invalid name for Azure"), chk.Equals, true)
	})
}
// Copy from virtual directory to container, with normal encoding ' ' as ' '.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ToBlobWithSpaceInSrcNotEncoded(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	// Generate source bucket
	bucketName := generateBucketName()
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)
	dstContainerName := generateContainerName()
	objectList := []string{"space dir/space object"}
	scenarioHelper{}.generateObjects(c, s3Client, bucketName, objectList)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input; the space is passed through unencoded
	rawSrcS3BucketURL := scenarioHelper{}.getRawS3BucketURL(c, "", bucketName) // Use default region
	rawSrcS3DirStr := rawSrcS3BucketURL.String() + "/space dir"
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcS3DirStr, rawDstContainerURLWithSAS.String())
	// the single object under the virtual directory should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		// common.AZCOPY_PATH_SEPARATOR_STRING added for JobPartPlan file change.
		// The destination is URL encoded, as go's URL method do the encoding.
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/space%20dir/space%20object")
	})
}
// Copy from virtual directory to container, with special encoding ' ' to '+' by S3 management portal.
// '+' is handled in copy.go before extract the SourceRoot.
// The scheduled transfer would be URL encoded no matter what's the raw source/destination provided by user.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ToBlobWithSpaceInSrcEncodedAsPlus(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	// Generate source bucket
	bucketName := generateBucketName()
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)
	dstContainerName := generateContainerName()
	objectList := []string{"space dir/space object"}
	scenarioHelper{}.generateObjects(c, s3Client, bucketName, objectList)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input; here the space is encoded as '+'
	rawSrcS3BucketURL := scenarioHelper{}.getRawS3BucketURL(c, "", bucketName) // Use default region
	rawSrcS3DirStr := rawSrcS3BucketURL.String() + "/space+dir"
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcS3DirStr, rawDstContainerURLWithSAS.String())
	// the single object under the virtual directory should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		// common.AZCOPY_PATH_SEPARATOR_STRING added for JobPartPlan file change.
		// The destination is URL encoded, as go's URL method do the encoding.
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/space%20dir/space%20object")
	})
}
// By design, when source directory contains objects with suffix '/', objects with suffix '/' should be ignored.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ToBlobWithObjectUsingSlashAsSuffix(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	// Generate source bucket
	bucketName := generateBucketName()
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)
	dstContainerName := generateContainerName()
	// the first entry ends with '/' and therefore represents a "directory marker" object
	objectList := []string{"fileConsiderdAsDirectory/", "file", "sub1/file"}
	scenarioHelper{}.generateObjects(c, s3Client, bucketName, objectList)
	validateObjectList := []string{"/file", "/sub1/file"} // common.AZCOPY_PATH_SEPARATOR_STRING added for JobPartPlan file change.
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcS3BucketURL := scenarioHelper{}.getRawS3BucketURL(c, "", bucketName) // Use default region
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcS3BucketURL.String(), rawDstContainerURLWithSAS.String())
	// only the non-directory-marker objects should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(validateObjectList))
		validateS2STransfersAreScheduled(c, "", "", validateObjectList, mockedRPC)
	})
}
// Account-level copy: buckets from different regions should all be enumerated and
// transferred when the default (region-less) S3 endpoint is used as source.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3AccountWithBucketInDifferentRegionsAndListUseDefaultEndpoint(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	// Cleanup the source S3 account
	cleanS3Account(c, s3Client)
	// Generate source bucket
	bucketName1 := generateBucketNameWithCustomizedPrefix("default-region")
	createNewBucketWithName(c, s3Client, bucketName1, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName1)
	bucketName2 := generateBucketNameWithCustomizedPrefix("us-west-2-region")
	bucketRegion2 := "us-west-2"
	createNewBucketWithName(c, s3Client, bucketName2, createS3ResOptions{Location: bucketRegion2})
	defer deleteBucket(c, s3Client, bucketName2)
	objectList1 := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName1, "", true)
	c.Assert(len(objectList1), chk.Not(chk.Equals), 0)
	objectList2 := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName2, "", true)
	c.Assert(len(objectList2), chk.Not(chk.Equals), 0)
	validateObjectList := append(objectList1, objectList2...)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcS3AccountURL := scenarioHelper{}.getRawS3AccountURL(c, "") // Use default region
	rawDstBlobServiceURLWithSAS := scenarioHelper{}.getRawBlobServiceURLWithSAS(c)
	raw := getDefaultRawCopyInput(rawSrcS3AccountURL.String(), rawDstBlobServiceURLWithSAS.String())
	// objects from BOTH buckets should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateS2STransfersAreScheduled(c, "", "", validateObjectList, mockedRPC)
	})
}
// Account-level copy with a region-specific S3 endpoint: only buckets in that
// region should be enumerated and transferred.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3AccountWithBucketInDifferentRegionsAndListUseSpecificRegion(c *chk.C) {
	specificRegion := "us-west-2"
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	// Cleanup the source S3 account
	cleanS3Account(c, s3Client)
	// Generate source bucket
	bucketName1 := generateBucketNameWithCustomizedPrefix("default-region")
	createNewBucketWithName(c, s3Client, bucketName1, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName1)
	bucketName2 := generateBucketNameWithCustomizedPrefix(specificRegion)
	createNewBucketWithName(c, s3Client, bucketName2, createS3ResOptions{Location: specificRegion})
	defer deleteBucket(c, s3Client, bucketName2)
	time.Sleep(60 * time.Second) // TODO: review and remove this, which was put here as a workaround to issues with buckets being reported as not existing
	objectList1 := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName1, "", true)
	c.Assert(len(objectList1), chk.Not(chk.Equals), 0)
	objectList2 := scenarioHelper{}.generateCommonRemoteScenarioForS3(c, s3Client, bucketName2, "", true)
	c.Assert(len(objectList2), chk.Not(chk.Equals), 0)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcS3AccountURL := scenarioHelper{}.getRawS3AccountURL(c, specificRegion)
	rawDstBlobServiceURLWithSAS := scenarioHelper{}.getRawBlobServiceURLWithSAS(c)
	raw := getDefaultRawCopyInput(rawSrcS3AccountURL.String(), rawDstBlobServiceURLWithSAS.String())
	// only objects from the bucket in the specific region should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateS2STransfersAreScheduled(c, "", "", objectList2, mockedRPC)
	})
}
// Copy a single S3 object (top-level, then nested) into a blob container; only the
// file name (not its virtual-directory path) is used for the destination.
func (s *cmdIntegrationSuite) TestS2SCopyFromS3ObjectToBlobContainer(c *chk.C) {
	s3Client, err := createS3ClientWithMinio(createS3ResOptions{})
	c.Assert(err, chk.IsNil)
	// Generate source bucket
	bucketName := generateBucketName()
	createNewBucketWithName(c, s3Client, bucketName, createS3ResOptions{})
	defer deleteBucket(c, s3Client, bucketName)
	dstContainerName := generateContainerName()
	objectList := []string{"file", "sub/file2"}
	scenarioHelper{}.generateObjects(c, s3Client, bucketName, objectList)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcS3ObjectURL := scenarioHelper{}.getRawS3ObjectURL(c, "", bucketName, "file") // Use default region
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcS3ObjectURL.String(), rawDstContainerURLWithSAS.String())
	// the single top-level object should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/file")
	})
	mockedRPC.reset()
	rawSrcS3ObjectURL = scenarioHelper{}.getRawS3ObjectURL(c, "", bucketName, "sub/file2") // Use default region
	rawDstContainerURLWithSAS = scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw = getDefaultRawCopyInput(rawSrcS3ObjectURL.String(), rawDstContainerURLWithSAS.String())
	// the nested object should be scheduled under its bare file name
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/file2")
	})
}
// Copy from container to container, preserve blob tier.
func (s *cmdIntegrationSuite) TestS2SCopyFromContainerToContainerPreserveBlobTier(c *chk.C) {
	bsu := getBSU()
	srcContainerURL, srcContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, srcContainerURL)
	c.Assert(srcContainerURL, chk.NotNil)
	blobName := "blobWithCoolTier"
	scenarioHelper{}.generateBlockBlobWithAccessTier(c, srcContainerURL, blobName, azblob.AccessTierCool)
	dstContainerURL, dstContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, dstContainerURL)
	c.Assert(dstContainerURL, chk.NotNil)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, srcContainerName)
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcContainerURLWithSAS.String(), rawDstContainerURLWithSAS.String())
	// the blob should be scheduled with its Cool access tier preserved (default behavior)
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateS2STransfersAreScheduled(c,
			srcContainerURL.String(), dstContainerURL.String(), []string{common.AZCOPY_PATH_SEPARATOR_STRING + blobName}, mockedRPC) // common.AZCOPY_PATH_SEPARATOR_STRING added for JobPartPlan file change.
		c.Assert(mockedRPC.transfers[0].BlobTier, chk.Equals, azblob.AccessTierCool)
	})
}
// Copy from container to container, and don't preserve blob tier.
func (s *cmdIntegrationSuite) TestS2SCopyFromContainerToContainerNoPreserveBlobTier(c *chk.C) {
	bsu := getBSU()
	srcContainerURL, srcContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, srcContainerURL)
	c.Assert(srcContainerURL, chk.NotNil)
	blobName := "blobWithCoolTier"
	scenarioHelper{}.generateBlockBlobWithAccessTier(c, srcContainerURL, blobName, azblob.AccessTierCool)
	dstContainerURL, dstContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, dstContainerURL)
	c.Assert(dstContainerURL, chk.NotNil)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, srcContainerName)
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcContainerURLWithSAS.String(), rawDstContainerURLWithSAS.String())
	// explicitly opt out of tier preservation
	raw.s2sPreserveAccessTier = false
	// the blob should be scheduled WITHOUT its source access tier (tier = None)
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateS2STransfersAreScheduled(c,
			srcContainerURL.String(), dstContainerURL.String(), []string{common.AZCOPY_PATH_SEPARATOR_STRING + blobName}, mockedRPC) // common.AZCOPY_PATH_SEPARATOR_STRING added for JobPartPlan file change.
		c.Assert(mockedRPC.transfers[0].BlobTier, chk.Equals, azblob.AccessTierNone)
	})
}
// Copy a single blob (top-level, then nested under a virtual directory) into another
// container; only the file name is used for the destination.
func (s *cmdIntegrationSuite) TestS2SCopyFromSingleBlobToBlobContainer(c *chk.C) {
	bsu := getBSU()
	srcContainerURL, srcContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, srcContainerURL)
	c.Assert(srcContainerURL, chk.NotNil)
	objectList := []string{"file", "sub/file2"}
	scenarioHelper{}.generateBlobsFromList(c, srcContainerURL, objectList)
	dstContainerURL, dstContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, dstContainerURL)
	c.Assert(dstContainerURL, chk.NotNil)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcBlobURL := scenarioHelper{}.getRawBlobURLWithSAS(c, srcContainerName, "file") // Use default region
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcBlobURL.String(), rawDstContainerURLWithSAS.String())
	// the single top-level blob should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/file")
	})
	mockedRPC.reset()
	rawSrcBlobURL = scenarioHelper{}.getRawBlobURLWithSAS(c, srcContainerName, "sub/file2") // Use default region
	rawDstContainerURLWithSAS = scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw = getDefaultRawCopyInput(rawSrcBlobURL.String(), rawDstContainerURLWithSAS.String())
	// the nested blob should be scheduled under its bare file name
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/file2")
	})
}
// Copy a single Azure file into a blob container; the file should be scheduled
// for transfer under its own name.
func (s *cmdIntegrationSuite) TestS2SCopyFromSingleAzureFileToBlobContainer(c *chk.C) {
	bsu := getBSU()
	fsu := getFSU()
	srcShareURL, srcShareName := createNewShare(c, fsu)
	defer deleteShare(c, srcShareURL)
	c.Assert(srcShareURL, chk.NotNil)
	scenarioHelper{}.generateFlatFiles(c, srcShareURL, []string{"file"})
	dstContainerURL, dstContainerName := createNewContainer(c, bsu)
	defer deleteContainer(c, dstContainerURL)
	c.Assert(dstContainerURL, chk.NotNil)
	// set up interceptor
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()
	// construct the raw input to simulate user input
	rawSrcFileURL := scenarioHelper{}.getRawFileURLWithSAS(c, srcShareName, "file") // Use default region
	rawDstContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, dstContainerName)
	raw := getDefaultRawCopyInput(rawSrcFileURL.String(), rawDstContainerURLWithSAS.String())
	// the single file should be scheduled for transfer
	runCopyAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 1)
		c.Assert(mockedRPC.transfers[0].Destination, chk.Equals, "/file")
	})
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
chk "gopkg.in/check.v1"
"time"
)
// syncComparatorSuite groups the unit tests for the sync source/destination comparators.
type syncComparatorSuite struct{}

// Register the suite with gocheck.
var _ = chk.Suite(&syncComparatorSuite{})
// TestSyncSourceComparator exercises the source comparator: a source object is scheduled
// for transfer when it is absent at the destination, or newer than the destination copy.
func (s *syncComparatorSuite) TestSyncSourceComparator(c *chk.C) {
	dummyCopyScheduler := dummyProcessor{}
	srcMD5 := []byte{'s'}
	destMD5 := []byte{'d'}
	// set up the indexer as well as the source comparator
	indexer := newObjectIndexer()
	sourceComparator := newSyncSourceComparator(indexer, dummyCopyScheduler.process)
	// create a sample destination object
	sampleDestinationObject := storedObject{name: "test", relativePath: "/usr/test", lastModifiedTime: time.Now(), md5: destMD5}
	// test the comparator in case a given source object is not present at the destination
	// meaning no entry in the index, so the comparator should pass the given object to schedule a transfer
	compareErr := sourceComparator.processIfNecessary(storedObject{name: "only_at_source", relativePath: "only_at_source", lastModifiedTime: time.Now(), md5: srcMD5})
	c.Assert(compareErr, chk.Equals, nil)
	// check the source object was indeed scheduled
	c.Assert(len(dummyCopyScheduler.record), chk.Equals, 1)
	c.Assert(dummyCopyScheduler.record[0].md5, chk.DeepEquals, srcMD5)
	// reset the processor so that it's empty
	dummyCopyScheduler = dummyProcessor{}
	// test the comparator in case a given source object is present at the destination
	// and it has a later modified time, so the comparator should pass the given object to schedule a transfer
	err := indexer.store(sampleDestinationObject)
	c.Assert(err, chk.IsNil)
	compareErr = sourceComparator.processIfNecessary(storedObject{name: "test", relativePath: "/usr/test", lastModifiedTime: time.Now().Add(time.Hour), md5: srcMD5})
	c.Assert(compareErr, chk.Equals, nil)
	// check the source object was indeed scheduled
	c.Assert(len(dummyCopyScheduler.record), chk.Equals, 1)
	c.Assert(dummyCopyScheduler.record[0].md5, chk.DeepEquals, srcMD5)
	// the consumed entry should have been removed from the index
	c.Assert(len(indexer.indexMap), chk.Equals, 0)
	// reset the processor so that it's empty
	dummyCopyScheduler = dummyProcessor{}
	// test the comparator in case a given source object is present at the destination
	// but it has an earlier modified time compared to the one at the destination
	// meaning that the source object is considered stale, so no transfer should be scheduled
	err = indexer.store(sampleDestinationObject)
	c.Assert(err, chk.IsNil)
	compareErr = sourceComparator.processIfNecessary(storedObject{name: "test", relativePath: "/usr/test", lastModifiedTime: time.Now().Add(-time.Hour), md5: srcMD5})
	c.Assert(compareErr, chk.Equals, nil)
	// check no source object was scheduled
	c.Assert(len(dummyCopyScheduler.record), chk.Equals, 0)
	c.Assert(len(indexer.indexMap), chk.Equals, 0)
}
// TestSyncDestinationComparator exercises the destination comparator: a destination object
// absent at the source is deleted; if the destination copy is stale, the source object
// is scheduled for transfer; if the destination is newer, nothing happens.
func (s *syncComparatorSuite) TestSyncDestinationComparator(c *chk.C) {
	dummyCopyScheduler := dummyProcessor{}
	dummyCleaner := dummyProcessor{}
	srcMD5 := []byte{'s'}
	destMD5 := []byte{'d'}
	// set up the indexer as well as the destination comparator
	indexer := newObjectIndexer()
	destinationComparator := newSyncDestinationComparator(indexer, dummyCopyScheduler.process, dummyCleaner.process)
	// create a sample source object
	sampleSourceObject := storedObject{name: "test", relativePath: "/usr/test", lastModifiedTime: time.Now(), md5: srcMD5}
	// test the comparator in case a given destination object is not present at the source
	// meaning it is an extra file that needs to be deleted, so the comparator should pass the given object to the destinationCleaner
	compareErr := destinationComparator.processIfNecessary(storedObject{name: "only_at_dst", relativePath: "only_at_dst", lastModifiedTime: time.Now(), md5: destMD5})
	c.Assert(compareErr, chk.Equals, nil)
	// verify that destination object is being deleted
	c.Assert(len(dummyCopyScheduler.record), chk.Equals, 0)
	c.Assert(len(dummyCleaner.record), chk.Equals, 1)
	c.Assert(dummyCleaner.record[0].md5, chk.DeepEquals, destMD5)
	// reset dummy processors
	dummyCopyScheduler = dummyProcessor{}
	dummyCleaner = dummyProcessor{}
	// test the comparator in case a given destination object is present at the source
	// and it has a later modified time, since the source data is stale,
	// no transfer happens
	err := indexer.store(sampleSourceObject)
	c.Assert(err, chk.IsNil)
	compareErr = destinationComparator.processIfNecessary(storedObject{name: "test", relativePath: "/usr/test", lastModifiedTime: time.Now().Add(time.Hour), md5: destMD5})
	c.Assert(compareErr, chk.Equals, nil)
	// verify that neither a transfer nor a delete was scheduled
	c.Assert(len(dummyCopyScheduler.record), chk.Equals, 0)
	c.Assert(len(dummyCleaner.record), chk.Equals, 0)
	// reset dummy processors
	dummyCopyScheduler = dummyProcessor{}
	dummyCleaner = dummyProcessor{}
	// test the comparator in case a given destination object is present at the source
	// but it has an earlier modified time compared to the one at the source
	// meaning that the source object should be transferred since the destination object is stale
	err = indexer.store(sampleSourceObject)
	c.Assert(err, chk.IsNil)
	compareErr = destinationComparator.processIfNecessary(storedObject{name: "test", relativePath: "/usr/test", lastModifiedTime: time.Now().Add(-time.Hour), md5: destMD5})
	c.Assert(compareErr, chk.Equals, nil)
	// verify that the source object was scheduled for transfer, and nothing was deleted
	c.Assert(len(dummyCopyScheduler.record), chk.Equals, 1)
	c.Assert(dummyCopyScheduler.record[0].md5, chk.DeepEquals, srcMD5)
	c.Assert(len(dummyCleaner.record), chk.Equals, 0)
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"encoding/base64"
"encoding/binary"
"fmt"
"net"
"net/url"
"os"
"strings"
"path/filepath"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
)
const (
	// NumOfFilesPerDispatchJobPart caps how many transfers are batched into a single
	// job part when dispatching a job.
	NumOfFilesPerDispatchJobPart = 10000
)
// copyHandlerUtil bundles helper methods used by the copy command handlers.
type copyHandlerUtil struct{}

// TODO: Need be replaced with anonymous embedded field technique.
var gCopyUtil = copyHandlerUtil{}

// wildCard is the pattern character recognized in user-provided URLs.
const wildCard = "*"
// numOfWildcardInURL returns the number of wildcard ("*") characters in the given URL.
func (copyHandlerUtil) numOfWildcardInURL(url url.URL) int {
	return strings.Count(url.String(), wildCard)
}
// isIPEndpointStyle checks if URL's host is an IP, in which case the storage account endpoint will be composed as:
// http(s)://IP(:port)/storageaccount/share(||container||etc)/...
// TODO: Remove this, it can be replaced by SDK's native support for IP endpoint style.
func (util copyHandlerUtil) isIPEndpointStyle(url url.URL) bool {
	return net.ParseIP(url.Host) != nil
}
// urlIsContainerOrShare checks if a given url points to a container (or share), as opposed to a blob or prefix match.
// The decision is based on the number of path separators: IP-endpoint-style URLs carry the
// account name as the first path segment, so they allow one extra separator.
func (util copyHandlerUtil) urlIsContainerOrShare(url *url.URL) bool {
	// When it's IP endpoint style, if the path contains more than two "/", then it means it points to a blob, and not a container.
	// When it's not IP endpoint style, if the path contains more than one "/", then it means it points to a blob, and not a container.
	numOfSlashes := strings.Count(url.Path[1:], "/") // url.Path always begins with "/", which is skipped here
	isIPEndpointStyle := util.isIPEndpointStyle(*url)
	if (!isIPEndpointStyle && numOfSlashes == 0) || (isIPEndpointStyle && numOfSlashes == 1) {
		return true
	} else if ((!isIPEndpointStyle && numOfSlashes == 1) || (isIPEndpointStyle && numOfSlashes == 2)) && strings.HasSuffix(url.Path, "/") { // this checks if container_name/ was given
		return true
	}
	return false
}
// appendQueryParamToUrl appends the given query parameter to url's raw query,
// inserting a separating '&' when other parameters are already present.
// The url is modified in place and also returned for convenience.
func (util copyHandlerUtil) appendQueryParamToUrl(url *url.URL, queryParam string) *url.URL {
	if url.RawQuery == "" {
		url.RawQuery = queryParam
	} else {
		url.RawQuery += "&" + queryParam
	}
	return url
}
// redactSigQueryParam checks for the signature in the given raw query part of a URL.
// If the signature exists, it replaces its value with "REDACTED".
// This api is used when a SAS is written to the log file, to avoid exposing the user-given SAS.
// Returns whether a signature was found, plus the (possibly redacted) query string;
// note the returned string is lowercased in both cases.
// TODO: remove this, redactSigQueryParam could be added in SDK
func (util copyHandlerUtil) redactSigQueryParam(rawQuery string) (bool, string) {
	rawQuery = strings.ToLower(rawQuery) // lowercase the string so we can look for the sig= parameter
	// A raw query never includes the leading '?', so the signature may appear at the very
	// start of the string as well as after '?' or '&'. The HasPrefix check fixes a bug where
	// a SAS whose sig was the FIRST query parameter was never redacted.
	sigFound := strings.HasPrefix(rawQuery, "sig=") ||
		strings.Contains(rawQuery, "?sig=") ||
		strings.Contains(rawQuery, "&sig=")
	if !sigFound {
		return sigFound, rawQuery // sig= not found; return same rawQuery passed in (no memory allocation)
	}
	// sig= found, redact its value
	values, _ := url.ParseQuery(rawQuery)
	for name := range values {
		if strings.EqualFold(name, "sig") {
			values[name] = []string{"REDACTED"}
		}
	}
	return sigFound, values.Encode()
}
// ConstructCommandStringFromArgs creates the user-given command string from the os arguments.
// If any argument passed is an http URL and contains a signature, the signature is redacted.
func (util copyHandlerUtil) ConstructCommandStringFromArgs() string {
	// Get the os Args and strip away the first argument since it will be the path of Azcopy executable
	args := os.Args[1:]
	if len(args) == 0 {
		return ""
	}
	s := strings.Builder{}
	for _, arg := range args {
		// If the argument starts with http, it is either the remote source or remote destination
		// If there exists a signature in the argument string it needs to be redacted
		if startsWith(arg, "http") {
			// parse the url
			argUrl, err := url.Parse(arg)
			// If there is an error parsing the url, then throw the error.
			// Use the raw arg in the message: url.Parse returns a nil URL on failure,
			// so the previous argUrl.String() here would itself nil-panic.
			if err != nil {
				panic(fmt.Errorf("error parsing the url %s. Failed with error %s", arg, err.Error()))
			}
			// Check for the signature query parameter
			_, rawQuery := util.redactSigQueryParam(argUrl.RawQuery)
			argUrl.RawQuery = rawQuery
			s.WriteString(argUrl.String())
		} else {
			s.WriteString(arg)
		}
		s.WriteString(" ")
	}
	return s.String()
}
// urlIsBFSFileSystemOrDirectory returns true when the given BFS URL points to a
// filesystem (container-level URL) or to an existing directory.
func (util copyHandlerUtil) urlIsBFSFileSystemOrDirectory(ctx context.Context, url *url.URL, p pipeline.Pipeline) bool {
	if util.urlIsContainerOrShare(url) {
		return true
	}
	// Need to get the resource properties to verify whether it is a file or a
	// directory. Use the caller's ctx: the previous version passed
	// context.Background() here, silently discarding cancellation/deadlines.
	dirURL := azbfs.NewDirectoryURL(*url, p)
	return dirURL.IsDirectory(ctx)
}
// urlIsAzureFileDirectory returns true when the given Azure Files URL points to
// a share or to an existing directory (verified with a GetProperties request).
func (util copyHandlerUtil) urlIsAzureFileDirectory(ctx context.Context, url *url.URL) bool {
	// a share-level URL always counts as a directory
	if util.urlIsContainerOrShare(url) {
		return true
	}
	// otherwise issue a request to find out whether the resource is a directory
	filePipeline := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
	directoryURL := azfile.NewDirectoryURL(*url, filePipeline)
	_, err := directoryURL.GetProperties(ctx)
	return err == nil
}
// generateObjectPath appends a file name to the container path to generate a
// blob path, inserting a "/" separator unless one is already present.
func (copyHandlerUtil) generateObjectPath(destinationPath, fileName string) string {
	if destinationPath == "" || strings.HasSuffix(destinationPath, "/") {
		return destinationPath + fileName
	}
	return destinationPath + "/" + fileName
}
// resourceShouldBeIncluded decides whether the file at the given path should be
// included in the transfer.
// If no files were explicitly mentioned with the include flag, every file is
// included. Otherwise the file's path relative to parentSourcePath is matched
// against the include entries, first exactly and then as wildcard patterns.
func (util copyHandlerUtil) resourceShouldBeIncluded(parentSourcePath string, includeFileMap map[string]int, filePath string) bool {
	// If no files have been mentioned explicitly with the include flag
	// then the file at the given filePath is included
	if len(includeFileMap) == 0 {
		return true
	}
	// strip the parent source path from the file path to match against the
	// relative path mentioned in the include flag
	fileRelativePath := strings.Replace(filePath, parentSourcePath, "", 1)
	// guard against an empty relative path (filePath == parentSourcePath),
	// which would otherwise panic on the index below
	if len(fileRelativePath) > 0 && fileRelativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
		fileRelativePath = fileRelativePath[1:]
	}
	// Check if the given filePath exists as an exact entry in the map
	if _, ok := includeFileMap[fileRelativePath]; ok {
		return true
	}
	// Iterate through each entry of the map and match the given filePath
	// against the entry treated as a pattern. This handles the case when the
	// user passed a sub-dir inside the source: all files inside that
	// sub-directory should be included.
	// For Example: source = C:\User\user-1 include = "dir1"
	// Entry in Map = C:\User\user-1\dir1\* will match the filePath C:\User\user-1\dir1\file1.txt
	for key := range includeFileMap {
		if util.blobNameMatchesThePattern(key, fileRelativePath) {
			return true
		}
	}
	return false
}
// resourceShouldBeExcluded decides whether the file at the given filePath
// should be excluded from the transfer.
// First checks whether the path relative to parentSourcePath exists as an
// exact entry in the map, then matches it against each map entry treated as a
// wildcard pattern.
func (util copyHandlerUtil) resourceShouldBeExcluded(parentSourcePath string, excludedFilePathMap map[string]int, filePath string) bool {
	// strip the parent source path from the file path to match against the
	// relative path mentioned in the exclude flag
	fileRelativePath := strings.Replace(filePath, parentSourcePath, "", 1)
	// guard against an empty relative path (filePath == parentSourcePath),
	// which would otherwise panic on the index below
	if len(fileRelativePath) > 0 && fileRelativePath[0] == common.AZCOPY_PATH_SEPARATOR_CHAR {
		fileRelativePath = fileRelativePath[1:]
	}
	// Check if the given filePath exists as an exact entry in the map
	if _, ok := excludedFilePathMap[fileRelativePath]; ok {
		return true
	}
	// Iterate through each entry of the map and match the given filePath
	// against the entry treated as a pattern. This handles the case when the
	// user passed a sub-dir inside the source to exclude: all files inside
	// that sub-directory should be excluded.
	// For Example: source = C:\User\user-1 exclude = "dir1"
	// Entry in Map = C:\User\user-1\dir1\* will match the filePath C:\User\user-1\dir1\file1.txt
	for key := range excludedFilePathMap {
		if util.blobNameMatchesThePattern(key, fileRelativePath) {
			return true
		}
	}
	return false
}
// relativePathToRoot returns the path of filePath relative to rootPath.
// For Example: root = /a1/a2/ filePath = /a1/a2/f1.txt  -> f1.txt
// For Example: root = /a1    filePath = /a1/a2/f1.txt  -> a2/f1.txt
func (util copyHandlerUtil) relativePathToRoot(rootPath, filePath string, pathSep byte) string {
	if rootPath == "" {
		return filePath
	}
	relative := strings.Replace(filePath, rootPath, "", 1)
	// drop the leading separator left behind after stripping the root
	if len(relative) > 0 && relative[0] == pathSep {
		relative = relative[1:]
	}
	return relative
}
// evaluateSymlinkPath resolves the given symlink path and returns the target.
// Returns an error for an empty path, when the link cannot be resolved, or
// when the link resolves back to itself.
func (util copyHandlerUtil) evaluateSymlinkPath(path string) (string, error) {
	if path == "" {
		return "", fmt.Errorf("cannot evaluate empty symlinkPath")
	}
	resolved, err := filepath.EvalSymlinks(path)
	if err != nil {
		// filepath.EvalSymlinks returns an error for network drives, so fall
		// back to os.Readlink to evaluate the symlink in that case
		resolved, err = os.Readlink(path)
		if err != nil {
			return "", fmt.Errorf("error %s evaluating symlink path %s", err.Error(), path)
		}
	}
	// If the evaluated path is the same as the input, the path cannot really be
	// evaluated; reject it to avoid indefinite recursive calls by the caller.
	if resolved == path {
		return "", fmt.Errorf("symlink path %s evaluated back to itself", path)
	}
	return resolved, nil
}
// getRelativePath returns the path of filePath relative to the PARENT of
// rootPath, i.e. the last component of rootPath is kept in the result.
// example: rootPath = "/dir1/dir2/dir3" filePath = "/dir1/dir2/dir3/file1.txt"
//          scrubAway = "/dir1/dir2/"    result = "dir3/file1.txt"
func (copyHandlerUtil) getRelativePath(rootPath, filePath string) string {
	if len(rootPath) == 0 {
		return filePath
	}
	// replace the OS path separator in filePath with AZCOPY_PATH_SEPARATOR;
	// this replacement is required to handle windows file paths
	filePath = strings.Replace(filePath, common.OS_PATH_SEPARATOR, common.AZCOPY_PATH_SEPARATOR_STRING, -1)
	var scrubAway string
	// if the root path finishes with a separator, ignore that trailing separator
	if rootPath[len(rootPath)-1:] == common.AZCOPY_PATH_SEPARATOR_STRING {
		scrubAway = rootPath[:strings.LastIndex(rootPath[:len(rootPath)-1], common.AZCOPY_PATH_SEPARATOR_STRING)+1]
	} else {
		// +1 because we want to include the separator at the end of the dir
		scrubAway = rootPath[:strings.LastIndex(rootPath, common.AZCOPY_PATH_SEPARATOR_STRING)+1]
	}
	// (the previous version stored filePath into a "result" variable that was
	// immediately overwritten — a dead store, removed here)
	return strings.Replace(filePath, scrubAway, "", 1)
}
// isPathALocalDirectory reports whether pathString exists on the local
// filesystem and is a directory.
func (util copyHandlerUtil) isPathALocalDirectory(pathString string) bool {
	info, err := os.Stat(pathString)
	return err == nil && info.IsDir()
}
// generateLocalPath joins directoryPath and fileName with the azcopy path
// separator, unless the directory path already ends with one.
func (util copyHandlerUtil) generateLocalPath(directoryPath, fileName string) string {
	// blob names use "/" as the path separator; that separator is preserved in
	// the local path here (no conversion to the OS separator is performed)
	if directoryPath == "" || strings.HasSuffix(directoryPath, common.AZCOPY_PATH_SEPARATOR_STRING) {
		return directoryPath + fileName
	}
	return directoryPath + common.AZCOPY_PATH_SEPARATOR_STRING + fileName
}
// getBlobNameFromURL returns the blob name from a URL path, i.e. everything
// after the container name (everything after the second "/").
// NOTE(review): assumes the path contains a separator after the container name;
// if it does not, SplitAfterN yields a single element and the [1] index would
// panic — confirm callers always pass a blob-level path.
func (util copyHandlerUtil) getBlobNameFromURL(path string) string {
	// return everything after the second /
	return strings.SplitAfterN(path[1:], common.AZCOPY_PATH_SEPARATOR_STRING, 2)[1]
}
// getDirNameFromSource splits the given path into everything up to (and
// including) the separator before the last path component, and the last
// component itself (a trailing separator on the input stays with the prefix
// component). The trailing separator, if any, is ignored when locating the
// split point.
func (util copyHandlerUtil) getDirNameFromSource(path string) (sourcePathWithoutPrefix, searchPrefix string) {
	trimmed := path
	if path[len(path)-1:] == common.AZCOPY_PATH_SEPARATOR_STRING {
		trimmed = path[:len(path)-1]
	}
	// +1 so the separator stays with the leading part
	sepIndex := strings.LastIndex(trimmed, common.AZCOPY_PATH_SEPARATOR_STRING) + 1
	sourcePathWithoutPrefix = path[:sepIndex]
	searchPrefix = path[sepIndex:]
	return
}
// firstIndexOfWildCard returns the index of the first wildcard character in
// name, or -1 when name contains no wildcard.
func (util copyHandlerUtil) firstIndexOfWildCard(name string) int {
	return strings.Index(name, wildCard)
}
// getContainerURLFromString returns the container-level URL for the given blob
// URL by clearing the blob name in its parsed parts.
func (util copyHandlerUtil) getContainerURLFromString(url url.URL) url.URL {
	parts := azblob.NewBlobURLParts(url)
	parts.BlobName = ""
	return parts.URL()
}
// getContainerUrl returns the container-level URL for the given blob URL
// parts by clearing the blob name.
func (util copyHandlerUtil) getContainerUrl(blobParts azblob.BlobURLParts) url.URL {
	blobParts.BlobName = ""
	return blobParts.URL()
}
// blobNameFromUrl returns the blob name stored in the given parsed URL parts.
func (util copyHandlerUtil) blobNameFromUrl(blobParts azblob.BlobURLParts) string {
	return blobParts.BlobName
}
// stripSASFromFileShareUrl removes the SAS query parameters from an Azure
// Files URL and returns the resulting URL.
func (util copyHandlerUtil) stripSASFromFileShareUrl(fileUrl url.URL) *url.URL {
	parts := azfile.NewFileURLParts(fileUrl)
	// assigning the zero value clears the SAS token from the URL parts
	parts.SAS = azfile.SASQueryParameters{}
	stripped := parts.URL()
	return &stripped
}
// stripSASFromBlobUrl removes the SAS query parameters from an Azure Blob URL
// and returns the resulting URL.
func (util copyHandlerUtil) stripSASFromBlobUrl(blobUrl url.URL) *url.URL {
	parts := azblob.NewBlobURLParts(blobUrl)
	// assigning the zero value clears the SAS token from the URL parts
	parts.SAS = azblob.SASQueryParameters{}
	stripped := parts.URL()
	return &stripped
}
// createBlobUrlFromContainer returns a blob-level URL built from the given
// container URL parts and blobName.
func (util copyHandlerUtil) createBlobUrlFromContainer(blobUrlParts azblob.BlobURLParts, blobName string) url.URL {
	blobUrlParts.BlobName = blobName
	return blobUrlParts.URL()
}
// appendBlobNameToUrl appends blobName to the blob name already present in
// blobUrlParts (inserting a separator when needed) and returns the resulting
// URL together with the combined blob name.
func (util copyHandlerUtil) appendBlobNameToUrl(blobUrlParts azblob.BlobURLParts, blobName string) (url.URL, string) {
	switch {
	case blobUrlParts.BlobName == "":
		blobUrlParts.BlobName = blobName
	case strings.HasSuffix(blobUrlParts.BlobName, "/"):
		blobUrlParts.BlobName += blobName
	default:
		blobUrlParts.BlobName += common.AZCOPY_PATH_SEPARATOR_STRING + blobName
	}
	return blobUrlParts.URL(), blobUrlParts.BlobName
}
// getRootPathWithoutWildCards returns the deepest directory of path that contains no wildcards,
// and the pattern that files must match relative to that directory.
// For Example: source = C:\User\a*\a1*\*.txt rootDir = C:\User\ pattern = a*\a1*\*.txt
// An empty path yields ("", "*"); a path without wildcards yields (path, "*").
func (util copyHandlerUtil) getRootPathWithoutWildCards(path string) (string, string) {
	if len(path) == 0 {
		return path, "*"
	}
	// if no wild card exists, then root directory is the given directory
	// pattern is '*' i.e to include all the files inside the given path
	wIndex := util.firstIndexOfWildCard(path)
	if wIndex == -1 {
		return path, "*"
	}
	pathWithoutWildcard := path[:wIndex]
	// find the last separator in path without the wildCards
	// result will be content of path till the above separator
	// for Example: source = C:\User\a*\a1*\*.txt pathWithoutWildcard = C:\User\a
	// sepIndex = 7
	// rootDirectory = C:\User and pattern = a*\a1*\*.txt
	sepIndex := strings.LastIndex(pathWithoutWildcard, common.AZCOPY_PATH_SEPARATOR_STRING)
	if sepIndex == -1 {
		// no separator before the wildcard: root is empty, the whole path is the pattern
		return "", path
	}
	return pathWithoutWildcard[:sepIndex], path[sepIndex+1:]
}
// blobNameMatchesThePatternComponentWise matches blobName against pattern one
// path component at a time, so a '*' never spans a path separator.
// Example: /home/user/dir*/*file matches /home/user/dir1/abcfile but does NOT
// match /home/user/dir1/dir2/abcfile.
func (util copyHandlerUtil) blobNameMatchesThePatternComponentWise(pattern string, blobName string) bool {
	patternComponents := strings.Split(pattern, common.AZCOPY_PATH_SEPARATOR_STRING)
	blobNameComponents := strings.Split(blobName, common.AZCOPY_PATH_SEPARATOR_STRING)
	// a differing number of components can never match, since the wildcard does
	// not cross the path separator in this mode
	if len(patternComponents) != len(blobNameComponents) {
		return false
	}
	// every component of the blob name must match the corresponding pattern component
	for i, component := range patternComponents {
		if !util.blobNameMatchesThePattern(component, blobNameComponents[i]) {
			return false
		}
	}
	return true
}
// blobNameMatchesThePattern matches blobName against patternString, where '*'
// matches any run of characters (including the path separator here) and every
// other character must match exactly. Classic two-pointer wildcard matching
// with backtracking to the most recently seen '*'.
func (util copyHandlerUtil) blobNameMatchesThePattern(patternString string, blobName string) bool {
	str := []rune(blobName)
	pattern := []rune(patternString)
	s := 0 // counter for str index
	p := 0 // counter for pattern index
	startIndex := -1 // pattern index of the most recent '*' (-1 = none seen yet)
	match := 0       // str index where the current '*' expansion attempt began
	for s < len(str) {
		// advancing both pointers
		if p < len(pattern) && str[s] == pattern[p] {
			s++
			p++
		} else if p < len(pattern) && pattern[p] == '*' {
			// * found, only advancing pattern pointer
			startIndex = p
			match = s
			p++
		} else if startIndex != -1 {
			// mismatch after a '*': backtrack and let the '*' absorb one more character
			p = startIndex + 1
			match++
			s = match
		} else {
			//current pattern pointer is not star, last patter pointer was not *
			//characters do not match
			return false
		}
	}
	//check for remaining characters in pattern; trailing '*'s match the empty string
	for p < len(pattern) && pattern[p] == '*' {
		p++
	}
	// match succeeds only if the whole pattern has been consumed
	return p == len(pattern)
}
// matchBlobNameAgainstPattern matches the given blobName against pattern.
// When recursive is true, '*' in the pattern also matches the path separator
// (sub-directories of the source are searched). When recursive is false, the
// match happens component-wise, where a component is each dir in the path:
// for blobName = /dir-1/dir-2/blob1.txt the components are dir-1, dir-2, blob1.txt.
func (util copyHandlerUtil) matchBlobNameAgainstPattern(pattern string, blobName string, recursive bool) bool {
	if !recursive {
		return util.blobNameMatchesThePatternComponentWise(pattern, blobName)
	}
	return util.blobNameMatchesThePattern(pattern, blobName)
}
// searchPrefixFromUrl derives the blob listing prefix and the match pattern
// from the parsed parts of a blob URL.
func (util copyHandlerUtil) searchPrefixFromUrl(parts azblob.BlobURLParts) (prefix, pattern string) {
	// If the blobName is empty, it means the url provided is of a container,
	// then all blobs inside containers needs to be included, so pattern is set to *
	if parts.BlobName == "" {
		pattern = "*"
		return
	}
	// Check for wildcards and get the index of first wildcard
	// If the wild card does not exists, then index returned is -1
	wildCardIndex := util.firstIndexOfWildCard(parts.BlobName)
	if wildCardIndex < 0 {
		// If no wild card exits and url represents a virtual directory
		// prefix is the path of virtual directory after the container.
		// Example: https://<container-name>/vd-1?<signature>, prefix = /vd-1
		// Example: https://<container-name>/vd-1/vd-2?<signature>, prefix = /vd-1/vd-2
		prefix = parts.BlobName
		// check for separator at the end of virtual directory
		if prefix[len(prefix)-1] != '/' {
			prefix += "/"
		}
		// since the url is a virtual directory, then all blobs inside the virtual directory
		// needs to be downloaded, so the pattern is "*"
		// pattern being "*", all blobNames when matched with "*" will be true
		// so all blobs inside the virtual dir will be included
		pattern = "*"
		return
	}
	// wild card exists: prefix is the content of the blob name up to the wildcard index
	// Example: https://<container-name>/vd-1/vd-2/abc*
	// prefix = /vd-1/vd-2/abc and pattern = /vd-1/vd-2/abc*
	// All the blob inside the container in virtual dir vd-2 that have the prefix "abc"
	prefix = parts.BlobName[:wildCardIndex]
	pattern = parts.BlobName
	return
}
// getConatinerUrlAndSuffix splits a URL path into the container part ("/<container>")
// and the remaining blob / virtual-directory suffix (with any trailing "/" removed).
// (Name keeps its historical typo to preserve the call sites.)
func (util copyHandlerUtil) getConatinerUrlAndSuffix(url url.URL) (containerUrl, suffix string) {
	s := strings.SplitAfterN(url.Path[1:], "/", 2)
	containerUrl = "/" + s[0]
	// guard: a container-only path ("/container") has no second part; the
	// previous version indexed s[1] unconditionally and panicked in that case
	if len(s) < 2 {
		return
	}
	suffix = s[1]
	if strings.HasSuffix(suffix, "/") {
		// if there is a path separator at the end, then remove the path separator
		suffix = suffix[:len(suffix)-1]
	}
	return
}
// generateBlobUrl appends blobName to the path of the given container URL,
// inserting a "/" unless the path already ends with one, and returns the
// resulting blob-level URL.
func (util copyHandlerUtil) generateBlobUrl(containerUrl url.URL, blobName string) url.URL {
	// use HasSuffix instead of indexing the last byte: the previous version
	// panicked when the container URL had an empty path
	if strings.HasSuffix(containerUrl.Path, "/") {
		containerUrl.Path += blobName
	} else {
		containerUrl.Path += "/" + blobName
	}
	return containerUrl
}
// getLastVirtualDirectoryFromPath returns, for a given path, the virtual
// directory directly above the final component — i.e. everything up to and
// including the last "/". Returns "" when there is no "/" in the path.
func (util copyHandlerUtil) getLastVirtualDirectoryFromPath(path string) string {
	idx := strings.LastIndex(path, "/")
	if idx < 0 {
		return ""
	}
	return path[:idx+1]
}
// blockIDIntToBase64 converts an integer block ID to its base64-encoded form:
// all block IDs are encoded from a fixed 4-byte little-endian representation.
func (util copyHandlerUtil) blockIDIntToBase64(blockID int) string {
	buf := make([]byte, 4)
	binary.LittleEndian.PutUint32(buf, uint32(blockID))
	return base64.StdEncoding.EncodeToString(buf)
}
// containsSpecialChars reports whether name contains any character that the OS
// does not allow when creating a file / dir: " \ < > | * ? :
// A trailing space also counts as special, since the OS drops it
// (e.g. "abcd " is created as "abcd").
func (util copyHandlerUtil) containsSpecialChars(name string) bool {
	if strings.ContainsAny(name, `"\<>|*?:`) {
		return true
	}
	// a name ending in ' ' is not accepted by the OS: 'test1 ' becomes 'test1'
	return strings.HasSuffix(name, " ")
}
// blobPathWOSpecialCharacters returns blobPath with any path component that
// contains OS-forbidden special characters query-escaped, so the blob can be
// created locally. Empty components (a "/" at the start or end of the blob
// name) are represented by the escaped "/" itself.
func (util copyHandlerUtil) blobPathWOSpecialCharacters(blobPath string) string {
	// split the path by separator "/" and process each component independently;
	// for example path /a/b/c/d/e.txt checks a, b, c, d and e.txt in turn.
	parts := strings.Split(blobPath, "/")
	encoded := make([]string, len(parts))
	for i, part := range parts {
		switch {
		case len(part) == 0:
			// empty part means a "/" at the start/end of the blob name;
			// encode the "/" character itself
			encoded[i] = url.QueryEscape("/")
		case util.containsSpecialChars(part):
			// the component carries special characters, so escape it
			encoded[i] = url.QueryEscape(part)
		default:
			// no special characters: keep the component as it is
			encoded[i] = part
		}
	}
	// joining with "/" reproduces the original separators without the O(n²)
	// string concatenation (and trailing-slash trim) of the previous version
	return strings.Join(encoded, "/")
}
// doesBlobRepresentAFolder reports whether a blob's metadata marks it as a
// folder placeholder rather than real data.
// This handles the WASB V1 directory structure: the HDFS driver creates a blob
// for each empty directory (say 'myfolder') and names blobs under it as
// 'myfolder/myblob'; the empty-directory blob carries the metadata
// 'hdi_isfolder = true'.
func (util copyHandlerUtil) doesBlobRepresentAFolder(metadata azblob.Metadata) bool {
	return metadata["hdi_isfolder"] == "true"
}
// startsWith reports whether s begins with t, compared case-insensitively
// (note: despite the generic name, this uses strings.EqualFold, unlike
// strings.HasPrefix which is case-sensitive).
func startsWith(s string, t string) bool {
	return len(s) >= len(t) && strings.EqualFold(s[0:len(t)], t)
}
// endWithSlashOrBackSlash reports whether path ends with either a forward
// slash or a backslash.
func endWithSlashOrBackSlash(path string) bool {
	switch {
	case strings.HasSuffix(path, "/"):
		return true
	case strings.HasSuffix(path, `\`):
		return true
	default:
		return false
	}
}
// getFileNameFromPath returns the file name component of the given path, or ""
// when the path is empty or ends in a path separator (i.e. names a directory).
func (util copyHandlerUtil) getFileNameFromPath(path string) string {
	if path == "" || endWithSlashOrBackSlash(path) {
		return ""
	}
	// everything after the last "/" is the file name
	return path[strings.LastIndex(path, "/")+1:]
}
// getDeepestDirOrFileURLFromString returns the deepest valid DirectoryURL or FileURL that can be
// picked out from the provided URL, plus the file's properties (when it is a file) and a success flag.
// When the provided URL ends with *, the parent directory of the wildcard component is used.
// When the provided URL has no *, it could be a file or a directory; a request is made to find out which.
// TODO: deprecated, remove this method
func (util copyHandlerUtil) getDeepestDirOrFileURLFromString(ctx context.Context, givenURL url.URL, p pipeline.Pipeline) (*azfile.DirectoryURL, *azfile.FileURL, *azfile.FileGetPropertiesResponse, bool) {
	// NOTE: this local shadows the imported "url" package for the rest of the function
	url := givenURL
	path := url.Path
	if strings.HasSuffix(path, "*") {
		// drop the wildcard component, keeping its parent directory
		lastSlashIndex := strings.LastIndex(path, "/")
		url.Path = url.Path[:lastSlashIndex]
	} else {
		if !strings.HasSuffix(path, "/") {
			// Could be a file or a directory, try to see if file exists
			fileURL := azfile.NewFileURL(url, p)
			if gResp, err := fileURL.GetProperties(ctx); err == nil {
				return nil, &fileURL, gResp, true
			} else {
				// not a file (or request failed); log and fall through to the directory probe
				glcm.Info("Fail to parse " +
					common.URLExtension{URL: url}.RedactSecretQueryParamForLogging() +
					" as a file for error " + err.Error() + ", given URL: " + givenURL.String())
			}
		}
	}
	// probe the (possibly trimmed) URL as a directory
	dirURL := azfile.NewDirectoryURL(url, p)
	if _, err := dirURL.GetProperties(ctx); err == nil {
		return &dirURL, nil, nil, true
	} else {
		glcm.Info("Fail to parse " +
			common.URLExtension{URL: url}.RedactSecretQueryParamForLogging() +
			" as a directory for error " + err.Error() + ", given URL: " + givenURL.String())
	}
	// neither a file nor a directory could be resolved
	return nil, nil, nil, false
}
// hasEquivalentDirectoryURL verifies whether a URL is of the form directory/*
// or share/*, which is equivalent to the directory or share itself. When it
// is, the returned URL directly expresses that directory (the trailing '*' is
// removed, keeping the '/') and isDirectoryStartExpression is true.
func (util copyHandlerUtil) hasEquivalentDirectoryURL(url url.URL) (isDirectoryStartExpression bool, equivalentURL url.URL) {
	if strings.HasSuffix(url.Path, "/*") {
		// drop only the '*', keeping the trailing '/'
		url.Path = strings.TrimSuffix(url.Path, "*")
		isDirectoryStartExpression = true
	}
	equivalentURL = url
	return
}
// replaceBackSlashWithSlash replaces every backslash '\' with a slash '/' in
// the given URL string.
func (util copyHandlerUtil) replaceBackSlashWithSlash(urlStr string) string {
	return strings.Replace(urlStr, `\`, "/", -1)
}
/////////////////////////////////////////////////////////////////////////////////////////////////
// urlExtension wraps url.URL to add azcopy-specific helper methods.
type urlExtension struct {
	url.URL
}

// redactSigQueryParamForLogging returns the URL as a string with the value of
// any SAS signature ("sig") query parameter replaced by "REDACTED", making the
// string safe to write to logs.
func (u urlExtension) redactSigQueryParamForLogging() string {
	if ok, rawQuery := gCopyUtil.redactSigQueryParam(u.RawQuery); ok {
		u.RawQuery = rawQuery
	}
	return u.String()
}

// generateObjectPath returns a copy of the URL with objectName appended to its
// path (a "/" separator is inserted unless the path already ends with one).
func (u urlExtension) generateObjectPath(objectName string) url.URL {
	u.Path = gCopyUtil.generateObjectPath(u.Path, objectName)
	return u.URL
}
/////////////////////////////////////////////////////////////////////////////////////////////////
// blobURLPartsExtension wraps azblob.BlobURLParts to add azcopy-specific
// helpers for deriving search prefixes and related URLs.
type blobURLPartsExtension struct {
	azblob.BlobURLParts
}

// searchPrefixFromBlobURL gets the blob listing prefix and match pattern from
// the Blob URL parts, and reports whether the URL contains a wildcard.
func (parts blobURLPartsExtension) searchPrefixFromBlobURL() (prefix, pattern string, isWildcardSearch bool) {
	// If the blobName is empty, it means the url provided is of a container,
	// then all blobs inside containers needs to be included, so pattern is set to *
	if parts.BlobName == "" {
		pattern = "*"
		return
	}
	// Check for wildcards and get the index of first wildcard
	// If the wild card does not exists, then index returned is -1
	wildCardIndex := gCopyUtil.firstIndexOfWildCard(parts.BlobName)
	if wildCardIndex < 0 {
		// If no wild card exits and url represents a virtual directory
		// prefix is the path of virtual directory after the container.
		// Example: https://<container-name>/vd-1?<signature>, prefix = /vd-1
		// Example: https://<container-name>/vd-1/vd-2?<signature>, prefix = /vd-1/vd-2
		prefix = parts.BlobName
		// check for separator at the end of virtual directory
		if prefix[len(prefix)-1] != '/' {
			prefix += "/"
		}
		// since the url is a virtual directory, then all blobs inside the virtual directory
		// needs to be downloaded, so the pattern is "*"
		// pattern being "*", all blobNames when matched with "*" will be true
		// so all blobs inside the virtual dir will be included
		pattern = "*"
		return
	}
	isWildcardSearch = true
	// wild card exists: prefix is the content of the blob name up to the wildcard index
	// Example: https://<container-name>/vd-1/vd-2/abc*
	// prefix = /vd-1/vd-2/abc and pattern = /vd-1/vd-2/abc*
	// All the blob inside the container in virtual dir vd-2 that have the prefix "abc"
	prefix = parts.BlobName[:wildCardIndex]
	pattern = parts.BlobName
	return
}
// isBlobAccountLevelSearch checks whether the URL parts describe an
// account-level search for the blob service (no container, or a wildcard in
// the container name). When they do, it also returns the container-name
// prefix (the part before the wildcard) to search for.
func (parts blobURLPartsExtension) isBlobAccountLevelSearch() (isBlobAccountLevelSearch bool, containerPrefix string) {
	// If it's account level URL which need search container, there could be two cases:
	// a. https://<account-name>(/)
	// b. https://<account-name>/containerprefix*(/*)
	if parts.ContainerName == "" ||
		strings.Contains(parts.ContainerName, wildCard) {
		isBlobAccountLevelSearch = true
		// For case container name is empty, search for all containers
		// (containerPrefix stays "")
		if parts.ContainerName == "" {
			return
		}
		wildCardIndex := gCopyUtil.firstIndexOfWildCard(parts.ContainerName)
		// wild card exists: prefix is the content of the container name up to the wildcard index
		// Example 1: for URL https://<account-name>/c-2*, containerPrefix = c-2
		// Example 2: for URL https://<account-name>/c-2*/vd/b*, containerPrefix = c-2
		containerPrefix = parts.ContainerName[:wildCardIndex]
		return
	}
	// Otherwise, it's not account level search.
	return
}
// getContainerURL returns the container-level URL (blob name cleared).
func (parts blobURLPartsExtension) getContainerURL() url.URL {
	parts.BlobName = ""
	return parts.URL()
}

// getServiceURL returns the account/service-level URL (container and blob name cleared).
func (parts blobURLPartsExtension) getServiceURL() url.URL {
	parts.ContainerName = ""
	parts.BlobName = ""
	return parts.URL()
}

// isContainerSyntactically reports whether the parts describe a container URL
// by shape alone (host + container, no blob name) — no request is made.
func (parts blobURLPartsExtension) isContainerSyntactically() bool {
	return parts.Host != "" && parts.ContainerName != "" && parts.BlobName == ""
}

// isServiceSyntactically reports whether the parts describe a service URL by
// shape alone (host only) — no request is made.
func (parts blobURLPartsExtension) isServiceSyntactically() bool {
	return parts.Host != "" && parts.ContainerName == "" && parts.BlobName == ""
}

// isBlobSyntactically reports whether the parts describe a blob URL by shape
// alone: host + container + blob name not ending in a path separator.
func (parts blobURLPartsExtension) isBlobSyntactically() bool {
	return parts.Host != "" && parts.ContainerName != "" && parts.BlobName != "" && !strings.HasSuffix(parts.BlobName, common.AZCOPY_PATH_SEPARATOR_STRING)
}
// getParentSourcePath returns the source path without wildcards. This is
// needed because files mentioned with the exclude / include flags are relative
// to the source; when the source has wildcards, they are relative to the last
// directory before the first wildcard.
// For Example: src = "/home/user/dir1" parentSourcePath = "/home/user/dir1"
// For Example: src = "/home/user/dir*" parentSourcePath = "/home/user"
// For Example: src = "/home/*"         parentSourcePath = "/home"
func (parts blobURLPartsExtension) getParentSourcePath() string {
	name := parts.BlobName
	wcIndex := gCopyUtil.firstIndexOfWildCard(name)
	if wcIndex == -1 {
		// no wildcard: the blob name itself is the parent source path
		return name
	}
	// cut at the wildcard, then step back to the last directory before it
	name = name[:wcIndex]
	if sepIndex := strings.LastIndex(name, "/"); sepIndex != -1 {
		return name[:sepIndex]
	}
	return ""
}
/////////////////////////////////////////////////////////////////////////////////////////////////
// fileURLPartsExtension wraps azfile.FileURLParts to add azcopy-specific
// helpers for deriving search prefixes and related URLs (mirrors
// blobURLPartsExtension for the File service).
type fileURLPartsExtension struct {
	azfile.FileURLParts
}

// isFileAccountLevelSearch checks whether the URL parts describe an
// account-level search for the file service (no share, or a wildcard in the
// share name). When they do, it also returns the share-name prefix (the part
// before the wildcard) to search for.
func (parts fileURLPartsExtension) isFileAccountLevelSearch() (isFileAccountLevelSearch bool, prefix string) {
	// If it's account level URL which need search share, there could be two cases:
	// a. https://<account-name>(/)
	// b. https://<account-name>/shareprefix*
	if parts.ShareName == "" ||
		strings.Contains(parts.ShareName, wildCard) {
		isFileAccountLevelSearch = true
		// For case 1-a, search for all shares (prefix stays "")
		if parts.ShareName == "" {
			return
		}
		wildCardIndex := gCopyUtil.firstIndexOfWildCard(parts.ShareName)
		// wild card exists: prefix is the content of the share name up to the wildcard index
		// Example 1: for URL https://<account-name>/s-2*, sharePrefix = s-2
		// Example 2: for URL https://<account-name>/s-2*/d/f*, sharePrefix = s-2
		prefix = parts.ShareName[:wildCardIndex]
		return
	}
	// Otherwise, it's not account level search.
	return
}
// searchPrefixFromFileURL gets the file listing prefix and match pattern from
// the File URL parts, and reports whether the URL contains a wildcard
// (aligns with blobURLPartsExtension.searchPrefixFromBlobURL).
// Note: This method doesn't validate if the provided URL points to a FileURL, and will treat the input without
// wildcard as directory URL.
func (parts fileURLPartsExtension) searchPrefixFromFileURL() (prefix, pattern string, isWildcardSearch bool) {
	// If the DirectoryOrFilePath is empty, it means the url provided is of a share,
	// then all files inside share needs to be included, so pattern is set to *
	if parts.DirectoryOrFilePath == "" {
		pattern = "*"
		return
	}
	// Check for wildcards and get the index of first wildcard
	// If the wild card does not exists, then index returned is -1
	wildCardIndex := gCopyUtil.firstIndexOfWildCard(parts.DirectoryOrFilePath)
	if wildCardIndex < 0 {
		// If no wild card exits and url represents a directory
		// prefix is the path of directory after the share.
		// Example: https://<share-name>/d-1?<signature>, prefix = /d-1
		// Example: https://<share-name>/d-1/d-2?<signature>, prefix = /d-1/d-2
		prefix = parts.DirectoryOrFilePath
		// check for separator at the end of directory
		if prefix[len(prefix)-1] != '/' {
			prefix += "/"
		}
		// since the url is a directory, then all files inside the directory
		// needs to be downloaded, so the pattern is "*"
		pattern = "*"
		return
	}
	isWildcardSearch = true
	// wild card exists: prefix is the content of the path up to the wildcard index
	// Example: https://<share-name>/vd-1/vd-2/abc*
	// prefix = /vd-1/vd-2/abc and pattern = /vd-1/vd-2/abc*
	// All the file inside the share in dir vd-2 that have the prefix "abc"
	prefix = parts.DirectoryOrFilePath[:wildCardIndex]
	pattern = parts.DirectoryOrFilePath
	return
}
// getParentSourcePath returns the source path without wildcards for a File
// service URL: when the path has a wildcard, the result is the last directory
// before the first wildcard; otherwise it is the path itself.
// (Aligns with blobURLPartsExtension.getParentSourcePath.)
func (parts fileURLPartsExtension) getParentSourcePath() string {
	name := parts.DirectoryOrFilePath
	wcIndex := gCopyUtil.firstIndexOfWildCard(name)
	if wcIndex == -1 {
		// no wildcard: the path itself is the parent source path
		return name
	}
	// cut at the wildcard, then step back to the last directory before it
	name = name[:wcIndex]
	if sepIndex := strings.LastIndex(name, "/"); sepIndex != -1 {
		return name[:sepIndex]
	}
	return ""
}
// getDirURLAndSearchPrefixFromFileURL gets the sub-directory URL and the file-name
// search prefix based on the provided File service resource URL.
// Note: This method doesn't validate if the provided URL points to a FileURL, and will treat the input without
// wildcard as directory URL.
func (parts fileURLPartsExtension) getDirURLAndSearchPrefixFromFileURL(p pipeline.Pipeline) (dirURL azfile.DirectoryURL, prefix string) {
	// If the DirectoryOrFilePath is empty, it means the url provided is of a share,
	// then all files and directories inside share needs to be included (prefix = "")
	if parts.DirectoryOrFilePath == "" {
		dirURL = azfile.NewDirectoryURL(parts.URL(), p)
		return
	}
	// Check for wildcards and get the index of first wildcard
	// If the wild card does not exists, then index returned is -1
	wildCardIndex := gCopyUtil.firstIndexOfWildCard(parts.DirectoryOrFilePath)
	if wildCardIndex < 0 {
		// If no wild card exits and url represents a directory, the file prefix is "".
		// Example: https://<share-name>/d-1?<signature>, directoryURL = https://<share-name>/d-1?<signature>, prefix = ""
		dirURL = azfile.NewDirectoryURL(parts.URL(), p)
		return
	}
	// wild card exists: prefix is the content of the file name up to the wildcard index
	// Example: https://<share-name>/d-1/d-2/abc*
	// directoryURL = "https://<share-name>/d-1/d-2/", prefix = abc
	dirOrFilePath := parts.DirectoryOrFilePath
	lastSlashIndex := strings.LastIndex(dirOrFilePath, "/")
	prefix = dirOrFilePath[lastSlashIndex+1 : wildCardIndex] // If no slash exists, start from 0, end at wildcard index.
	// compose the parent directory of the search prefix
	parts.DirectoryOrFilePath = dirOrFilePath[:lastSlashIndex]
	dirURL = azfile.NewDirectoryURL(parts.URL(), p)
	return
}
// getShareURL returns the URL of the share these parts refer to, i.e. the
// same URL with the directory/file path stripped off.
func (parts fileURLPartsExtension) getShareURL() url.URL {
	shareParts := parts // value receiver: mutating the copy is safe for the caller
	shareParts.DirectoryOrFilePath = ""
	return shareParts.URL()
}
// getServiceURL returns the File service endpoint URL, i.e. the same URL with
// both the share name and the directory/file path stripped off.
func (parts fileURLPartsExtension) getServiceURL() url.URL {
	serviceParts := parts // value receiver: mutating the copy is safe for the caller
	serviceParts.ShareName = ""
	serviceParts.DirectoryOrFilePath = ""
	return serviceParts.URL()
}
// isFileSyntactically reports whether the URL parts look like a single file:
// host, share and path must all be present, and the path must not end with a
// separator (a trailing separator indicates a directory).
func (parts fileURLPartsExtension) isFileSyntactically() bool {
	if parts.Host == "" || parts.ShareName == "" || parts.DirectoryOrFilePath == "" {
		return false
	}
	return !strings.HasSuffix(parts.DirectoryOrFilePath, common.AZCOPY_PATH_SEPARATOR_STRING)
}
/////////////////////////////////////////////////////////////////////////////////////////////////
// adlsGen2PathURLPartsExtension wraps azbfs.BfsURLParts with azcopy-specific
// helpers (search-prefix extraction, parent-path computation, URL composing).
type adlsGen2PathURLPartsExtension struct {
	azbfs.BfsURLParts
}
// searchPrefixFromADLSGen2PathURL aligns to blobURL's method searchPrefixFromBlobURL.
// Note: This method doesn't validate if the provided URL points to a file URL, and will treat the input without
// wildcard as directory URL.
func (parts adlsGen2PathURLPartsExtension) searchPrefixFromADLSGen2PathURL() (prefix, pattern string, isWildcardSearch bool) {
	path := parts.DirectoryOrFilePath

	// An empty path means the URL addresses a whole filesystem: match every
	// file ("*"), no prefix, not a wildcard search.
	if path == "" {
		return "", "*", false
	}

	wcIndex := gCopyUtil.firstIndexOfWildCard(path) // -1 when no wildcard present
	if wcIndex < 0 {
		// Directory URL without wildcard: the prefix is the directory path,
		// normalized to end with "/", and everything under it matches.
		// Example: https://<filesystem-name>/d-1/d-2?<sig> -> prefix = /d-1/d-2/
		prefix = path
		if !strings.HasSuffix(prefix, "/") {
			prefix += "/"
		}
		return prefix, "*", false
	}

	// Wildcard search: the prefix is everything before the first wildcard and
	// the whole path acts as the match pattern.
	// Example: https://<filesystem-name>/vd-1/vd-2/abc*
	//   -> prefix = /vd-1/vd-2/abc, pattern = /vd-1/vd-2/abc*
	return path[:wcIndex], path, true
}
// getParentSourcePath returns the deepest directory path that contains no
// wildcard, mirroring blobURL's getParentSourcePath.
func (parts adlsGen2PathURLPartsExtension) getParentSourcePath() string {
	path := parts.DirectoryOrFilePath
	wcIndex := gCopyUtil.firstIndexOfWildCard(path)
	if wcIndex == -1 {
		// No wildcard: the full path already is the parent source path.
		return path
	}
	// Cut at the wildcard, then back up to the last separator so the result
	// only contains whole directory components.
	upToWildcard := path[:wcIndex]
	sepIndex := strings.LastIndex(upToWildcard, "/")
	if sepIndex == -1 {
		return ""
	}
	return upToWildcard[:sepIndex]
}
// createADLSGen2PathURLFromFileSystem returns a URL for the given ADLS gen2
// parts with its path replaced by directoryOrFilePath.
func (parts adlsGen2PathURLPartsExtension) createADLSGen2PathURLFromFileSystem(directoryOrFilePath string) url.URL {
	pathParts := parts // value receiver copy; the mutation stays local
	pathParts.DirectoryOrFilePath = directoryOrFilePath
	return pathParts.URL()
}
/////////////////////////////////////////////////////////////////////////////////////////////////
// s3URLPartsExtension wraps common.S3URLParts with azcopy-specific helpers
// (service-level search detection, object search prefix/pattern extraction).
type s3URLPartsExtension struct {
	common.S3URLParts
}
// isServiceLevelSearch checks whether the URL is a service level search for S3,
// and returns the search prefix (part before the wildcard) that bucket names
// must match when it is.
func (p *s3URLPartsExtension) isServiceLevelSearch() (isServiceLevelSearch bool, bucketPrefix string) {
	// Note: the named return was renamed from the exported-style
	// "IsServiceLevelSearch" to the idiomatic lowerCamelCase; return-value
	// names are invisible to callers, so this is fully compatible.
	//
	// A service level URL that needs bucket search takes one of two forms:
	// a. https://<service-endpoint>(/)
	// b. https://<service-endpoint>/bucketprefix*(/*)
	if !p.IsServiceSyntactically() && !strings.Contains(p.BucketName, wildCard) {
		// Neither form applies: it's not a service level search.
		return false, ""
	}
	isServiceLevelSearch = true
	// Case a: bucket name is empty, search for all buckets.
	if p.BucketName == "" {
		return
	}
	// Case b: bucket name contains a wildcard; the prefix is the content of
	// the bucket name up to the first wildcard.
	// Example 1: for URL https://<service-endpoint>/b-2*, bucketPrefix = b-2
	// Example 2: for URL https://<service-endpoint>/b-2*/vd/o*, bucketPrefix = b-2
	wildCardIndex := gCopyUtil.firstIndexOfWildCard(p.BucketName)
	bucketPrefix = p.BucketName[:wildCardIndex]
	return
}
// searchObjectPrefixAndPatternFromS3URL gets search prefix and pattern from S3 URL.
// The search prefix is used while listing objects in a bucket; the pattern
// supports azcopy-v10's wildcard matching.
func (p *s3URLPartsExtension) searchObjectPrefixAndPatternFromS3URL() (prefix, pattern string, isWildcardSearch bool) {
	objectKey := p.ObjectKey

	// Bucket-level URL: include every object in the bucket.
	// prefix = "", pattern = "*", not a wildcard search.
	if objectKey == "" {
		return "", "*", false
	}

	wcIndex := gCopyUtil.firstIndexOfWildCard(objectKey)
	if wcIndex < 0 {
		// No wildcard: the URL names a virtual directory or a specific
		// object; search everything under that key.
		// Example 1: https://<bucket-name>/vd-1/, prefix = /vd-1/
		// Example 2: https://<bucket-name>/vd-1/vd-2/, prefix = /vd-1/vd-2/
		// Example 3: https://<bucket-name>/vd-1/abc, prefix = /vd1/abc
		return objectKey, "*", false
	}

	// Wildcard search: the prefix is the object key up to the wildcard, and
	// the complete key acts as the match pattern.
	// Example: https://<bucket-name>/vd-1/vd-2/abc*
	//   -> prefix = /vd-1/vd-2/abc, pattern = /vd-1/vd-2/abc*
	return objectKey[:wcIndex], objectKey, true
}
// getParentSourcePath returns the source path without the wildcards.
// Files mentioned via the exclude/include flags are relative to the source;
// if the source has wildcards, they are relative to the parent source path,
// i.e. the last directory in the source before any wildcard.
// For Example: src = "/home/user/dir1" parentSourcePath = "/home/user/dir1"
// For Example: src = "/home/user/dir*" parentSourcePath = "/home/user"
// For Example: src = "/home/*" parentSourcePath = "/home"
func (p *s3URLPartsExtension) getParentSourcePath() string {
	src := p.ObjectKey
	wcIndex := gCopyUtil.firstIndexOfWildCard(src)
	if wcIndex == -1 {
		// No wildcard: the object key itself is the parent source path.
		return src
	}
	// Cut at the wildcard, then back up to the last separator so the result
	// only contains whole directory components.
	truncated := src[:wcIndex]
	if sepIndex := strings.LastIndex(truncated, "/"); sepIndex != -1 {
		return truncated[:sepIndex]
	}
	return ""
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"bytes"
"context"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
chk "gopkg.in/check.v1"
"io/ioutil"
"path/filepath"
"strings"
)
const (
	// defaultLogVerbosityForSync is the log verbosity used by the sync test
	// helpers in this suite.
	defaultLogVerbosityForSync = "WARNING"
)
// TestSyncDownloadWithSingleFile covers regular blob->file sync: a first sync
// where the local file is newer should schedule nothing; after the blob is
// regenerated (newer LMT), the single transfer should be scheduled.
// The loop also exercises non-ASCII and URL-unfriendly blob names.
func (s *cmdIntegrationSuite) TestSyncDownloadWithSingleFile(c *chk.C) {
	bsu := getBSU()
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)

	for _, blobName := range []string{"singleblobisbest", "打麻将.txt", "%4509%4254$85140&"} {
		// set up the container with a single blob
		blobList := []string{blobName}
		scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
		c.Assert(containerURL, chk.NotNil)

		// set up the destination as a single file
		dstDirName := scenarioHelper{}.generateLocalDirectory(c)
		dstFileName := blobName
		scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, blobList)

		// set up interceptor to capture scheduled transfers instead of running them
		mockedRPC := interceptor{}
		Rpc = mockedRPC.intercept
		mockedRPC.init()

		// construct the raw input to simulate user input
		rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, blobList[0])
		raw := getDefaultSyncRawInput(rawBlobURLWithSAS.String(), filepath.Join(dstDirName, dstFileName))

		// the file was created after the blob, so no sync should happen
		runSyncAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)

			// validate that the right number of transfers were scheduled
			c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
		})

		// recreate the blob to have a later last modified time
		scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
		mockedRPC.reset()

		runSyncAndVerify(c, raw, func(err error) {
			c.Assert(err, chk.IsNil)
			validateDownloadTransfersAreScheduled(c, "", "", []string{""}, mockedRPC)
		})
	}
}
// TestSyncDownloadWithEmptyDestination covers regular container->directory
// sync into an empty destination: everything must be transferred. It then
// re-runs with recursive off and checks only top-level blobs are scheduled.
func (s *cmdIntegrationSuite) TestSyncDownloadWithEmptyDestination(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobList))

		// validate that the right transfers were sent
		validateDownloadTransfersAreScheduled(c, "", "", blobList, mockedRPC)
	})

	// turn off recursive, this time only top blobs should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		c.Assert(len(mockedRPC.transfers), chk.Not(chk.Equals), len(blobList))

		// a source containing a path separator would be from a sub-directory
		for _, transfer := range mockedRPC.transfers {
			c.Assert(strings.Contains(transfer.Source, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
		}
	})
}
// TestSyncDownloadWithIdenticalDestination covers container->directory sync
// where the destination already mirrors the source: transfers are scheduled
// purely based on last-modified time, so the first run schedules nothing and
// a run after refreshing the blobs schedules everything.
func (s *cmdIntegrationSuite) TestSyncDownloadWithIdenticalDestination(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination with a folder that have the exact same files
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, blobList)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})

	// refresh the blobs' last modified time so that they are newer
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
	mockedRPC.reset()

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateDownloadTransfersAreScheduled(c, "", "", blobList, mockedRPC)
	})
}
// TestSyncDownloadWithMismatchedDestination covers container->directory sync
// where the destination is missing half of the source files and has extra
// files of its own: the missing half must be transferred and the extra files
// must be deleted by the sync.
func (s *cmdIntegrationSuite) TestSyncDownloadWithMismatchedDestination(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination with a folder that have half of the files from source
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, blobList[0:len(blobList)/2])
	// Bug fix: the original passed one string "extraFile1.pdf, extraFile2.txt"
	// (comma inside a single literal), creating one oddly-named file instead
	// of the two intended extra files.
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, []string{"extraFile1.pdf", "extraFile2.txt"})
	expectedOutput := blobList[len(blobList)/2:] // the missing half of source files should be transferred

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateDownloadTransfersAreScheduled(c, "", "", expectedOutput, mockedRPC)

		// make sure the extra files were deleted
		currentDstFileList, err := ioutil.ReadDir(dstDirName)
		// Bug fix: the ReadDir error was previously ignored, which could hide
		// a failure to inspect the destination at all.
		c.Assert(err, chk.IsNil)
		extraFilesFound := false
		for _, file := range currentDstFileList {
			if strings.Contains(file.Name(), "extra") {
				extraFilesFound = true
			}
		}

		c.Assert(extraFilesFound, chk.Equals, false)
	})
}
// TestSyncDownloadWithIncludeFlag verifies that the include flag limits the
// scope of source/destination comparison: only blobs matching the include
// patterns are scheduled for download.
func (s *cmdIntegrationSuite) TestSyncDownloadWithIncludeFlag(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to include
	blobsToInclude := []string{"important.pdf", "includeSub/amazing.jpeg", "exactName"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToInclude)
	includeString := "*.pdf;*.jpeg;exactName"

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)
	raw.include = includeString

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateDownloadTransfersAreScheduled(c, "", "", blobsToInclude, mockedRPC)
	})
}
// TestSyncDownloadWithExcludeFlag verifies that the exclude flag limits the
// scope of source/destination comparison: blobs matching the exclude patterns
// are never scheduled, so only the original blobList is transferred.
func (s *cmdIntegrationSuite) TestSyncDownloadWithExcludeFlag(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to exclude
	blobsToExclude := []string{"notGood.pdf", "excludeSub/lame.jpeg", "exactName"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToExclude)
	excludeString := "*.pdf;*.jpeg;exactName"

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)
	raw.exclude = excludeString

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		// only the non-excluded blobs (the original blobList) should be scheduled
		validateDownloadTransfersAreScheduled(c, "", "", blobList, mockedRPC)
	})
}
// TestSyncDownloadWithIncludeAndExcludeFlag verifies that the include and
// exclude flags work together: exclude wins over include when a blob matches
// both, so only blobsToInclude end up scheduled.
func (s *cmdIntegrationSuite) TestSyncDownloadWithIncludeAndExcludeFlag(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// add special blobs that we wish to include
	blobsToInclude := []string{"important.pdf", "includeSub/amazing.jpeg"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToInclude)
	includeString := "*.pdf;*.jpeg;exactName"

	// add special blobs that we wish to exclude
	// note that the excluded files also match the include string
	blobsToExclude := []string{"sorry.pdf", "exclude/notGood.jpeg", "exactName", "sub/exactName"}
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobsToExclude)
	excludeString := "so*;not*;exactName"

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)
	raw.include = includeString
	raw.exclude = excludeString

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		validateDownloadTransfersAreScheduled(c, "", "", blobsToInclude, mockedRPC)
	})
}
// TestSyncDownloadWithMissingDestination validates the bug fix for syncing
// into a destination folder that does not exist: the command should fail
// gracefully (an error, zero transfers) rather than crash.
func (s *cmdIntegrationSuite) TestSyncDownloadWithMissingDestination(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination as a missing folder
	dstDirName := filepath.Join(scenarioHelper{}.generateLocalDirectory(c), "imbatman")

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)

	runSyncAndVerify(c, raw, func(err error) {
		// error should not be nil, but the app should not crash either
		c.Assert(err, chk.NotNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// TestSyncMismatchContainerAndFile verifies the type-mismatch guard: syncing a
// container against a single local file must fail in both directions, with
// zero transfers scheduled.
func (s *cmdIntegrationSuite) TestSyncMismatchContainerAndFile(c *chk.C) {
	bsu := getBSU()

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, "")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// set up the destination as a single file
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)
	dstFileName := blobList[0]
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, blobList)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawContainerURLWithSAS(c, containerName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), filepath.Join(dstDirName, dstFileName))

	// type mismatch, we should get an error
	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})

	// reverse the source and destination
	raw = getDefaultSyncRawInput(filepath.Join(dstDirName, dstFileName), rawContainerURLWithSAS.String())

	// type mismatch, we should get an error
	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// TestSyncMismatchBlobAndDirectory verifies the type-mismatch guard: syncing a
// single blob against a local directory must fail in both directions, with
// zero transfers scheduled.
func (s *cmdIntegrationSuite) TestSyncMismatchBlobAndDirectory(c *chk.C) {
	bsu := getBSU()

	// set up the container with a single blob
	blobName := "singleblobisbest"
	blobList := []string{blobName}
	containerURL, containerName := createNewContainer(c, bsu)
	scenarioHelper{}.generateBlobsFromList(c, containerURL, blobList)
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)

	// set up the destination as a directory
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, blobList[0])
	raw := getDefaultSyncRawInput(rawBlobURLWithSAS.String(), dstDirName)

	// type mismatch, we should get an error
	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})

	// reverse the source and destination
	raw = getDefaultSyncRawInput(dstDirName, rawBlobURLWithSAS.String())

	// type mismatch, we should get an error
	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// TestSyncDownloadADLSDirectoryTypeMismatch downloads a blob that represents
// an ADLS directory (hdi_isfolder metadata) to a local file: this is a type
// mismatch and must fail with zero transfers scheduled.
func (s *cmdIntegrationSuite) TestSyncDownloadADLSDirectoryTypeMismatch(c *chk.C) {
	bsu := getBSU()
	blobName := "adlsdir"

	// set up the destination as a single file
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)
	dstFileName := blobName
	scenarioHelper{}.generateLocalFilesFromList(c, dstDirName, []string{blobName})

	// set up the container
	containerURL, containerName := createNewContainer(c, bsu)
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)

	// create a single blob that represents an ADLS directory
	// (zero-byte blob with the "hdi_isfolder" metadata flag)
	_, err := containerURL.NewBlockBlobURL(blobName).Upload(context.Background(), bytes.NewReader(nil),
		azblob.BlobHTTPHeaders{}, azblob.Metadata{"hdi_isfolder": "true"}, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawBlobURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, blobName)
	raw := getDefaultSyncRawInput(rawBlobURLWithSAS.String(), filepath.Join(dstDirName, dstFileName))

	// the source is a directory-marker blob, so the sync must be rejected
	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.NotNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, 0)
	})
}
// TestSyncDownloadWithADLSDirectory covers adls directory -> local directory
// sync: every blob under the directory should be downloaded EXCEPT the
// zero-byte blobs that merely represent the directory markers themselves.
func (s *cmdIntegrationSuite) TestSyncDownloadWithADLSDirectory(c *chk.C) {
	bsu := getBSU()
	adlsDirName := "adlsdir"

	// set up the container with numerous blobs
	containerURL, containerName := createNewContainer(c, bsu)
	blobList := scenarioHelper{}.generateCommonRemoteScenarioForBlob(c, containerURL, adlsDirName+"/")
	defer deleteContainer(c, containerURL)
	c.Assert(containerURL, chk.NotNil)
	c.Assert(len(blobList), chk.Not(chk.Equals), 0)

	// create a single blob that represents the ADLS directory
	dirBlob := containerURL.NewBlockBlobURL(adlsDirName)
	_, err := dirBlob.Upload(context.Background(), bytes.NewReader(nil),
		azblob.BlobHTTPHeaders{}, azblob.Metadata{"hdi_isfolder": "true"}, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)

	// create an extra blob that represents an empty ADLS directory, which should never be picked up
	_, err = containerURL.NewBlockBlobURL(adlsDirName+"/neverpickup").Upload(context.Background(), bytes.NewReader(nil),
		azblob.BlobHTTPHeaders{}, azblob.Metadata{"hdi_isfolder": "true"}, azblob.BlobAccessConditions{})
	c.Assert(err, chk.IsNil)

	// set up the destination with an empty folder
	dstDirName := scenarioHelper{}.generateLocalDirectory(c)

	// set up interceptor to capture scheduled transfers instead of running them
	mockedRPC := interceptor{}
	Rpc = mockedRPC.intercept
	mockedRPC.init()

	// construct the raw input to simulate user input
	rawContainerURLWithSAS := scenarioHelper{}.getRawBlobURLWithSAS(c, containerName, adlsDirName)
	raw := getDefaultSyncRawInput(rawContainerURLWithSAS.String(), dstDirName)

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)

		// validate that the right number of transfers were scheduled
		c.Assert(len(mockedRPC.transfers), chk.Equals, len(blobList))
	})

	// turn off recursive, this time only top blobs should be transferred
	raw.recursive = false
	mockedRPC.reset()

	runSyncAndVerify(c, raw, func(err error) {
		c.Assert(err, chk.IsNil)
		c.Assert(len(mockedRPC.transfers), chk.Not(chk.Equals), len(blobList))

		// a source containing a path separator would be from a sub-directory
		for _, transfer := range mockedRPC.transfers {
			c.Assert(strings.Contains(transfer.Source, common.AZCOPY_PATH_SEPARATOR_STRING), chk.Equals, false)
		}
	})
}
<file_sep>package cmd
import (
"context"
"errors"
"fmt"
"net/url"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-file-go/azfile"
)
// copyDownloadFileEnumerator enumerates an Azure Files source (file, directory
// or share) and accumulates the matching transfers into a copy job part order.
type copyDownloadFileEnumerator common.CopyJobPartOrderRequest
// enumerate enumerates file resources, and adds transfers accordingly.
// It supports equivalent functions as blob enumerator.
func (e *copyDownloadFileEnumerator) enumerate(cca *cookedCopyCmdArgs) error {
	ctx := context.TODO()

	// attempt to parse the source url
	sourceURL, err := url.Parse(gCopyUtil.replaceBackSlashWithSlash(cca.source))
	if err != nil {
		return errors.New("cannot parse source URL")
	}

	// append the sas at the end of query params.
	sourceURL = gCopyUtil.appendQueryParamToUrl(sourceURL, cca.sourceSAS)

	// Create pipeline for source Azure File service.
	// Note: only anonymous credential is supported for file source(i.e. SAS) now.
	srcCredInfo := common.CredentialInfo{CredentialType: common.ECredentialType.Anonymous()}
	srcFilePipeline, err := createFilePipeline(ctx, srcCredInfo)
	if err != nil {
		return err
	}
	srcFileURLPartExtension := fileURLPartsExtension{azfile.NewFileURLParts(*sourceURL)}

	// Case-1: Source is single file
	srcFileURL := azfile.NewFileURL(*sourceURL, srcFilePipeline)
	// Verify if source is a single file: the URL must look like a file
	// syntactically AND GetProperties must succeed against the service.
	if srcFileURLPartExtension.isFileSyntactically() {
		if fileProperties, err := srcFileURL.GetProperties(ctx); err == nil {
			var singleFileDestinationPath string
			// When the destination is a directory, place the file inside it
			// under its source name; otherwise treat destination as the file path.
			if gCopyUtil.isPathALocalDirectory(cca.destination) {
				singleFileDestinationPath = gCopyUtil.generateLocalPath(
					cca.destination, gCopyUtil.getFileNameFromPath(sourceURL.Path))
			} else {
				singleFileDestinationPath = cca.destination
			}

			if err := e.addDownloadFileTransfer(srcFileURL.URL(), singleFileDestinationPath, fileProperties, cca); err != nil {
				return err
			}

			return e.dispatchFinalPart(cca)
		} else {
			// GetProperties failed: decide whether the failure is fatal or
			// whether we should fall through to directory/share enumeration.
			if isFatal := handleSingleFileValidationErrorForAzureFile(err); isFatal {
				return err
			}
		}
	}

	glcm.Info(infoCopyFromDirectoryListOfFiles)

	// Case-2: Source is a file share or directory
	// The destination must be a directory, when source is share or directory.
	if !gCopyUtil.isPathALocalDirectory(cca.destination) && !strings.EqualFold(cca.destination, common.Dev_Null) {
		return fmt.Errorf("the destination must be an existing directory in this download scenario")
	}

	searchPrefix, fileNamePattern, isWildcardSearch := srcFileURLPartExtension.searchPrefixFromFileURL()
	// Copying an entire share/directory requires the recursive flag.
	if fileNamePattern == "*" && !cca.recursive && !isWildcardSearch {
		return fmt.Errorf("cannot copy the entire share or directory without recursive flag. Please use --recursive flag")
	}
	if err := e.addTransfersFromDirectory(ctx,
		azfile.NewShareURL(srcFileURLPartExtension.getShareURL(), srcFilePipeline).NewRootDirectoryURL(),
		cca.destination,
		searchPrefix,
		fileNamePattern,
		srcFileURLPartExtension.getParentSourcePath(),
		isWildcardSearch,
		cca); err != nil {
		return err
	}

	// If part number is 0 && number of transfer queued is 0
	// it means that no job part has been dispatched and there are no
	// transfer in Job to dispatch a JobPart.
	if e.PartNum == 0 && len(e.Transfers) == 0 {
		return fmt.Errorf("no transfer queued to copy. Please verify the source / destination")
	}

	// dispatch the JobPart as Final Part of the Job
	return e.dispatchFinalPart(cca)
}
// addTransfersFromDirectory enumerates files in directory and sub directories,
// and adds matched files into transfer.
func (e *copyDownloadFileEnumerator) addTransfersFromDirectory(
	ctx context.Context, srcDirectoryURL azfile.DirectoryURL,
	destBasePath, fileOrDirNamePrefix, fileNamePattern, parentSourcePath string,
	isWildcardSearch bool, cca *cookedCopyCmdArgs) error {

	// fileFilter decides whether a listed file should be transferred: it must
	// match the name pattern and pass the include/exclude filters.
	fileFilter := func(fileItem azfile.FileItem, fileURL azfile.FileURL) bool {
		fileURLPart := azfile.NewFileURLParts(fileURL.URL())
		// Check if file name matches pattern.
		if !gCopyUtil.matchBlobNameAgainstPattern(fileNamePattern, fileURLPart.DirectoryOrFilePath, cca.recursive) {
			return false
		}
		// Check the file should be included or not.
		if !gCopyUtil.resourceShouldBeIncluded(parentSourcePath, e.Include, fileURLPart.DirectoryOrFilePath) {
			return false
		}
		// Check the file should be excluded or not.
		if gCopyUtil.resourceShouldBeExcluded(parentSourcePath, e.Exclude, fileURLPart.DirectoryOrFilePath) {
			return false
		}
		return true
	}

	// enumerate files and sub directories in directory, and add matched files into transfer.
	return enumerateDirectoriesAndFilesInShare(
		ctx,
		srcDirectoryURL,
		fileOrDirNamePrefix,
		cca.recursive,
		fileFilter,
		func(fileItem azfile.FileItem, fileURL azfile.FileURL) error {
			fileURLPart := azfile.NewFileURLParts(fileURL.URL())

			var fileRelativePath = ""
			// As downloading blob logic temporarily, refactor after scenario ensured.
			// For a wildcard search, strip everything up to (and including)
			// the last separator of the prefix; otherwise compute the path
			// relative to the enumerated directory prefix.
			if isWildcardSearch {
				fileRelativePath = strings.Replace(fileURLPart.DirectoryOrFilePath,
					fileOrDirNamePrefix[:strings.LastIndex(fileOrDirNamePrefix, common.AZCOPY_PATH_SEPARATOR_STRING)+1], "", 1)
			} else {
				fileRelativePath = gCopyUtil.getRelativePath(fileOrDirNamePrefix, fileURLPart.DirectoryOrFilePath)
			}

			// TODO: Remove get attribute, when file's list method can return property and metadata.
			p, err := fileURL.GetProperties(ctx)
			if err != nil {
				return err
			}

			return e.addDownloadFileTransfer(
				fileURL.URL(),
				gCopyUtil.generateLocalPath(destBasePath, fileRelativePath),
				p,
				cca)
		})
}
// addDownloadFileTransfer builds a CopyTransfer describing the download of a
// single Azure file (with the SAS stripped from the source URL) and queues it.
func (e *copyDownloadFileEnumerator) addDownloadFileTransfer(srcURL url.URL, destPath string,
	properties *azfile.FileGetPropertiesResponse, cca *cookedCopyCmdArgs) error {
	transfer := common.CopyTransfer{
		Source:           gCopyUtil.stripSASFromFileShareUrl(srcURL).String(),
		Destination:      destPath,
		LastModifiedTime: properties.LastModified(),
		SourceSize:       properties.ContentLength(),
		ContentMD5:       properties.ContentMD5(),
	}
	return e.addTransfer(transfer, cca)
}
// addTransfer queues a single transfer on the underlying job part order request.
// When the destination root is the OS null device, the per-transfer destination
// is blanked so the writer targets devNull itself rather than a path beneath it.
func (e *copyDownloadFileEnumerator) addTransfer(transfer common.CopyTransfer, cca *cookedCopyCmdArgs) error {
	// if we are downloading to dev null, we must point to devNull itself, rather than some file under it
	if strings.EqualFold(e.DestinationRoot, common.Dev_Null) {
		transfer.Destination = ""
	}
	return addTransfer((*common.CopyJobPartOrderRequest)(e), transfer, cca)
}
// dispatchFinalPart sends the accumulated transfers as the final job part,
// delegating to the package-level dispatchFinalPart helper.
func (e *copyDownloadFileEnumerator) dispatchFinalPart(cca *cookedCopyCmdArgs) error {
	return dispatchFinalPart((*common.CopyJobPartOrderRequest)(e), cca)
}
<file_sep>package cmd
import (
"context"
"crypto/md5"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"strings"
"time"
"github.com/Azure/azure-storage-file-go/azfile"
"github.com/spf13/cobra"
)
// TestFileCommand represents the struct to get command
// for validating azcopy operations.
type TestFileCommand struct {
	// Object is the resource which needs to be validated against a resource in bucket(share/container).
	Object string
	// Subject is the remote resource against which object needs to be validated.
	Subject string
	// IsObjectDirectory defines if the object is a directory or not.
	// If the object is directory, then validation goes through another path.
	IsObjectDirectory bool
	// IsRecursive defines if recursive switch is on during transfer.
	IsRecursive bool
	// MetaData is the metadata of the file to be validated,
	// expected in "key1=value1;key2=value2" form (see validateMetadataForFile).
	MetaData string
	// NoGuessMimeType represents the azcopy NoGuessMimeType flag set while uploading the file.
	NoGuessMimeType bool
	// ContentType is the content type of the file to be validated.
	ContentType string
	// ContentEncoding is the content encoding of the file to be validated.
	ContentEncoding string
	// VerifyBlockOrPageSize represents the flag to determine whether number of blocks or pages needs
	// to be verified or not.
	// todo always set this to true
	VerifyBlockOrPageSize bool
	// FileType of the resource to be validated.
	FileType string
	// NumberOfBlocksOrPages is the number of blocks or pages expected from the file.
	NumberOfBlocksOrPages uint64
	// todo : numberofblockorpages can be an array with offset : end url.
	// todo consecutive page ranges get squashed.
	// PreserveLastModifiedTime represents the azcopy PreserveLastModifiedTime flag while downloading the file.
	PreserveLastModifiedTime bool
}
// init registers the testFile command (alias tFile), which validates a file
// transferred by AzCopy v2 against its local counterpart, together with all
// of the flags the validation supports.
func init() {
	raw := TestFileCommand{}
	testFileCmd := &cobra.Command{
		Use:     "testFile",
		Aliases: []string{"tFile"},
		Short:   "tests the file created using AZCopy v2",
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 2 {
				return fmt.Errorf("invalid arguments for test file command")
			}
			// first argument is the resource name.
			raw.Object = args[0]
			// second argument is the test directory.
			raw.Subject = args[1]
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			verifyFile(raw)
		},
	}
	rootCmd.AddCommand(testFileCmd)

	// Register the validation flags.
	flags := testFileCmd.PersistentFlags()
	flags.StringVar(&raw.MetaData, "metadata", "", "metadata expected from the file in the container")
	flags.StringVar(&raw.ContentType, "content-type", "", "content type expected from the file in the container")
	flags.StringVar(&raw.ContentEncoding, "content-encoding", "", "Upload to Azure Storage using this content encoding.")
	flags.BoolVar(&raw.IsObjectDirectory, "is-object-dir", false, "set the type of object to verify against the subject")
	flags.BoolVar(&raw.IsRecursive, "is-recursive", true, "Set whether to validate against subject recursively when object is directory.")
	// TODO: parameter name doesn't match file scenario, discuss and refactor.
	flags.Uint64Var(&raw.NumberOfBlocksOrPages, "number-blocks-or-pages", 0, "Use this block size to verify the number of blocks uploaded")
	flags.BoolVar(&raw.VerifyBlockOrPageSize, "verify-block-size", false, "this flag verify the block size by determining the number of blocks")
	flags.BoolVar(&raw.NoGuessMimeType, "no-guess-mime-type", false, "This sets the content-type based on the extension of the file.")
	flags.BoolVar(&raw.PreserveLastModifiedTime, "preserve-last-modified-time", false, "Only available when destination is file system.")
}
// verifyFile validates a file transferred by azcopy, dispatching to directory
// validation when the object is a directory and to single-file validation otherwise.
func verifyFile(testFileCmd TestFileCommand) {
	if !testFileCmd.IsObjectDirectory {
		verifySingleFileUpload(testFileCmd)
		return
	}
	verifyFileDirUpload(testFileCmd)
}
// verifyFileDirUpload verifies a directory that was recursively uploaded to a
// share or directory, exiting the process with code 1 on a bad subject URL.
func verifyFileDirUpload(testFileCmd TestFileCommand) {
	// Parse the subject (SAS) URL.
	sasURL, err := url.Parse(testFileCmd.Subject)
	if err != nil {
		fmt.Println("fail to parse the container sas ", testFileCmd.Subject)
		os.Exit(1)
	}

	// Since this is a directory validation, treat the SAS URL as a directory.
	pipeline := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
	dirURL := azfile.NewDirectoryURL(*sasURL, pipeline)

	// Remember the original dir path so relative paths can be derived while enumerating and comparing.
	baseAzureDirPath := azfile.NewFileURLParts(*sasURL).DirectoryOrFilePath

	// Validate the Azure directory against the local one.
	validateAzureDirWithLocalFile(dirURL, baseAzureDirPath, testFileCmd.Object, testFileCmd.IsRecursive)
}
// validateAzureDirWithLocalFile recursively walks the files in an Azure Files
// directory (and its sub-directories when isRecursive is set) and verifies that
// each remote file's content matches, by MD5, the corresponding file under
// localBaseDir. Any mismatch or I/O failure terminates the process with exit
// code 1.
//
// Fixes vs. previous version: a memory-mapping failure now exits instead of
// continuing with an invalid mapping, and the local file handle / mapping are
// released each iteration instead of leaking for the life of the walk.
func validateAzureDirWithLocalFile(curAzureDirURL azfile.DirectoryURL, baseAzureDirPath string, localBaseDir string, isRecursive bool) {
	for marker := (azfile.Marker{}); marker.NotDone(); {
		// look for all files that in current directory
		listFile, err := curAzureDirURL.ListFilesAndDirectoriesSegment(context.Background(), marker, azfile.ListFilesAndDirectoriesOptions{})
		if err != nil {
			fmt.Println(fmt.Sprintf("fail to list files and directories inside the directory. Please check the directory sas, %v", err))
			os.Exit(1)
		}
		if isRecursive {
			for _, dirInfo := range listFile.DirectoryItems {
				newDirURL := curAzureDirURL.NewDirectoryURL(dirInfo.Name)
				validateAzureDirWithLocalFile(newDirURL, baseAzureDirPath, localBaseDir, isRecursive)
			}
		}
		// Process the files returned in this result segment (if the segment is empty, the loop body won't execute)
		for _, fileInfo := range listFile.FileItems {
			curFileURL := curAzureDirURL.NewFileURL(fileInfo.Name)
			get, err := curFileURL.Download(context.Background(), 0, azfile.CountToEnd, false)
			if err != nil {
				fmt.Println(fmt.Sprintf("fail to download the file %s", fileInfo.Name))
				os.Exit(1)
			}
			retryReader := get.Body(azfile.RetryReaderOptions{MaxRetryRequests: 3})
			// read all bytes.
			fileBytesDownloaded, err := ioutil.ReadAll(retryReader)
			if err != nil {
				fmt.Println(fmt.Sprintf("fail to read the body of file %s downloaded and failed with error %s", fileInfo.Name, err.Error()))
				os.Exit(1)
			}
			retryReader.Close()
			// Derive the sub-directory + file name relative to the base Azure dir path.
			tokens := strings.SplitAfterN(curFileURL.URL().Path, baseAzureDirPath, 2)
			if len(tokens) < 2 {
				fmt.Println(fmt.Sprintf("fail to get sub directory and file name, file URL '%s', original dir path '%s'", curFileURL.String(), baseAzureDirPath))
				os.Exit(1)
			}
			subDirAndFileName := tokens[1]
			var objectLocalPath string
			if subDirAndFileName != "" && subDirAndFileName[0] != '/' {
				objectLocalPath = localBaseDir + "/" + subDirAndFileName
			} else {
				objectLocalPath = localBaseDir + subDirAndFileName
			}
			// opening the file locally and memory mapping it.
			sFileInfo, err := os.Stat(objectLocalPath)
			if err != nil {
				fmt.Println("fail to get the subject file info on local disk ")
				os.Exit(1)
			}
			sFile, err := os.Open(objectLocalPath)
			if err != nil {
				fmt.Println("fail to open file ", sFile)
				os.Exit(1)
			}
			sMap, err := NewMMF(sFile, false, 0, sFileInfo.Size())
			if err != nil {
				fmt.Println("fail to memory mapping the file ", sFileInfo.Name())
				// Exit instead of continuing with an invalid mapping (previous bug).
				sFile.Close()
				os.Exit(1)
			}
			// calculating the md5 of file on container.
			actualMd5 := md5.Sum(fileBytesDownloaded)
			// calculating md5 of resource locally.
			expectedMd5 := md5.Sum(sMap)
			// Release the mapping and file handle before moving to the next file
			// (previously both leaked for every file validated).
			sMap.Unmap()
			sFile.Close()
			if actualMd5 != expectedMd5 {
				fmt.Println("the upload file md5 is not equal to the md5 of actual file on disk for file ", fileInfo.Name)
				os.Exit(1)
			}
		}
		marker = listFile.NextMarker
	}
}
// validateMetadataForFile compares the metadata provided while uploading
// (expectedMetaDataString, in "key1=value1;key2=value2" form) with the metadata
// actually present on the file. It returns true when they match. When no
// expected metadata is given, the actual metadata must be empty.
//
// Fix vs. previous version: key/value pairs are split with SplitN(..., 2) so a
// value containing '=' is preserved intact, and a malformed pair without '='
// is rejected instead of causing an index-out-of-range panic.
func validateMetadataForFile(expectedMetaDataString string, actualMetaData azfile.Metadata) bool {
	if len(expectedMetaDataString) > 0 {
		// Parse the expected metadata string into a map.
		expectedMetaData := azfile.Metadata{}
		// split the metadata to get individual keyvalue pair in format key1=value1
		keyValuePair := strings.Split(expectedMetaDataString, ";")
		for index := 0; index < len(keyValuePair); index++ {
			// SplitN keeps any '=' characters inside the value intact.
			keyValue := strings.SplitN(keyValuePair[index], "=", 2)
			if len(keyValue) != 2 {
				fmt.Println(fmt.Sprintf("invalid metadata pair %q; expected format key=value", keyValuePair[index]))
				return false
			}
			expectedMetaData[keyValue[0]] = keyValue[1]
		}
		// if number of metadata provided while uploading
		// doesn't match the metadata with file on the container
		if len(expectedMetaData) != len(actualMetaData) {
			fmt.Println("number of user given key value pair of the actual metadata differs from key value pair of expected metaData")
			return false
		}
		// iterating through each key value pair of actual metaData and comparing the key value pair in expected metadata
		for key, value := range actualMetaData {
			if expectedMetaData[key] != value {
				fmt.Println(fmt.Sprintf("value of user given key %s is %s in actual data while it is %s in expected metadata", key, value, expectedMetaData[key]))
				return false
			}
		}
	} else {
		// No expected metadata given: the file must carry none.
		if len(actualMetaData) > 0 {
			return false
		}
	}
	return true
}
// verifySingleFileUpload downloads the remote file named by testFileCmd.Subject
// and validates it against the local file testFileCmd.Object: content (MD5),
// metadata, content-type, content-encoding and, optionally, the number of
// uploaded ranges. Any mismatch or I/O failure terminates the process with
// exit code 1; a successful zero-byte comparison exits with code 0.
//
// Fix vs. previous version: a failure to open the local file now exits
// immediately instead of falling through with a nil file handle.
func verifySingleFileUpload(testFileCmd TestFileCommand) {
	fileInfo, err := os.Stat(testFileCmd.Object)
	if err != nil {
		fmt.Println("error opening the destination file on local disk ")
		os.Exit(1)
	}
	file, err := os.Open(testFileCmd.Object)
	if err != nil {
		fmt.Println("error opening the file ", testFileCmd.Object)
		os.Exit(1) // previously missing: continuing here used a nil *os.File
	}

	// getting the shared access signature of the resource.
	sourceURL, err := url.Parse(testFileCmd.Subject)
	if err != nil {
		fmt.Println(fmt.Sprintf("Error parsing the file url source %s", testFileCmd.Object))
		os.Exit(1)
	}

	// creating the file url of the resource on container.
	p := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{Retry: azfile.RetryOptions{TryTimeout: time.Minute * 10}})
	fileURL := azfile.NewFileURL(*sourceURL, p)
	get, err := fileURL.Download(context.Background(), 0, azfile.CountToEnd, false)
	if err != nil {
		fmt.Println("unable to get file properties ", err.Error())
		os.Exit(1)
	}

	// reading all the bytes downloaded.
	retryReader := get.Body(azfile.RetryReaderOptions{MaxRetryRequests: 3})
	defer retryReader.Close()
	fileBytesDownloaded, err := ioutil.ReadAll(retryReader)
	if err != nil {
		fmt.Println("error reading the byes from response and failed with error ", err.Error())
		os.Exit(1)
	}

	if fileInfo.Size() == 0 {
		// If the fileSize is 0 and the len of downloaded bytes is not 0
		// validation fails
		if len(fileBytesDownloaded) != 0 {
			fmt.Println(fmt.Sprintf("validation failed since the actual file size %d differs from the downloaded file size %d", fileInfo.Size(), len(fileBytesDownloaded)))
			os.Exit(1)
		}
		// If both the actual and downloaded file size is 0,
		// validation is successful, no need to match the md5
		os.Exit(0)
	}

	// memory mapping the resource on local path.
	mmap, err := NewMMF(file, false, 0, fileInfo.Size())
	if err != nil {
		fmt.Println("error mapping the destination file: ", file, " file size: ", fileInfo.Size(), " Error: ", err.Error())
		os.Exit(1)
	}

	// calculating and verify the md5 of the resource
	// both locally and on the container.
	actualMd5 := md5.Sum(mmap)
	expectedMd5 := md5.Sum(fileBytesDownloaded)
	if actualMd5 != expectedMd5 {
		fmt.Println("the uploaded file's md5 doesn't matches the actual file's md5 for file ", testFileCmd.Object)
		os.Exit(1)
	}

	// verify the user given metadata supplied while uploading the file against the metadata actually present in the file
	if !validateMetadataForFile(testFileCmd.MetaData, get.NewMetadata()) {
		fmt.Println("meta data does not match between the actual and uploaded file.")
		os.Exit(1)
	}

	// verify the content-type
	expectedContentType := ""
	if testFileCmd.NoGuessMimeType {
		expectedContentType = testFileCmd.ContentType
	} else {
		expectedContentType = http.DetectContentType(mmap)
	}
	if !validateString(expectedContentType, get.ContentType()) {
		fmt.Println("mismatch content type between actual and user given file content type")
		os.Exit(1)
	}

	// verify the content-encoding
	if !validateString(testFileCmd.ContentEncoding, get.ContentEncoding()) {
		fmt.Println("mismatch content encoding between actual and user given file content encoding")
		os.Exit(1)
	}

	mmap.Unmap()
	file.Close()

	// verify the number of pageranges.
	// this verifies the page-size and azcopy pagefile implementation.
	if testFileCmd.VerifyBlockOrPageSize {
		numberOfPages := int(testFileCmd.NumberOfBlocksOrPages)
		resp, err := fileURL.GetRangeList(context.Background(), 0, azfile.CountToEnd)
		if err != nil {
			fmt.Println("error getting the range list ", err.Error())
			os.Exit(1)
		}
		if numberOfPages != (len(resp.Items)) {
			fmt.Println("number of ranges uploaded is different from the number of expected to be uploaded")
			os.Exit(1)
		}
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// This file contains credential utils used only in cmd module.
package cmd
import (
"context"
"errors"
"fmt"
"net/url"
"os"
"sync"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-file-go/azfile"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// once guards the one-time construction of the token manager below.
var once sync.Once

// only one UserOAuthTokenManager should exist in azcopy-v2 process in cmd(FE) module for current user.
// (given appAppPathFolder is mapped to current user)
var currentUserOAuthTokenManager *common.UserOAuthTokenManager

// Names used when persisting the OAuth token in the per-user credential cache.
const oauthLoginSessionCacheKeyName = "AzCopyOAuthTokenCache"
const oauthLoginSessionCacheServiceName = "AzCopyV10"
const oauthLoginSessionCacheAccountName = "AzCopyOAuthTokenCache"
// GetUserOAuthTokenManagerInstance lazily creates (once per process) and
// returns the OAuthTokenManager for the current user.
// Note: Currently, only support to have TokenManager for one user mapping to one tenantID.
func GetUserOAuthTokenManagerInstance() *common.UserOAuthTokenManager {
	once.Do(func() {
		if azcopyAppPathFolder == "" {
			panic("invalid state, azcopyAppPathFolder should be initialized by root")
		}
		cacheOptions := common.CredCacheOptions{
			DPAPIFilePath: azcopyAppPathFolder,
			KeyName:       oauthLoginSessionCacheKeyName,
			ServiceName:   oauthLoginSessionCacheServiceName,
			AccountName:   oauthLoginSessionCacheAccountName,
		}
		currentUserOAuthTokenManager = common.NewUserOAuthTokenManagerInstance(cacheOptions)
	})
	return currentUserOAuthTokenManager
}
// ==============================================================================================
// Get credential type methods
// ==============================================================================================
// getBlobCredentialType is used to get Blob's credential type when user wishes to use OAuth session mode.
// The verification logic follows following rules:
// 1. For source or dest url, if the url contains SAS or SAS is provided standalone, indicating using anonymous credential(SAS).
// 2. If the blob URL can be public access resource, and validated as public resource, indicating using anonymous credential(public resource).
// 3. If there is cached OAuth token, indicating using token credential.
// 4. If there is OAuth token info passed from env var, indicating using token credential. (Note: this is only for testing)
// 5. Otherwise use anonymous credential.
// The implementation logic follows above rule, and adjusts sequence to save web request(for verifying public resource).
func getBlobCredentialType(ctx context.Context, blobResourceURL string, canBePublic bool, standaloneSAS bool) (common.CredentialType, error) {
	resourceURL, err := url.Parse(blobResourceURL)
	if err != nil {
		return common.ECredentialType.Unknown(), errors.New("provided blob resource string is not in URL format")
	}
	sas := azblob.NewBlobURLParts(*resourceURL).SAS
	// If SAS existed (embedded in the URL or passed standalone), return anonymous credential type.
	if isSASExisted := sas.Signature() != ""; isSASExisted || standaloneSAS {
		return common.ECredentialType.Anonymous(), nil
	}
	// If SAS token doesn't exist, it could be using OAuth token or the resource is public.
	if !oAuthTokenExists() { // no oauth token found, then directly return anonymous credential
		return common.ECredentialType.Anonymous(), nil
	} else if !canBePublic { // oauth token found, if it can not be public resource, return token credential
		return common.ECredentialType.OAuthToken(), nil
	} else { // check if it's public resource, and return credential type correspondingly
		// If has cached token, and no SAS token provided, it could be a public blob resource.
		// Probe the resource anonymously: if an unauthenticated GetProperties succeeds,
		// the resource is public and no token is needed.
		p := azblob.NewPipeline(
			azblob.NewAnonymousCredential(),
			azblob.PipelineOptions{
				Retry: azblob.RetryOptions{
					Policy:        azblob.RetryPolicyExponential,
					MaxTries:      ste.UploadMaxTries,
					TryTimeout:    ste.UploadTryTimeout,
					RetryDelay:    ste.UploadRetryDelay,
					MaxRetryDelay: ste.UploadMaxRetryDelay,
				},
			})
		isContainer := copyHandlerUtil{}.urlIsContainerOrShare(resourceURL)
		isPublicResource := false
		if isContainer {
			containerURL := azblob.NewContainerURL(*resourceURL, p)
			if _, err := containerURL.GetProperties(ctx, azblob.LeaseAccessConditions{}); err == nil {
				isPublicResource = true
			}
		} else {
			blobURL := azblob.NewBlobURL(*resourceURL, p)
			if _, err := blobURL.GetProperties(ctx, azblob.BlobAccessConditions{}); err == nil {
				isPublicResource = true
			}
		}
		if isPublicResource {
			return common.ECredentialType.Anonymous(), nil
		} else {
			return common.ECredentialType.OAuthToken(), nil
		}
	}
}
// getBlobFSCredentialType is used to get BlobFS's credential type when user wishes to use OAuth session mode.
// The verification logic follows following rules:
// 1. If there is cached session OAuth token, indicating using token credential.
// 2. If there is OAuth token info passed from env var, indicating using token credential. (Note: this is only for testing)
// 3. Otherwise use shared key.
func getBlobFSCredentialType() (common.CredentialType, error) {
	if oAuthTokenExists() {
		return common.ECredentialType.OAuthToken(), nil
	}
	// TODO: To remove, use for internal testing, SharedKey should not be supported from commandline
	if name, key := os.Getenv("ACCOUNT_NAME"), os.Getenv("ACCOUNT_KEY"); name != "" && key != "" {
		return common.ECredentialType.SharedKey(), nil
	}
	return common.ECredentialType.Unknown(), errors.New("OAuth token or shared key should be provided for Blob FS")
}
// oAuthTokenExists reports whether an OAuth token is available, either via the
// environment variable or the per-user token cache.
// Note: Environment variable for OAuth token should only be used in testing, or the case user clearly know how to protect
// the tokens
func oAuthTokenExists() (oauthTokenExists bool) {
	if common.EnvVarOAuthTokenInfoExists() {
		glcm.Info(fmt.Sprintf("%v is set.", common.EnvVarOAuthTokenInfo)) // Log the case when env var is set, as it's rare case.
		oauthTokenExists = true
	}

	uotm := GetUserOAuthTokenManagerInstance()
	if hasCachedToken, err := uotm.HasCachedToken(); hasCachedToken {
		oauthTokenExists = true
	} else if err != nil {
		// Deliberately ignored: failing to read the cache must not change the logic flow.
		// Log the error if fail to get cached token, as these are unhandled errors, and should not influence the logic flow.
		// Uncomment for debugging.
		// glcm.Info(fmt.Sprintf("No cached token found, %v", err))
	}
	return
}
// getAzureFileCredentialType is used to get Azure file's credential type.
// Azure Files only supports anonymous (SAS-based) credentials currently,
// so this unconditionally returns Anonymous.
func getAzureFileCredentialType() (common.CredentialType, error) {
	// Azure file only support anonymous credential currently.
	return common.ECredentialType.Anonymous(), nil
}
// envVarCredentialType is used for passing credential type into AzCopy through an environment variable.
// Note: This is only used for internal integration, and not encouraged to be used directly.
const envVarCredentialType = "AZCOPY_CRED_TYPE"
// GetCredTypeFromEnvVar tries to get credential type from environment variable defined by envVarCredentialType.
// The variable is cleared after the first read so it does not propagate into
// child processes unexpectedly; unset or unparsable values yield Unknown.
func GetCredTypeFromEnvVar() common.CredentialType {
	rawVal := os.Getenv(envVarCredentialType)
	if rawVal == "" {
		return common.ECredentialType.Unknown()
	}

	// Consume the value so it never spreads into child processes.
	os.Setenv(envVarCredentialType, "")

	// Parse whatever was set; degrade to Unknown on failure.
	var credType common.CredentialType
	if parseErr := credType.Parse(rawVal); parseErr != nil {
		return common.ECredentialType.Unknown()
	}
	return credType
}
// rawFromToInfo bundles the user-supplied transfer description used to work
// out which credential type a command should use.
type rawFromToInfo struct {
	fromTo              common.FromTo
	source, destination string
	sourceSAS, destinationSAS string // Standalone SAS which might be provided
}
// getCredentialType checks user provided info, and gets the proper credential type
// for current command. An explicit type from the AZCOPY_CRED_TYPE env var wins;
// otherwise the type is derived per transfer direction from SAS presence,
// cached tokens and (for blobs) public-resource probing.
func getCredentialType(ctx context.Context, raw rawFromToInfo) (credentialType common.CredentialType, err error) {
	// In the integration case, AzCopy directly use caller provided credential type if specified and not Unknown.
	if credType := GetCredTypeFromEnvVar(); credType != common.ECredentialType.Unknown() {
		return credType, nil
	}

	// Could be using oauth session mode or non-oauth scenario which uses SAS authentication or public endpoint,
	// verify credential type with cached token info, src or dest resource URL.
	switch raw.fromTo {
	case common.EFromTo.BlobBlob(), common.EFromTo.FileBlob(), common.EFromTo.S3Blob():
		// For blob/file to blob copy, calculate credential type for destination (currently only support StageBlockFromURL)
		// If the traditional approach(download+upload) need be supported, credential type should be calculated for both src and dest.
		fallthrough // deliberate: these directions share the LocalBlob/PipeBlob destination handling below
	case common.EFromTo.LocalBlob(), common.EFromTo.PipeBlob():
		if credentialType, err = getBlobCredentialType(ctx, raw.destination, false, raw.destinationSAS != ""); err != nil {
			return common.ECredentialType.Unknown(), err
		}
	case common.EFromTo.BlobTrash():
		// For BlobTrash direction, use source as resource URL, and it should not be public access resource.
		if credentialType, err = getBlobCredentialType(ctx, raw.source, false, raw.sourceSAS != ""); err != nil {
			return common.ECredentialType.Unknown(), err
		}
	case common.EFromTo.BlobLocal(), common.EFromTo.BlobPipe():
		// Downloads may target public blobs, hence canBePublic=true.
		if credentialType, err = getBlobCredentialType(ctx, raw.source, true, raw.sourceSAS != ""); err != nil {
			return common.ECredentialType.Unknown(), err
		}
	case common.EFromTo.LocalBlobFS(), common.EFromTo.BlobFSLocal():
		if credentialType, err = getBlobFSCredentialType(); err != nil {
			return common.ECredentialType.Unknown(), err
		}
	case common.EFromTo.LocalFile(), common.EFromTo.FileLocal(), common.EFromTo.FileTrash(), common.EFromTo.FilePipe(), common.EFromTo.PipeFile():
		if credentialType, err = getAzureFileCredentialType(); err != nil {
			return common.ECredentialType.Unknown(), err
		}
	default:
		credentialType = common.ECredentialType.Anonymous()
		// Log the FromTo types which getCredentialType hasn't solved, in case of miss-use.
		glcm.Info(fmt.Sprintf("Use anonymous credential by default for from-to '%v'", raw.fromTo))
	}

	return credentialType, nil
}
// ==============================================================================================
// pipeline factory methods
// ==============================================================================================
// createBlobPipeline builds the blob transfer pipeline for the given
// credential info, using the standard upload retry settings and the AzCopy
// user agent for telemetry.
func createBlobPipeline(ctx context.Context, credInfo common.CredentialInfo) (pipeline.Pipeline, error) {
	credential := common.CreateBlobCredential(ctx, credInfo, common.CredentialOpOptions{
		//LogInfo: glcm.Info, //Comment out for debugging
		LogError: glcm.Info,
	})

	return ste.NewBlobPipeline(
		credential,
		azblob.PipelineOptions{
			Telemetry: azblob.TelemetryOptions{
				Value: common.UserAgent,
			},
		},
		ste.XferRetryOptions{
			Policy:        0, // NOTE(review): magic number — presumably the exponential retry policy; confirm against ste's XferRetryPolicy values and prefer the named constant
			MaxTries:      ste.UploadMaxTries,
			TryTimeout:    ste.UploadTryTimeout,
			RetryDelay:    ste.UploadRetryDelay,
			MaxRetryDelay: ste.UploadMaxRetryDelay,
		},
		nil,
		ste.NewAzcopyHTTPClient()), nil
}
// createBlobFSPipeline builds an azbfs pipeline using the supplied credential
// info, with the standard upload retry policy and AzCopy telemetry.
func createBlobFSPipeline(ctx context.Context, credInfo common.CredentialInfo) (pipeline.Pipeline, error) {
	credential := common.CreateBlobFSCredential(ctx, credInfo, common.CredentialOpOptions{
		//LogInfo: glcm.Info, //Comment out for debugging
		LogError: glcm.Info,
	})

	retryOptions := azbfs.RetryOptions{
		Policy:        azbfs.RetryPolicyExponential,
		MaxTries:      ste.UploadMaxTries,
		TryTimeout:    ste.UploadTryTimeout,
		RetryDelay:    ste.UploadRetryDelay,
		MaxRetryDelay: ste.UploadMaxRetryDelay,
	}
	telemetryOptions := azbfs.TelemetryOptions{Value: common.UserAgent}

	return azbfs.NewPipeline(credential, azbfs.PipelineOptions{
		Retry:     retryOptions,
		Telemetry: telemetryOptions,
	}), nil
}
// createFilePipeline builds the Azure Files pipeline.
// TODO note: ctx and credInfo are ignored at the moment because we only support SAS for Azure File
func createFilePipeline(ctx context.Context, credInfo common.CredentialInfo) (pipeline.Pipeline, error) {
	retryOptions := azfile.RetryOptions{
		Policy:        azfile.RetryPolicyExponential,
		MaxTries:      ste.UploadMaxTries,
		TryTimeout:    ste.UploadTryTimeout,
		RetryDelay:    ste.UploadRetryDelay,
		MaxRetryDelay: ste.UploadMaxRetryDelay,
	}
	telemetryOptions := azfile.TelemetryOptions{Value: common.UserAgent}

	return azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{
		Retry:     retryOptions,
		Telemetry: telemetryOptions,
	}), nil
}
<file_sep>package azbfs_test
import (
"context"
"fmt"
"net/url"
"os"
"runtime"
"strings"
"testing"
"time"
"github.com/Azure/azure-storage-azcopy/azbfs"
chk "gopkg.in/check.v1"
)
// Test hooks the gocheck suite runner into the standard "go test" machinery.
func Test(t *testing.T) { chk.TestingT(t) }

// aztestsSuite groups all azbfs integration tests under one gocheck suite.
type aztestsSuite struct{}

var _ = chk.Suite(&aztestsSuite{})

const (
	// Prefixes used when generating unique names for test resources.
	fileSystemPrefix = "go"
	directoryPrefix  = "gotestdirectory"
	filePrefix       = "gotestfile"
)

// ctx is the shared background context for all test requests.
var ctx = context.Background()
func getAccountAndKey() (string, string) {
name := os.Getenv("ACCOUNT_NAME")
key := os.Getenv("ACCOUNT_KEY")
if name == "" || key == "" {
panic("ACCOUNT_NAME and ACCOUNT_KEY environment vars must be set before running tests")
}
return name, key
}
// getBfsServiceURL builds a shared-key-authenticated ServiceURL for the
// test account's Blob FS (dfs) endpoint.
func getBfsServiceURL() azbfs.ServiceURL {
	accountName, accountKey := getAccountAndKey()
	endpoint, _ := url.Parse(fmt.Sprintf("https://%s.dfs.core.windows.net/", accountName))
	cred := azbfs.NewSharedKeyCredential(accountName, accountKey)
	return azbfs.NewServiceURL(*endpoint, azbfs.NewPipeline(cred, azbfs.PipelineOptions{}))
}
// This function generates an entity name by concatenating the passed prefix,
// the name of the test requesting the entity name, and the minute, second, and nanoseconds of the call.
// This should make it easy to associate the entities with their test, uniquely identify
// them, and determine the order in which they were created.
// Note that this imposes a restriction on the length of test names
func generateName(prefix string) string {
// These next lines up through the for loop are obtaining and walking up the stack
// trace to extrat the test name, which is stored in name
pc := make([]uintptr, 10)
runtime.Callers(0, pc)
f := runtime.FuncForPC(pc[0])
name := f.Name()
for i := 0; !strings.Contains(name, "Suite"); i++ { // The tests are all scoped to the suite, so this ensures getting the actual test name
f = runtime.FuncForPC(pc[i])
name = f.Name()
}
funcNameStart := strings.Index(name, "Test")
name = name[funcNameStart+len("Test"):] // Just get the name of the test and not any of the garbage at the beginning
name = strings.ToLower(name) // Ensure it is a valid resource name
currentTime := time.Now()
name = fmt.Sprintf("%s%s%d%d%d", prefix, strings.ToLower(name), currentTime.Minute(), currentTime.Second(), currentTime.Nanosecond())
return name
}
// generateFileSystemName returns a unique name for a test file system.
func generateFileSystemName() string {
	return generateName(fileSystemPrefix)
}

// generateDirectoryName returns a unique name for a test directory.
func generateDirectoryName() string {
	return generateName(directoryPrefix)
}

// generateFileName returns a unique name for a test file.
func generateFileName() string {
	return generateName(filePrefix)
}
// getFileSystemURL returns a URL (and its generated name) for a
// not-yet-created file system on the given service.
func getFileSystemURL(c *chk.C, fsu azbfs.ServiceURL) (fs azbfs.FileSystemURL, name string) {
	name = generateFileSystemName()
	fs = fsu.NewFileSystemURL(name)
	return fs, name
}

// getDirectoryURLFromFileSystem returns a URL (and name) for a directory
// directly under the file system.
func getDirectoryURLFromFileSystem(c *chk.C, fs azbfs.FileSystemURL) (directory azbfs.DirectoryURL, name string) {
	name = generateDirectoryName()
	directory = fs.NewDirectoryURL(name)
	return directory, name
}

// getDirectoryURLFromDirectory returns a URL (and name) for a sub directory
// of parentDirectory.
func getDirectoryURLFromDirectory(c *chk.C, parentDirectory azbfs.DirectoryURL) (directory azbfs.DirectoryURL, name string) {
	name = generateDirectoryName()
	directory = parentDirectory.NewDirectoryURL(name)
	return directory, name
}

// getFileURLFromFileSystem returns a URL (and name) for a file in the file
// system's root directory.
// This is a convenience method, No public API to create file URL from fileSystem now. This method uses fileSystem's root directory.
func getFileURLFromFileSystem(c *chk.C, fs azbfs.FileSystemURL) (file azbfs.FileURL, name string) {
	name = generateFileName()
	file = fs.NewRootDirectoryURL().NewFileURL(name)
	return file, name
}

// getFileURLFromDirectory returns a URL (and name) for a file inside the
// given directory.
func getFileURLFromDirectory(c *chk.C, directory azbfs.DirectoryURL) (file azbfs.FileURL, name string) {
	name = generateFileName()
	file = directory.NewFileURL(name)
	return file, name
}
func createNewFileSystem(c *chk.C, fsu azbfs.ServiceURL) (fs azbfs.FileSystemURL, name string) {
fs, name = getFileSystemURL(c, fsu)
cResp, err := fs.Create(ctx)
c.Assert(err, chk.IsNil)
c.Assert(cResp.StatusCode(), chk.Equals, 201)
return fs, name
}
// createNewDirectoryFromFileSystem creates a brand-new directory directly
// under the given file system and asserts the creation returned HTTP 201.
func createNewDirectoryFromFileSystem(c *chk.C, fileSystem azbfs.FileSystemURL) (dir azbfs.DirectoryURL, name string) {
	dir, name = getDirectoryURLFromFileSystem(c, fileSystem)

	resp, err := dir.Create(ctx)
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 201)
	return dir, name
}
// createNewFileFromFileSystem creates a brand-new (empty) file under the file
// system's root directory and asserts the creation returned HTTP 201. This is
// a convenience wrapper; there is currently no public API to create a file
// URL straight from a file system.
func createNewFileFromFileSystem(c *chk.C, fileSystem azbfs.FileSystemURL) (file azbfs.FileURL, name string) {
	root := fileSystem.NewRootDirectoryURL()
	file, name = getFileURLFromDirectory(c, root)

	resp, err := file.Create(ctx, azbfs.BlobFSHTTPHeaders{})
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, 201)
	return file, name
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"net/url"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// urlToPageBlobCopier implements the s2sCopier interface for copying data
// from a remote source (reached through a pre-signed URL) into a destination
// page blob.
type urlToPageBlobCopier struct {
	pageBlobSenderBase
	// srcURL is the pre-signed URL of the source object, obtained once at
	// construction time and reused for every chunk.
	srcURL url.URL
}
// newURLToPageBlobCopier constructs an s2sCopier that copies from a
// pre-signed source URL into a destination page blob. When the source is
// itself a page blob, its access tier is carried over to the destination.
func newURLToPageBlobCopier(jptm IJobPartTransferMgr, destination string, p pipeline.Pipeline, pacer *pacer, srcInfoProvider IRemoteSourceInfoProvider) (s2sCopier, error) {
	// Preserve the source's blob tier when the source is a page blob;
	// otherwise leave the tier unset.
	destBlobTier := azblob.AccessTierNone
	if blobSrc, ok := srcInfoProvider.(IBlobSourceInfoProvider); ok && blobSrc.BlobType() == azblob.BlobPageBlob {
		destBlobTier = blobSrc.BlobTier()
	}

	base, err := newPageBlobSenderBase(jptm, destination, p, pacer, srcInfoProvider, destBlobTier)
	if err != nil {
		return nil, err
	}

	source, err := srcInfoProvider.PreSignedSourceURL()
	if err != nil {
		return nil, err
	}

	return &urlToPageBlobCopier{
		pageBlobSenderBase: *base,
		srcURL:             *source,
	}, nil
}
// Returns a chunk-func for blob copies
// GenerateCopyFunc builds the per-chunk work item that uploads one page range
// from the pre-signed source URL into the destination page blob, respecting
// the per-blob file pacer.
func (c *urlToPageBlobCopier) GenerateCopyFunc(id common.ChunkID, blockIndex int32, adjustedChunkSize int64, chunkIsWholeFile bool) chunkFunc {
	return createSendToRemoteChunkFunc(c.jptm, id, func() {
		if c.jptm.Info().SourceSize == 0 {
			// nothing to do, since this is a dummy chunk in a zero-size file, and the prologue will have done all the real work
			return
		}
		s2sPacer := newS2SPacer(c.pacer)
		// control rate of sending (since page blobs can effectively have per-blob throughput limits)
		c.jptm.LogChunkStatus(id, common.EWaitReason.FilePacer())
		if err := c.filePacer.RequestRightToSend(c.jptm.Context(), adjustedChunkSize); err != nil {
			// NOTE(review): unlike the UploadPagesFromURL error path below, this
			// branch does not return after failing the transfer — confirm whether
			// execution is really meant to continue once pacing fails.
			c.jptm.FailActiveUpload("Pacing block", err)
		}
		// set the latest service version from sdk as service version in the context, to use UploadPagesFromURL API.
		// AND enrich the context for 503 (ServerBusy) detection
		enrichedContext := withRetryNotification(
			context.WithValue(c.jptm.Context(), ServiceAPIVersionOverride, azblob.ServiceVersion),
			c.filePacer)
		// upload the page
		c.jptm.LogChunkStatus(id, common.EWaitReason.S2SCopyOnWire())
		_, err := c.destPageBlobURL.UploadPagesFromURL(
			enrichedContext, c.srcURL, id.OffsetInFile, id.OffsetInFile, adjustedChunkSize, azblob.PageBlobAccessConditions{}, nil)
		if err != nil {
			c.jptm.FailActiveS2SCopy("Uploading page from URL", err)
			return
		}
		s2sPacer.Done(adjustedChunkSize)
	})
}
<file_sep>package common
// AzcopyVersion is the released version string of the AzCopy tool.
const AzcopyVersion = "10.1.2"
// UserAgent is the HTTP User-Agent value sent on requests issued by AzCopy.
const UserAgent = "AzCopy/" + AzcopyVersion
// S3ImportUserAgent identifies traffic originating from S3-to-Azure imports.
const S3ImportUserAgent = "S3Import " + UserAgent
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"errors"
"github.com/Azure/azure-storage-azcopy/ste"
"net/url"
"strings"
)
// newBlobTraverserForRemove builds a blob traverser for the remove command.
// The source SAS (if any) is appended to the URL, and wildcard patterns are
// rejected because remove does not support pattern matching.
func newBlobTraverserForRemove(cca *cookedCopyCmdArgs) (t *blobTraverser, err error) {
	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)

	rawURL, err := url.Parse(cca.source)
	if err != nil {
		return
	}
	if cca.sourceSAS != "" {
		copyHandlerUtil{}.appendQueryParamToUrl(rawURL, cca.sourceSAS)
	}

	if strings.Contains(rawURL.Path, "*") {
		return nil, errors.New("illegal URL, no pattern matching allowed for remove command")
	}

	p, err := createBlobPipeline(ctx, cca.credentialInfo)
	if err != nil {
		return
	}

	// Removal never tracks enumeration progress, hence the nil counter.
	return newBlobTraverser(rawURL, p, ctx, cca.recursive, nil), nil
}
// newFileTraverserForRemove builds an Azure Files traverser for the remove
// command. The source SAS (if any) is appended to the URL, and wildcard
// patterns are rejected because remove does not support pattern matching.
func newFileTraverserForRemove(cca *cookedCopyCmdArgs) (t *fileTraverser, err error) {
	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)

	rawURL, err := url.Parse(cca.source)
	if err != nil {
		return
	}
	if cca.sourceSAS != "" {
		copyHandlerUtil{}.appendQueryParamToUrl(rawURL, cca.sourceSAS)
	}

	if strings.Contains(rawURL.Path, "*") {
		return nil, errors.New("illegal URL, no pattern matching allowed for remove command")
	}

	p, err := createFilePipeline(ctx, cca.credentialInfo)
	if err != nil {
		return
	}

	// Removal never tracks enumeration progress, hence the nil counter.
	return newFileTraverser(rawURL, p, ctx, cca.recursive, nil), nil
}
<file_sep>package azbfs_test
import (
"bytes"
"context"
//"crypto/md5"
//"fmt"
//"io/ioutil"
//"net/http"
"net/url"
//"strings"
//"time"
"github.com/Azure/azure-storage-azcopy/azbfs"
chk "gopkg.in/check.v1" // go get gopkg.in/check.v1
"io/ioutil"
"net/http"
)
// FileURLSuite groups the azbfs FileURL integration tests run via gocheck.
type FileURLSuite struct{}
// Register the suite so gocheck discovers and runs its Test* methods.
var _ = chk.Suite(&FileURLSuite{})
// delFile deletes the given file and asserts the service returned HTTP 200.
func delFile(c *chk.C, file azbfs.FileURL) {
	delResp, err := file.Delete(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(delResp.Response().StatusCode, chk.Equals, 200)
}
// getRandomDataAndReader builds n bytes of deterministic test data — byte i
// holds byte(i), wrapping at 256 — and returns both a reader positioned at
// the start of the data and the raw slice itself.
func getRandomDataAndReader(n int) (*bytes.Reader, []byte) {
	data := make([]byte, n)
	for i := range data {
		data[i] = byte(i)
	}
	return bytes.NewReader(data), data
}
// TestFileNewFileURLNegative verifies that NewFileURL panics when handed a
// nil pipeline.
func (s *FileURLSuite) TestFileNewFileURLNegative(c *chk.C) {
	c.Assert(func() { azbfs.NewFileURL(url.URL{}, nil) }, chk.Panics, "p can't be nil")
}
// TestFileCreateDelete creates and deletes a file, first in the file
// system's root directory and then inside a named directory, asserting the
// expected status codes (201 on create, 200 on delete) and that the
// standard response headers are populated each time.
func (s *FileURLSuite) TestFileCreateDelete(c *chk.C) {
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)
	// Create and delete file in root directory.
	file, _ := getFileURLFromFileSystem(c, fsURL)
	cResp, err := file.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.Response().StatusCode, chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")
	delResp, err := file.Delete(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(delResp.Response().StatusCode, chk.Equals, http.StatusOK)
	c.Assert(delResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(delResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(delResp.Date(), chk.Not(chk.Equals), "")
	dirURL, _ := createNewDirectoryFromFileSystem(c, fsURL)
	defer deleteDirectory(c, dirURL)
	// Create and delete file in named directory.
	file, _ = getFileURLFromDirectory(c, dirURL)
	cResp, err = file.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.Response().StatusCode, chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")
	delResp, err = file.Delete(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(delResp.Response().StatusCode, chk.Equals, http.StatusOK)
	c.Assert(delResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(delResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(delResp.Date(), chk.Not(chk.Equals), "")
}
// TestFileCreateDeleteNonExistingParent verifies that creating a file whose
// parent directory does not exist yet succeeds, and that the service
// implicitly creates the parent directory as part of the file creation.
func (s *FileURLSuite) TestFileCreateDeleteNonExistingParent(c *chk.C) {
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)
	// Create and delete file in directory that does not exist yet.
	dirNotExist, _ := getDirectoryURLFromFileSystem(c, fsURL)
	file, _ := getFileURLFromDirectory(c, dirNotExist)
	// Verify that the file was created even though its parent directory does not exist yet
	cResp, err := file.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.Response().StatusCode, chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")
	// Verify that the parent directory was created successfully
	dirResp, err := dirNotExist.GetProperties(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(dirResp.StatusCode(), chk.Equals, http.StatusOK)
}
// TestFileGetProperties creates a file and verifies GetProperties returns
// HTTP 200 with resource type "file" and the standard response headers.
func (s *FileURLSuite) TestFileGetProperties(c *chk.C) {
	fsu := getBfsServiceURL()
	fileSystemURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fileSystemURL)
	fileURL, _ := createNewFileFromFileSystem(c, fileSystemURL)
	defer delFile(c, fileURL)
	getResp, err := fileURL.GetProperties(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(getResp.Response().StatusCode, chk.Equals, http.StatusOK)
	c.Assert(getResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(getResp.XMsResourceType(), chk.Equals, "file")
	c.Assert(getResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(getResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(getResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(getResp.Date(), chk.Not(chk.Equals), "")
}
////TODO this is failing on the service side at the moment, the spec is not accurate
//func (s *FileURLSuite) TestCreateFileWithBody(c *chk.C) {
// fsu := getBfsServiceURL()
// fileSystemURL, _ := createNewFileSystem(c, fsu)
// defer delFileSystem(c, fileSystemURL)
//
// fileURL, _ := createNewFileFromFileSystem(c, fileSystemURL, 2048)
// defer delFile(c, fileURL)
//
// contentR, contentD := getRandomDataAndReader(2048)
//
// pResp, err := fileURL.Create(context.Background(), contentR)
// c.Assert(err, chk.IsNil)
// c.Assert(pResp.StatusCode(), chk.Equals, http.StatusCreated)
// c.Assert(pResp.ETag(), chk.Not(chk.Equals), "")
// c.Assert(pResp.LastModified(), chk.Not(chk.Equals), "")
// c.Assert(pResp.XMsRequestID(), chk.Not(chk.Equals), "")
// c.Assert(pResp.XMsVersion(), chk.Not(chk.Equals), "")
// c.Assert(pResp.Date(), chk.Not(chk.Equals), "")
//
// // Get with rangeGetContentMD5 enabled.
// // Partial data, check status code 206.
// resp, err := fileURL.Download(context.Background(), 0, 1024)
// c.Assert(err, chk.IsNil)
// c.Assert(resp.StatusCode(), chk.Equals, http.StatusPartialContent)
// c.Assert(resp.ContentLength(), chk.Equals, "1024")
// c.Assert(resp.ContentType(), chk.Equals, "application/octet-stream")
// c.Assert(resp.Status(), chk.Not(chk.Equals), "")
//
// download, err := ioutil.ReadAll(resp.Response().Body)
// c.Assert(err, chk.IsNil)
// c.Assert(download, chk.DeepEquals, contentD[:1024])
//}
// TestUploadDownloadRoundTrip appends two 2048-byte payloads to a file,
// flushes them, then downloads first a 1024-byte range (expecting 206) and
// then the whole 4096-byte file (expecting 200), verifying the downloaded
// bytes match what was uploaded.
func (s *FileURLSuite) TestUploadDownloadRoundTrip(c *chk.C) {
	fsu := getBfsServiceURL()
	fileSystemURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fileSystemURL)
	fileURL, _ := createNewFileFromFileSystem(c, fileSystemURL)
	defer delFile(c, fileURL)
	// The file content will be made up of two parts
	contentR1, contentD1 := getRandomDataAndReader(2048)
	contentR2, contentD2 := getRandomDataAndReader(2048)
	// Append first part
	pResp, err := fileURL.AppendData(context.Background(), 0, contentR1)
	c.Assert(err, chk.IsNil)
	c.Assert(pResp.StatusCode(), chk.Equals, http.StatusAccepted)
	c.Assert(pResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(pResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(pResp.Date(), chk.Not(chk.Equals), "")
	// Append second part
	pResp, err = fileURL.AppendData(context.Background(), 2048, contentR2)
	c.Assert(err, chk.IsNil)
	c.Assert(pResp.StatusCode(), chk.Equals, http.StatusAccepted)
	c.Assert(pResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(pResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(pResp.Date(), chk.Not(chk.Equals), "")
	// Flush data
	fResp, err := fileURL.FlushData(context.Background(), 4096, make([]byte, 0), azbfs.BlobFSHTTPHeaders{})
	c.Assert(err, chk.IsNil)
	c.Assert(fResp.StatusCode(), chk.Equals, http.StatusOK)
	c.Assert(fResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(fResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(fResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(fResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(fResp.Date(), chk.Not(chk.Equals), "")
	// Get Partial data, check status code 206.
	resp, err := fileURL.Download(context.Background(), 0, 1024)
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, http.StatusPartialContent)
	c.Assert(resp.ContentLength(), chk.Equals, int64(1024))
	c.Assert(resp.ContentType(), chk.Equals, "application/octet-stream")
	c.Assert(resp.Status(), chk.Not(chk.Equals), "")
	// Verify the partial data
	download, err := ioutil.ReadAll(resp.Response().Body)
	c.Assert(err, chk.IsNil)
	c.Assert(download, chk.DeepEquals, contentD1[:1024])
	// Get entire fileURL, check status code 200.
	resp, err = fileURL.Download(context.Background(), 0, 0)
	c.Assert(err, chk.IsNil)
	c.Assert(resp.StatusCode(), chk.Equals, http.StatusOK)
	c.Assert(resp.ContentLength(), chk.Equals, int64(4096))
	c.Assert(resp.Date(), chk.Not(chk.Equals), "")
	c.Assert(resp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(resp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(resp.RequestID(), chk.Not(chk.Equals), "")
	c.Assert(resp.Version(), chk.Not(chk.Equals), "")
	// Verify the entire content
	download, err = ioutil.ReadAll(resp.Response().Body)
	c.Assert(err, chk.IsNil)
	c.Assert(download[:2048], chk.DeepEquals, contentD1[:])
	c.Assert(download[2048:], chk.DeepEquals, contentD2[:])
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"errors"
"github.com/Azure/azure-storage-azcopy/ste"
"net/url"
"strings"
"sync/atomic"
)
// newLocalTraverserForSync constructs a local-filesystem traverser for one
// side of a sync operation (source when isSource is true, destination
// otherwise). Wildcard characters in the path are rejected, and every file
// scanned bumps the matching atomic progress counter on cca.
func newLocalTraverserForSync(cca *cookedSyncCmdArgs, isSource bool) (*localTraverser, error) {
	// Select the path and the progress counter for the requested side.
	path := cca.destination
	counter := &cca.atomicDestinationFilesScanned
	if isSource {
		path = cca.source
		counter = &cca.atomicSourceFilesScanned
	}

	if strings.ContainsAny(path, "*?") {
		return nil, errors.New("illegal local path, no pattern matching allowed for sync command")
	}

	onFileScanned := func() {
		atomic.AddUint64(counter, 1)
	}

	return newLocalTraverser(path, cca.recursive, onFileScanned), nil
}
// newBlobTraverserForSync constructs a blob traverser for one side of a sync
// operation (source when isSource is true, destination otherwise). The
// matching SAS token is appended to the URL, wildcard patterns are rejected,
// and every blob scanned bumps the matching atomic progress counter on cca.
func newBlobTraverserForSync(cca *cookedSyncCmdArgs, isSource bool) (t *blobTraverser, err error) {
	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)

	// Select URL, SAS and progress counter for the requested side.
	urlStr, sas, counter := cca.destination, cca.destinationSAS, &cca.atomicDestinationFilesScanned
	if isSource {
		urlStr, sas, counter = cca.source, cca.sourceSAS, &cca.atomicSourceFilesScanned
	}

	rawURL, err := url.Parse(urlStr)
	if err != nil {
		return
	}
	if sas != "" {
		copyHandlerUtil{}.appendQueryParamToUrl(rawURL, sas)
	}

	if strings.Contains(rawURL.Path, "*") {
		return nil, errors.New("illegal URL, no pattern matching allowed for sync command")
	}

	p, err := createBlobPipeline(ctx, cca.credentialInfo)
	if err != nil {
		return
	}

	onBlobScanned := func() {
		atomic.AddUint64(counter, 1)
	}

	return newBlobTraverser(rawURL, p, ctx, cca.recursive, onBlobScanned), nil
}
<file_sep>package common
import (
"log"
"os"
"strconv"
)
// Get the value of environment variable AZCOPY_CONCURRENCY_VALUE
// If the environment variable is set, it defines the number of concurrent connections
// transfer engine will spawn. If not set, transfer engine will spawn the default number
// of concurrent connections
func ComputeConcurrencyValue(numOfCPUs int) int {
concurrencyValueOverride := os.Getenv("AZCOPY_CONCURRENCY_VALUE")
if concurrencyValueOverride != "" {
val, err := strconv.ParseInt(concurrencyValueOverride, 10, 64)
if err != nil {
log.Fatalf("error parsing the env AZCOPY_CONCURRENCY_VALUE %q failed with error %v",
concurrencyValueOverride, err)
}
return int(val)
}
// fix the concurrency value for smaller machines
if numOfCPUs <= 4 {
return 32
}
// for machines that are extremely powerful, fix to 300 to avoid running out of file descriptors
if 16*numOfCPUs > 300 {
return 300
}
// for moderately powerful machines, compute a reasonable number
return 16 * numOfCPUs
}
<file_sep>package ste
import (
"net/http"
"github.com/Azure/azure-storage-azcopy/azbfs"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/azure-storage-file-go/azfile"
)
// ErrorEx wraps an error so that storage-service details (HTTP status code,
// request ID) can be extracted uniformly across the blob, file and bfs SDKs.
type ErrorEx struct {
	error
}
// TODO: consider rolling MSRequestID into this, so that all places that use this can pick up, and log, the request ID too
// ErrorCodeAndString extracts the HTTP status code and status text from a
// storage SDK error (azblob, azfile or azbfs). For any other error type it
// returns 0 and the plain error message.
func (errex ErrorEx) ErrorCodeAndString() (int, string) {
	switch e := interface{}(errex.error).(type) {
	case azblob.StorageError:
		return e.Response().StatusCode, e.Response().Status
	case azfile.StorageError:
		return e.Response().StatusCode, e.Response().Status
	case azbfs.StorageError:
		return e.Response().StatusCode, e.Response().Status
	default:
		return 0, errex.Error()
	}
}
// hasResponse is satisfied by SDK error types that can expose the raw HTTP
// response of the failed request.
type hasResponse interface {
	Response() *http.Response
}
// MSRequestID gets the request ID guid associated with the failed request.
// Returns "" if there isn't one (either no request, or there is a request
// but it doesn't carry the X-Ms-Request-Id header).
func (errex ErrorEx) MSRequestID() string {
	respErr, ok := errex.error.(hasResponse)
	if !ok {
		return ""
	}
	resp := respErr.Response()
	if resp == nil {
		return ""
	}
	return resp.Header.Get("X-Ms-Request-Id")
}
<file_sep>FROM golang:1.10
ENV GOPATH /go
ENV PATH ${GOPATH}/bin:$PATH
RUN go get -u github.com/golang/dep/cmd/dep
RUN go get -u golang.org/x/lint/golint
# Prepare environment for OS X cross-compilation.
# These steps are referenced from https://github.com/karalabe/xgo/blob/master/docker/base/Dockerfile (licensed with MIT)
# Note: Cross-compile might be considered to be replaced, when MacOS's container is used for testing.
# In that case, OSX's binary can be compiled directly in MacOS.
# For other platform's cross compilation, please refer to https://github.com/karalabe/xgo.
RUN \
apt-get update && \
apt-get install -y cmake libxml2-dev libssl-dev && \
apt-get install -y clang patch xz-utils && \
apt-get install -y libglib2.0-dev && \
apt-get install -y libgnome-keyring-dev && \
apt-get install -y libsecret-1-dev
#ENV OSX_SDK MacOSX10.11.sdk
#ENV OSX_NDK_X86 /usr/local/osx-ndk-x86
# Following steps in https://github.com/tpoechtrager/osxcross to prepare the OS X cross toolchain for Linux.
#RUN \
# OSX_SDK_PATH=https://s3.dockerproject.org/darwin/v2/$OSX_SDK.tar.xz && \
# wget -q $OSX_SDK_PATH && \
# \
# git clone https://github.com/tpoechtrager/osxcross.git && \
# mv `basename $OSX_SDK_PATH` ./osxcross/tarballs/ && \
# \
# sed -i -e 's|-march=native||g' ./osxcross/build_clang.sh ./osxcross/wrapper/build.sh && \
# UNATTENDED=yes OSX_VERSION_MIN=10.6 ./osxcross/build.sh && \
# mv ./osxcross/target $OSX_NDK_X86 && \
# \
# rm -rf ./osxcross
ENV PATH $OSX_NDK_X86/bin:$PATH<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"context"
"encoding/base64"
"errors"
"fmt"
"net/url"
"sync"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// blockBlobSenderBase holds the state shared by all senders that upload or
// copy data into a destination block blob: the destination URL, chunking
// parameters, the collected block IDs, and the headers/metadata/tier to
// apply on commit.
type blockBlobSenderBase struct {
	jptm IJobPartTransferMgr
	destBlockBlobURL azblob.BlockBlobURL
	chunkSize uint32
	numChunks uint32
	pacer *pacer
	blockIDs []string // one base64 block ID per chunk, filled in by setBlockID; guarded by muBlockIDs
	destBlobTier azblob.AccessTierType
	// Headers and other info that we will apply to the destination
	// object. For S2S, these come from the source service.
	// When sending local data, they are computed based on
	// the properties of the local file
	headersToApply azblob.BlobHTTPHeaders
	metadataToApply azblob.Metadata
	atomicPutListIndicator int32 // accessed atomically; records whether a CommitBlockList is needed
	muBlockIDs *sync.Mutex
}
// newBlockBlobSenderBase builds the shared sender state for a block blob
// destination. It validates that the source fits within the service's
// maximum block count, resolves the destination URL and the source
// properties, and determines the destination blob tier (an explicit
// user-specified tier overrides the inferred one).
func newBlockBlobSenderBase(jptm IJobPartTransferMgr, destination string, p pipeline.Pipeline, pacer *pacer, srcInfoProvider ISourceInfoProvider, inferredAccessTierType azblob.AccessTierType) (*blockBlobSenderBase, error) {
	info := jptm.Info()

	// Compute the chunk count and make sure the block-count limit is respected.
	chunkSize := info.BlockSize
	srcSize := info.SourceSize
	numChunks := getNumChunks(srcSize, chunkSize)
	if numChunks > common.MaxNumberOfBlocksPerBlob {
		return nil, fmt.Errorf("BlockSize %d for source of size %d is not correct. Number of blocks will exceed the limit", chunkSize, srcSize)
	}

	destURL, err := url.Parse(destination)
	if err != nil {
		return nil, err
	}

	props, err := srcInfoProvider.Properties()
	if err != nil {
		return nil, err
	}

	// If user set blob tier explicitly, override any value that our caller
	// may have guessed.
	destBlobTier := inferredAccessTierType
	if tierOverride, _ := jptm.BlobTiers(); tierOverride != common.EBlockBlobTier.None() {
		destBlobTier = tierOverride.ToAccessTierType()
	}

	return &blockBlobSenderBase{
		jptm:             jptm,
		destBlockBlobURL: azblob.NewBlockBlobURL(*destURL, p),
		chunkSize:        chunkSize,
		numChunks:        numChunks,
		pacer:            pacer,
		blockIDs:         make([]string, numChunks),
		headersToApply:   props.SrcHTTPHeaders.ToAzBlobHTTPHeaders(),
		metadataToApply:  props.SrcMetadata.ToAzBlobMetadata(),
		destBlobTier:     destBlobTier,
		muBlockIDs:       &sync.Mutex{},
	}, nil
}
// ChunkSize returns the block size used for this transfer.
func (s *blockBlobSenderBase) ChunkSize() uint32 {
	return s.chunkSize
}
// NumChunks returns the number of blocks the source will be split into.
func (s *blockBlobSenderBase) NumChunks() uint32 {
	return s.numChunks
}
// RemoteFileExists reports whether the destination block blob already exists
// at the service, by probing its properties.
func (s *blockBlobSenderBase) RemoteFileExists() (bool, error) {
	return remoteObjectExists(s.destBlockBlobURL.GetProperties(s.jptm.Context(), azblob.BlobAccessConditions{}))
}
// Prologue runs before any chunk is sent. When reading local files we may
// know more about the content type now than when the headers were first
// captured, so refresh it here.
func (s *blockBlobSenderBase) Prologue(ps common.PrologueState) {
	if !ps.CanInferContentType() {
		return
	}
	s.headersToApply.ContentType = ps.GetInferredContentType(s.jptm)
}
// Epilogue concludes the block blob send: it commits the block list when one
// is needed, applies the destination blob tier on success, and on failure
// deletes the destination blob to clean up any uncommitted blocks.
func (s *blockBlobSenderBase) Epilogue() {
	jptm := s.jptm
	// Snapshot the block IDs under the lock; chunk funcs may have written them concurrently.
	s.muBlockIDs.Lock()
	blockIDs := s.blockIDs
	s.muBlockIDs.Unlock()
	shouldPutBlockList := getPutListNeed(&s.atomicPutListIndicator)
	if shouldPutBlockList == putListNeedUnknown && !jptm.WasCanceled() {
		// Some chunk func should always have decided this before we get here.
		panic(errors.New("'put list' need flag was never set"))
	}
	// TODO: finalize and wrap in functions whether 0 is included or excluded in status comparisons
	// commit block list if necessary
	if jptm.TransferStatus() > 0 && shouldPutBlockList == putListNeeded {
		jptm.Log(pipeline.LogDebug, fmt.Sprintf("Conclude Transfer with BlockList %s", blockIDs))
		// commit the blocks.
		if _, err := s.destBlockBlobURL.CommitBlockList(jptm.Context(), blockIDs, s.headersToApply, s.metadataToApply, azblob.BlobAccessConditions{}); err != nil {
			jptm.FailActiveSend("Committing block list", err)
			// don't return, since need cleanup below
		}
	}
	// Set tier
	// GPv2 or Blob Storage is supported, GPv1 is not supported, can only set to blob without snapshot in active status.
	// https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
	if jptm.TransferStatus() > 0 && s.destBlobTier != azblob.AccessTierNone {
		// Set the latest service version from sdk as service version in the context.
		ctxWithLatestServiceVersion := context.WithValue(jptm.Context(), ServiceAPIVersionOverride, azblob.ServiceVersion)
		_, err := s.destBlockBlobURL.SetTier(ctxWithLatestServiceVersion, s.destBlobTier, azblob.LeaseAccessConditions{})
		if err != nil {
			jptm.FailActiveSendWithStatus("Setting BlockBlob tier", err, common.ETransferStatus.BlobTierFailure())
			// don't return, because need cleanup below
		}
	}
	// Cleanup
	if jptm.TransferStatus() <= 0 { // TODO: <=0 or <0?
		// If the transfer status value < 0, then transfer failed with some failure
		// there is a possibility that some uncommitted blocks will be there
		// Delete the uncommitted blobs
		// TODO: should we really do this deletion?  What if we are in an overwrite-existing-blob
		//    situation. Deletion has very different semantics then, compared to not deleting.
		deletionContext, cancelFn := context.WithTimeout(context.Background(), 30*time.Second)
		defer cancelFn()
		_, _ = s.destBlockBlobURL.Delete(deletionContext, azblob.DeleteSnapshotsOptionNone, azblob.BlobAccessConditions{})
		// TODO: question, is it OK to remoe this logging of failures (since there's no adverse effect of failure)
		//  if stErr, ok := err.(azblob.StorageError); ok && stErr.Response().StatusCode != http.StatusNotFound {
		// If the delete failed with Status Not Found, then it means there were no uncommitted blocks.
		// Other errors report that uncommitted blocks are there
		// bbu.jptm.LogError(bbu.blobURL.String(), "Deleting uncommitted blocks", err)
		//  }
	}
}
// setBlockID records the base64 block ID for the given chunk index. Each
// slot may be written exactly once; a second write indicates a scheduling
// bug and triggers a panic.
func (s *blockBlobSenderBase) setBlockID(index int32, value string) {
	s.muBlockIDs.Lock()
	defer s.muBlockIDs.Unlock()
	if s.blockIDs[index] != "" {
		panic(errors.New("block id set twice for one block"))
	}
	s.blockIDs[index] = value
}
// generateEncodedBlockID returns a fresh base64-encoded UUID, suitable for
// use as a block ID.
func (s *blockBlobSenderBase) generateEncodedBlockID() string {
	return base64.StdEncoding.EncodeToString([]byte(common.NewUUID().String()))
}
<file_sep>package cmd
import (
"bytes"
"context"
"fmt"
"net/url"
"os"
"time"
"io"
"io/ioutil"
"math/rand"
"net/http"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-file-go/azfile"
"github.com/Azure/azure-storage-blob-go/azblob"
minio "github.com/minio/minio-go"
"github.com/spf13/cobra"
)
// charset is the alphabet (letters and digits) used when generating random
// string content for test resources.
const charset = "abcdefghijklmnopqrstuvwxyz" +
	"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
// createStringWithRandomChars returns a string of the requested length whose
// characters are drawn at random from charset.
func createStringWithRandomChars(length int) string {
	b := make([]byte, length)
	for i := range b {
		// rand.Intn is the idiomatic bounded draw (the previous
		// rand.Int()%len(charset) form worked but obscures the intent).
		b[i] = charset[rand.Intn(len(charset))]
	}
	return string(b)
}
// initializes the create command, its aliases and description.
// The create command is a test utility: it provisions a resource (container,
// share, bucket, blob, file or S3 object) at the given URL so end-to-end
// tests have something to operate on. Service and resource kinds are chosen
// via the --serviceType and --resourceType flags.
func init() {
	resourceURL := ""
	serviceType := EServiceType.Blob()
	resourceType := EResourceType.SingleFile()
	serviceTypeStr := ""
	resourceTypeStr := ""
	blobSize := uint32(0)
	metaData := ""
	contentType := ""
	contentEncoding := ""
	contentDisposition := ""
	contentLanguage := ""
	cacheControl := ""
	contentMD5 := ""
	location := "" // NOTE(review): declared and bound as a flag below, but never read — confirm whether it is still needed
	createCmd := &cobra.Command{
		Use:     "create",
		Aliases: []string{"create"},
		Short:   "create creates resource.",
		Args: func(cmd *cobra.Command, args []string) error {
			// Fix: the previous check (len(args) > 1) allowed zero arguments
			// through and then panicked on args[0]; require exactly one.
			if len(args) != 1 {
				return fmt.Errorf("invalid arguments for create command")
			}
			resourceURL = args[0]
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			err := (&serviceType).Parse(serviceTypeStr)
			if err != nil {
				panic(fmt.Errorf("fail to parse service type %q, %v", serviceTypeStr, err))
			}
			err = (&resourceType).Parse(resourceTypeStr)
			if err != nil {
				panic(fmt.Errorf("fail to parse resource type %q, %v", resourceTypeStr, err))
			}
			// Dispatch on (service, resource) to the matching creation helper.
			switch serviceType {
			case EServiceType.Blob():
				switch resourceType {
				case EResourceType.Bucket():
					createContainer(resourceURL)
				case EResourceType.SingleFile():
					createBlob(
						resourceURL,
						blobSize,
						getBlobMetadata(metaData),
						azblob.BlobHTTPHeaders{
							ContentType:        contentType,
							ContentDisposition: contentDisposition,
							ContentEncoding:    contentEncoding,
							ContentLanguage:    contentLanguage,
							ContentMD5:         []byte(contentMD5),
							CacheControl:       cacheControl,
						})
				default:
					panic(fmt.Errorf("not implemented %v", resourceType))
				}
			case EServiceType.File():
				switch resourceType {
				case EResourceType.Bucket():
					createShareOrDirectory(resourceURL)
				case EResourceType.SingleFile():
					createFile(
						resourceURL,
						blobSize,
						getFileMetadata(metaData),
						azfile.FileHTTPHeaders{
							ContentType:        contentType,
							ContentDisposition: contentDisposition,
							ContentEncoding:    contentEncoding,
							ContentLanguage:    contentLanguage,
							ContentMD5:         []byte(contentMD5),
							CacheControl:       cacheControl,
						})
				default:
					panic(fmt.Errorf("not implemented %v", resourceType))
				}
			case EServiceType.S3():
				switch resourceType {
				case EResourceType.Bucket():
					createBucket(resourceURL)
				case EResourceType.SingleFile():
					// For S3, no content-MD5 will be returned during HEAD, i.e. no content-MD5 will be preserved during copy.
					// And content-MD5 header is not set during upload. E.g. in S3 management portal, no property content-MD5 can be set.
					// So here create object without content-MD5 as common practice.
					createObject(
						resourceURL,
						blobSize,
						minio.PutObjectOptions{
							ContentType:        contentType,
							ContentDisposition: contentDisposition,
							ContentEncoding:    contentEncoding,
							ContentLanguage:    contentLanguage,
							CacheControl:       cacheControl,
							UserMetadata:       getS3Metadata(metaData),
						})
				default:
					panic(fmt.Errorf("not implemented %v", resourceType))
				}
			case EServiceType.BlobFS():
				panic(fmt.Errorf("not implemented %v", serviceType))
			default:
				panic(fmt.Errorf("illegal resourceType %q", resourceType))
			}
		},
	}
	rootCmd.AddCommand(createCmd)
	createCmd.PersistentFlags().StringVar(&serviceTypeStr, "serviceType", "Blob", "Service type, could be blob, file or blobFS currently.")
	createCmd.PersistentFlags().StringVar(&resourceTypeStr, "resourceType", "SingleFile", "Resource type, could be a single file, bucket.")
	createCmd.PersistentFlags().Uint32Var(&blobSize, "blob-size", 0, "")
	createCmd.PersistentFlags().StringVar(&metaData, "metadata", "", "metadata for blob.")
	createCmd.PersistentFlags().StringVar(&contentType, "content-type", "", "content type for blob.")
	createCmd.PersistentFlags().StringVar(&contentEncoding, "content-encoding", "", "content encoding for blob.")
	createCmd.PersistentFlags().StringVar(&contentDisposition, "content-disposition", "", "content disposition for blob.")
	createCmd.PersistentFlags().StringVar(&contentLanguage, "content-language", "", "content language for blob.")
	createCmd.PersistentFlags().StringVar(&cacheControl, "cache-control", "", "cache control for blob.")
	createCmd.PersistentFlags().StringVar(&contentMD5, "content-md5", "", "content MD5 for blob.")
	createCmd.PersistentFlags().StringVar(&location, "location", "", "Location of the Azure account or S3 bucket to create")
}
// getBlobMetadata parses a metadata string of the form "key1=value1;key2=value2"
// into an azblob.Metadata map.
// Returns a nil map when the input string is empty, so callers can pass the
// result straight through to APIs that treat nil metadata as "none".
func getBlobMetadata(metadataString string) azblob.Metadata {
	var metadata azblob.Metadata
	if len(metadataString) > 0 {
		metadata = azblob.Metadata{}
		for _, keyAndValue := range strings.Split(metadataString, ";") { // key/value pairs are separated by ';'
			// split on the FIRST '=' only, so values containing '=' are preserved intact
			kv := strings.SplitN(keyAndValue, "=", 2)
			if len(kv) < 2 {
				continue // skip malformed pairs instead of panicking with index out of range
			}
			metadata[kv[0]] = kv[1]
		}
	}
	return metadata
}
// getFileMetadata parses a metadata string of the form "key1=value1;key2=value2"
// into an azfile.Metadata map.
// Returns a nil map when the input string is empty, so callers can pass the
// result straight through to APIs that treat nil metadata as "none".
func getFileMetadata(metadataString string) azfile.Metadata {
	var metadata azfile.Metadata
	if len(metadataString) > 0 {
		metadata = azfile.Metadata{}
		for _, keyAndValue := range strings.Split(metadataString, ";") { // key/value pairs are separated by ';'
			// split on the FIRST '=' only, so values containing '=' are preserved intact
			kv := strings.SplitN(keyAndValue, "=", 2)
			if len(kv) < 2 {
				continue // skip malformed pairs instead of panicking with index out of range
			}
			metadata[kv[0]] = kv[1]
		}
	}
	return metadata
}
// getS3Metadata parses a metadata string of the form "key1=value1;key2=value2"
// into a plain string map for S3 user metadata.
// Unlike the blob/file variants, this always returns a non-nil (possibly
// empty) map.
func getS3Metadata(metadataString string) map[string]string {
	metadata := make(map[string]string)
	if len(metadataString) > 0 {
		for _, keyAndValue := range strings.Split(metadataString, ";") { // key/value pairs are separated by ';'
			// split on the FIRST '=' only, so values containing '=' are preserved intact
			kv := strings.SplitN(keyAndValue, "=", 2)
			if len(kv) < 2 {
				continue // skip malformed pairs instead of panicking with index out of range
			}
			metadata[kv[0]] = kv[1]
		}
	}
	return metadata
}
// Can be used for overwrite scenarios.
// createContainer creates the blob container identified by the given URL
// (expected to carry a SAS). Conflict errors from an already-existing
// container are tolerated; any other failure terminates the process.
func createContainer(container string) {
	parsedURL, err := url.Parse(container)
	if err != nil {
		fmt.Println("error parsing the container URL with SAS ", err)
		os.Exit(1)
	}

	pl := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
	_, err = azblob.NewContainerURL(*parsedURL, pl).
		Create(context.Background(), azblob.Metadata{}, azblob.PublicAccessNone)

	// an "already exists" conflict is fine; anything else is fatal
	if ignoreStorageConflictStatus(err) != nil {
		fmt.Println("fail to create container, ", err)
		os.Exit(1)
	}
}
// createBlob creates a block blob at blobURL containing blobSize bytes of
// random content, applying the given metadata and HTTP headers. When no
// content type is supplied, one is sniffed from the generated payload.
// Any failure terminates the process.
func createBlob(blobURL string, blobSize uint32, metadata azblob.Metadata, blobHTTPHeaders azblob.BlobHTTPHeaders) {
	url, err := url.Parse(blobURL)
	if err != nil {
		fmt.Println("error parsing the blob sas ", err)
		os.Exit(1)
	}

	p := azblob.NewPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{})
	blobUrl := azblob.NewBlockBlobURL(*url, p)

	randomString := createStringWithRandomChars(int(blobSize))
	if blobHTTPHeaders.ContentType == "" {
		// sniff a content type from the payload when the caller didn't provide one
		blobHTTPHeaders.ContentType = http.DetectContentType([]byte(randomString))
	}

	putBlobResp, err := blobUrl.Upload(
		context.Background(),
		strings.NewReader(randomString),
		blobHTTPHeaders,
		metadata,
		azblob.BlobAccessConditions{})
	if err != nil {
		// Printf replaces the redundant Println(Sprintf(...)) pattern; output is identical
		fmt.Printf("error uploading the blob %v\n", err)
		os.Exit(1)
	}

	// drain and close the response body so the underlying connection can be reused
	if putBlobResp.Response() != nil {
		io.Copy(ioutil.Discard, putBlobResp.Response().Body)
		putBlobResp.Response().Body.Close()
	}
}
// createShareOrDirectory creates the Azure Files share and/or directory the
// given URL points to. A URL with a share name but no directory/file path is
// treated as a share; otherwise a directory is created. Conflict
// (already-exists) errors are tolerated; other failures exit the process.
// After creation the directory is verified to exist via GetProperties.
func createShareOrDirectory(shareOrDirectoryURLStr string) {
	u, err := url.Parse(shareOrDirectoryURLStr)
	if err != nil {
		fmt.Println("error parsing the share or directory URL with SAS ", err)
		os.Exit(1)
	}

	p := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})

	fileURLPart := azfile.NewFileURLParts(*u)
	isShare := false

	// share name present but no path below it => the URL denotes a share
	if fileURLPart.ShareName != "" && fileURLPart.DirectoryOrFilePath == "" {
		isShare = true
		// This is a share
		shareURL := azfile.NewShareURL(*u, p)
		_, err := shareURL.Create(context.Background(), azfile.Metadata{}, 0)
		if ignoreStorageConflictStatus(err) != nil {
			fmt.Println("fail to create share, ", err)
			os.Exit(1)
		}
	}

	dirURL := azfile.NewDirectoryURL(*u, p) // i.e. root directory, in share's case
	if !isShare {
		_, err := dirURL.Create(context.Background(), azfile.Metadata{})
		if ignoreStorageConflictStatus(err) != nil {
			fmt.Println("fail to create directory, ", err)
			os.Exit(1)
		}
	}

	// Finally validate if directory with specified URL exists, if doesn't exist, then report create failure.
	// (the 1s sleep presumably allows the service to settle before verification — confirm if flaky)
	time.Sleep(1 * time.Second)
	_, err = dirURL.GetProperties(context.Background())
	if err != nil {
		fmt.Println("error createShareOrDirectory with URL, ", err)
		os.Exit(1)
	}
}
// createFile creates an Azure File at fileURLStr containing fileSize bytes of
// random content, applying the given metadata and HTTP headers. When no
// content type is supplied, one is sniffed from the generated payload.
// Any failure terminates the process.
func createFile(fileURLStr string, fileSize uint32, metadata azfile.Metadata, fileHTTPHeaders azfile.FileHTTPHeaders) {
	url, err := url.Parse(fileURLStr)
	if err != nil {
		fmt.Println("error parsing the blob sas ", err)
		os.Exit(1)
	}

	p := azfile.NewPipeline(azfile.NewAnonymousCredential(), azfile.PipelineOptions{})
	fileURL := azfile.NewFileURL(*url, p)

	randomString := createStringWithRandomChars(int(fileSize))
	if fileHTTPHeaders.ContentType == "" {
		// sniff a content type from the payload when the caller didn't provide one
		fileHTTPHeaders.ContentType = http.DetectContentType([]byte(randomString))
	}

	err = azfile.UploadBufferToAzureFile(context.Background(), []byte(randomString), fileURL, azfile.UploadToAzureFileOptions{
		FileHTTPHeaders: fileHTTPHeaders,
		Metadata:        metadata,
	})
	if err != nil {
		// Printf replaces the redundant Println(Sprintf(...)) pattern; output is identical
		fmt.Printf("error uploading the file %v\n", err)
		os.Exit(1)
	}
}
// createBucket creates the S3 bucket named in the given URL, in the region the
// URL encodes. A MakeBucket failure is tolerated when the bucket turns out to
// already exist; any other failure terminates the process.
func createBucket(bucketURLStr string) {
	parsedURL, err := url.Parse(bucketURLStr)
	if err != nil {
		fmt.Println("fail to parse the bucket URL, ", err)
		os.Exit(1)
	}

	urlParts, err := common.NewS3URLParts(*parsedURL)
	if err != nil {
		fmt.Println("new S3 URL parts, ", err)
		os.Exit(1)
	}

	client := createS3ClientWithMinio(createS3ResOptions{Location: urlParts.Region})

	if makeErr := client.MakeBucket(urlParts.BucketName, urlParts.Region); makeErr != nil {
		// MakeBucket may fail simply because the bucket is already there;
		// only die when it genuinely doesn't exist
		exists, err := client.BucketExists(urlParts.BucketName)
		if err != nil || !exists {
			fmt.Println("fail to create bucket, ", err)
			os.Exit(1)
		}
	}
}
// createObject uploads objectSize bytes of random content to the S3 object
// identified by objectURLStr, using the supplied put options. When no content
// type is set in the options, one is sniffed from the payload.
// Any failure terminates the process.
func createObject(objectURLStr string, objectSize uint32, o minio.PutObjectOptions) {
	parsedURL, err := url.Parse(objectURLStr)
	if err != nil {
		fmt.Println("fail to parse the object URL, ", err)
		os.Exit(1)
	}

	urlParts, err := common.NewS3URLParts(*parsedURL)
	if err != nil {
		fmt.Println("new S3 URL parts, ", err)
		os.Exit(1)
	}

	client := createS3ClientWithMinio(createS3ResOptions{Location: urlParts.Region})

	payload := createStringWithRandomChars(int(objectSize))
	if o.ContentType == "" {
		o.ContentType = http.DetectContentType([]byte(payload))
	}

	_, err = client.PutObject(urlParts.BucketName, urlParts.ObjectKey,
		strings.NewReader(payload), int64(objectSize), o)
	if err != nil {
		fmt.Println("fail to upload file to S3 object, ", err)
		os.Exit(1)
	}
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"fmt"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
"net/url"
"os"
"path"
"path/filepath"
"strings"
)
// extract the right info from cooked arguments and instantiate a generic copy transfer processor from it
// The returned processor schedules transfers using the job template built here;
// callbacks report the first and final job parts back onto cca.
func newSyncTransferProcessor(cca *cookedSyncCmdArgs, numOfTransfersPerPart int, isSingleFileSync bool) *copyTransferProcessor {
	copyJobTemplate := &common.CopyJobPartOrderRequest{
		JobID:           cca.jobID,
		CommandString:   cca.commandString,
		FromTo:          cca.fromTo,
		SourceRoot:      replacePathSeparators(cca.source),
		DestinationRoot: replacePathSeparators(cca.destination),

		// authentication related
		CredentialInfo: cca.credentialInfo,
		SourceSAS:      cca.sourceSAS,
		DestinationSAS: cca.destinationSAS,

		// flags
		BlobAttributes: common.BlobTransferAttributes{
			PreserveLastModifiedTime: true, // must be true for sync so that future syncs have this information available
			PutMd5:                   cca.putMd5,
			MD5ValidationOption:      cca.md5ValidationOption,
			BlockSizeInBytes:         cca.blockSize},
		ForceWrite: true, // once we decide to transfer for a sync operation, we overwrite the destination regardless
		LogLevel:   cca.logVerbosity,
	}

	// for directory syncs, make sure both roots end with the path separator
	if !isSingleFileSync {
		if !strings.HasSuffix(copyJobTemplate.SourceRoot, common.AZCOPY_PATH_SEPARATOR_STRING) {
			copyJobTemplate.SourceRoot += common.AZCOPY_PATH_SEPARATOR_STRING
		}
		if !strings.HasSuffix(copyJobTemplate.DestinationRoot, common.AZCOPY_PATH_SEPARATOR_STRING) {
			copyJobTemplate.DestinationRoot += common.AZCOPY_PATH_SEPARATOR_STRING
		}
	}

	reportFirstPart := func() { cca.setFirstPartOrdered() }
	reportFinalPart := func() { cca.isEnumerationComplete = true }

	// encoding is applied only to the remote end(s) of the transfer
	shouldEncodeSource := cca.fromTo.From().IsRemote()
	shouldEncodeDestination := cca.fromTo.To().IsRemote()

	// note that the source and destination, along with the template are given to the generic processor's constructor
	// this means that given an object with a relative path, this processor already knows how to schedule the right kind of transfers
	return newCopyTransferProcessor(copyJobTemplate, numOfTransfersPerPart, cca.source, cca.destination,
		shouldEncodeSource, shouldEncodeDestination, reportFirstPart, reportFinalPart)
}
// base for delete processors targeting different resources
type interactiveDeleteProcessor struct {
	// the plugged-in deleter that performs the actual deletion
	deleter objectProcessor

	// whether we should ask the user for permission the first time we delete a file
	shouldPromptUser bool

	// note down whether any delete should happen
	shouldDelete bool

	// used for prompt message
	// examples: "blob", "local file", etc.
	objectTypeToDisplay string

	// used for prompt message
	// examples: a directory path, or url to container
	objectLocationToDisplay string

	// count the deletions that happened
	// may be nil, in which case no counting is performed
	incrementDeletionCount func()
}
// removeImmediately deletes the given object, first asking the user when
// prompting is enabled. The user's answer decides both this deletion and
// whether future prompts appear. Note: the deletion counter is incremented
// even when the underlying deleter reports an error.
func (d *interactiveDeleteProcessor) removeImmediately(object storedObject) (err error) {
	if d.shouldPromptUser {
		// the answer sets both the delete decision and whether we keep asking
		d.shouldDelete, d.shouldPromptUser = d.promptForConfirmation(object)
	}

	if !d.shouldDelete {
		return nil
	}

	if err = d.deleter(object); err != nil {
		glcm.Info(fmt.Sprintf("error %s deleting the object %s", err.Error(), object.relativePath))
	}

	if counter := d.incrementDeletionCount; counter != nil {
		counter()
	}
	return
}
// promptForConfirmation asks the user whether the given extra object should be
// deleted from the destination. It returns the decision for this object and
// whether future objects should trigger a prompt at all ("yes/no to all"
// answers turn prompting off).
func (d *interactiveDeleteProcessor) promptForConfirmation(object storedObject) (shouldDelete bool, keepPrompting bool) {
	question := fmt.Sprintf("The %s '%s' does not exist at the source. "+
		"Do you wish to delete it from the destination(%s)? "+
		"[Y] Yes [A] Yes to all [N] No [L] No to all (default is N):",
		d.objectTypeToDisplay, object.relativePath, d.objectLocationToDisplay)

	switch strings.ToLower(glcm.Prompt(question)) {
	case "y": // delete this object, keep asking about the rest
		// print nothing, since the deleter is expected to log the message when the delete happens
		return true, true
	case "a": // delete everything from now on, no more prompts
		glcm.Info(fmt.Sprintf("Confirmed. All the extra %ss will be deleted.", d.objectTypeToDisplay))
		return true, false
	case "n": // keep this object, keep asking
		glcm.Info(fmt.Sprintf("Keeping extra %s: %s", d.objectTypeToDisplay, object.relativePath))
		return false, true
	case "l": // keep everything from now on, no more prompts
		glcm.Info("No deletions will happen from now onwards.")
		return false, false
	default: // anything unrecognized is treated like "no"
		glcm.Info(fmt.Sprintf("Unrecognizable answer, keeping extra %s: %s.", d.objectTypeToDisplay, object.relativePath))
		return false, true
	}
}
// newInteractiveDeleteProcessor wires a concrete deleter into an
// interactiveDeleteProcessor, deriving the prompting behavior from the
// requested DeleteDestination mode (Prompt => ask first; True => delete
// without asking; otherwise nothing is deleted).
func newInteractiveDeleteProcessor(deleter objectProcessor, deleteDestination common.DeleteDestination,
	objectTypeToDisplay string, objectLocationToDisplay string, incrementDeletionCounter func()) *interactiveDeleteProcessor {

	return &interactiveDeleteProcessor{
		deleter:                 deleter,
		objectTypeToDisplay:     objectTypeToDisplay,
		objectLocationToDisplay: objectLocationToDisplay,
		incrementDeletionCount:  incrementDeletionCounter,
		shouldPromptUser:        deleteDestination == common.EDeleteDestination.Prompt(),
		shouldDelete:            deleteDestination == common.EDeleteDestination.True(), // if shouldPromptUser is true, this will start as false, but we will determine its value later
	}
}
// newSyncLocalDeleteProcessor builds an interactive delete processor that
// removes extra local files found under the sync destination.
func newSyncLocalDeleteProcessor(cca *cookedSyncCmdArgs) *interactiveDeleteProcessor {
	deleter := localFileDeleter{rootPath: cca.destination}
	return newInteractiveDeleteProcessor(deleter.deleteFile, cca.deleteDestination,
		"local file", cca.destination, cca.incrementDeletionCount)
}
// localFileDeleter removes files beneath a fixed root path on the local file system.
type localFileDeleter struct {
	// rootPath is the directory that object relative paths are resolved against
	rootPath string
}

// deleteFile removes the file at the object's relative path, resolved against rootPath.
func (l *localFileDeleter) deleteFile(object storedObject) error {
	glcm.Info("Deleting extra file: " + object.relativePath)
	return os.Remove(filepath.Join(l.rootPath, object.relativePath))
}
// newSyncBlobDeleteProcessor builds an interactive delete processor that
// removes extra blobs under the sync destination URL (with the destination
// SAS re-appended when one was provided).
func newSyncBlobDeleteProcessor(cca *cookedSyncCmdArgs) (*interactiveDeleteProcessor, error) {
	rawURL, err := url.Parse(cca.destination)
	if err != nil {
		return nil, err
	}
	// append the SAS query to the destination URL when one was supplied
	// (the redundant `else if err == nil` guard was removed: err is always nil here)
	if cca.destinationSAS != "" {
		copyHandlerUtil{}.appendQueryParamToUrl(rawURL, cca.destinationSAS)
	}

	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)
	p, err := createBlobPipeline(ctx, cca.credentialInfo)
	if err != nil {
		return nil, err
	}

	return newInteractiveDeleteProcessor(newBlobDeleter(rawURL, p, ctx).deleteBlob,
		cca.deleteDestination, "blob", cca.destination, cca.incrementDeletionCount), nil
}
// blobDeleter deletes blobs under a root URL, which may point at a container
// or a virtual directory; individual blobs are located by relative path.
type blobDeleter struct {
	rootURL *url.URL
	p       pipeline.Pipeline
	ctx     context.Context
}

// newBlobDeleter constructs a blobDeleter for the given root URL, pipeline, and context.
func newBlobDeleter(rawRootURL *url.URL, p pipeline.Pipeline, ctx context.Context) *blobDeleter {
	return &blobDeleter{
		rootURL: rawRootURL,
		p:       p,
		ctx:     ctx,
	}
}
// deleteBlob removes the blob named by the object's relative path (resolved
// against the deleter's root URL), including its snapshots.
func (b *blobDeleter) deleteBlob(object storedObject) error {
	glcm.Info("Deleting extra blob: " + object.relativePath)

	// construct the blob URL using its relative path
	// the rootURL could be pointing to a container, or a virtual directory
	blobURLParts := azblob.NewBlobURLParts(*b.rootURL)
	blobURLParts.BlobName = path.Join(blobURLParts.BlobName, object.relativePath)

	blobURL := azblob.NewBlobURL(blobURLParts.URL(), b.p)
	// DeleteSnapshotsOptionInclude deletes the blob together with its snapshots
	_, err := blobURL.Delete(b.ctx, azblob.DeleteSnapshotsOptionInclude, azblob.BlobAccessConditions{})
	return err
}
<file_sep>package azbfs_test
import (
"context"
"github.com/Azure/azure-storage-azcopy/azbfs"
chk "gopkg.in/check.v1"
"net/http"
)
// DirectoryUrlSuite groups the azbfs directory URL integration tests.
type DirectoryUrlSuite struct{}

// Register the suite with gocheck.
var _ = chk.Suite(&DirectoryUrlSuite{})
// deleteDirectory deletes the directory represented by directory Url and
// asserts the service returned 200 OK.
// (the trailing `true` argument presumably requests recursive delete —
// confirm against the azbfs DirectoryURL.Delete API)
func deleteDirectory(c *chk.C, dul azbfs.DirectoryURL) {
	resp, err := dul.Delete(context.Background(), nil, true)
	c.Assert(err, chk.IsNil)
	c.Assert(resp.Response().StatusCode, chk.Equals, http.StatusOK)
}
// TestCreateDeleteDirectory tests the creation (and deferred deletion) of a directory
func (dus *DirectoryUrlSuite) TestCreateDeleteDirectory(c *chk.C) {
	// Create a file system
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)

	// Create a directory url from the fileSystem Url
	dirUrl, _ := getDirectoryURLFromFileSystem(c, fsURL)
	cResp, err := dirUrl.Create(context.Background())
	defer deleteDirectory(c, dirUrl)

	// Assert the directory create response header attributes
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")
}
// TestCreateSubDir tests creating the sub-directory inside a directory
func (dus *DirectoryUrlSuite) TestCreateSubDir(c *chk.C) {
	// Create the file system
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)

	// Create the directory Url from fileSystem Url and create directory
	dirUrl, _ := getDirectoryURLFromFileSystem(c, fsURL)
	cResp, err := dirUrl.Create(context.Background())
	defer deleteDirectory(c, dirUrl)

	// verify the create response headers for the parent directory
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")

	// Create the sub-directory url from directory Url and create sub-directory
	subDirUrl, _ := getDirectoryURLFromDirectory(c, dirUrl)
	cResp, err = subDirUrl.Create(context.Background())
	defer deleteDirectory(c, subDirUrl)

	// verify the create response headers for the sub-directory
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")
}
// TestDirectoryCreateAndGetProperties tests the create directory and
// get directory properties
func (dus *DirectoryUrlSuite) TestDirectoryCreateAndGetProperties(c *chk.C) {
	// Create file system
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)

	// Create directory url from fileSystemUrl and create directory
	dirUrl, _ := getDirectoryURLFromFileSystem(c, fsURL)
	cResp, err := dirUrl.Create(context.Background())
	defer deleteDirectory(c, dirUrl)

	// verify the create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")

	// Get the directory properties and verify the resource type
	gResp, err := dirUrl.GetProperties(context.Background())
	c.Assert(err, chk.IsNil)
	c.Assert(gResp.StatusCode(), chk.Equals, http.StatusOK)
	c.Assert(gResp.XMsResourceType(), chk.Equals, "directory")
}
// TestCreateDirectoryAndFiles tests the create directory and create file inside the directory
func (dus *DirectoryUrlSuite) TestCreateDirectoryAndFiles(c *chk.C) {
	// Create the file system
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)

	// Create the directoryUrl from fileSystemUrl
	// and create directory
	dirUrl, _ := getDirectoryURLFromFileSystem(c, fsURL)
	cResp, err := dirUrl.Create(context.Background())
	defer deleteDirectory(c, dirUrl)

	// verify the directory create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")

	// Create fileUrl from directoryUrl and create file inside the directory
	fileUrl, _ := getFileURLFromDirectory(c, dirUrl)
	fresp, err := fileUrl.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	defer delFile(c, fileUrl)

	// verify the file create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(fresp.Response().StatusCode, chk.Equals, http.StatusCreated)
	c.Assert(fresp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(fresp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(fresp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(fresp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(fresp.Date(), chk.Not(chk.Equals), "")
}
// TestDirectoryStructure tests creating dir, sub-dir inside dir and files
// inside dirs and sub-dirs. Then verify the count of files / sub-dirs inside directory
func (dus *DirectoryUrlSuite) TestDirectoryStructure(c *chk.C) {
	// Create file system
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)

	// Create a directory inside filesystem
	dirUrl, _ := getDirectoryURLFromFileSystem(c, fsURL)
	cResp, err := dirUrl.Create(context.Background())
	defer deleteDirectory(c, dirUrl)

	// verify the directory create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")

	// Create a sub-dir inside the above created directory
	subDirUrl, _ := getDirectoryURLFromDirectory(c, dirUrl)
	cResp, err = subDirUrl.Create(context.Background())
	defer deleteDirectory(c, subDirUrl)

	// verify the sub-directory create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(cResp.StatusCode(), chk.Equals, http.StatusCreated)
	c.Assert(cResp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(cResp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(cResp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(cResp.Date(), chk.Not(chk.Equals), "")

	// Create a file inside directory
	fileUrl, _ := getFileURLFromDirectory(c, dirUrl)
	fresp, err := fileUrl.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	defer delFile(c, fileUrl)

	// verify the file create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(fresp.Response().StatusCode, chk.Equals, http.StatusCreated)
	c.Assert(fresp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(fresp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(fresp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(fresp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(fresp.Date(), chk.Not(chk.Equals), "")

	// create a file inside the sub-dir created above
	subDirfileUrl, _ := getFileURLFromDirectory(c, subDirUrl)
	fresp, err = subDirfileUrl.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	defer delFile(c, subDirfileUrl)

	// verify the nested file create response headers
	c.Assert(err, chk.IsNil)
	c.Assert(fresp.Response().StatusCode, chk.Equals, http.StatusCreated)
	c.Assert(fresp.ETag(), chk.Not(chk.Equals), "")
	c.Assert(fresp.LastModified(), chk.Not(chk.Equals), "")
	c.Assert(fresp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(fresp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(fresp.Date(), chk.Not(chk.Equals), "")

	// list the directory create above.
	// expected number of file inside the dir is 2 i.e one
	// inside the dir itself and one inside the sub-dir
	// expected number of sub-dir inside the dir is 1
	continuationMarker := ""
	lresp, err := dirUrl.ListDirectorySegment(context.Background(), &continuationMarker, true)
	c.Assert(err, chk.IsNil)
	c.Assert(lresp.Response().StatusCode, chk.Equals, http.StatusOK)
	c.Assert(len(lresp.Files()), chk.Equals, 2)
	c.Assert(len(lresp.Directories()), chk.Equals, 1)
	c.Assert(lresp.ETag(), chk.Equals, "")
	c.Assert(lresp.LastModified(), chk.Equals, "")
	c.Assert(lresp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(lresp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(lresp.Date(), chk.Not(chk.Equals), "")
}
// TestListDirectoryWithSpaces verifies that a directory whose name contains
// spaces can be created and listed correctly.
func (dus *DirectoryUrlSuite) TestListDirectoryWithSpaces(c *chk.C) {
	// Create file system
	fsu := getBfsServiceURL()
	fsURL, _ := createNewFileSystem(c, fsu)
	defer delFileSystem(c, fsURL)

	// Create a directory inside filesystem
	dirUrl := fsURL.NewDirectoryURL("New Folder Test 2")
	_, err := dirUrl.Create(context.Background())
	defer deleteDirectory(c, dirUrl)
	c.Assert(err, chk.IsNil) // previously unchecked: a failed create made later asserts misleading

	// Create a file inside directory
	fileUrl, _ := getFileURLFromDirectory(c, dirUrl)
	_, err = fileUrl.Create(context.Background(), azbfs.BlobFSHTTPHeaders{})
	defer delFile(c, fileUrl)
	c.Assert(err, chk.IsNil) // previously unchecked

	// list the directory created above.
	// expected number of files inside the dir is 1
	continuationMarker := ""
	lresp, err := dirUrl.ListDirectorySegment(context.Background(), &continuationMarker, true)
	c.Assert(err, chk.IsNil)
	c.Assert(lresp.Response().StatusCode, chk.Equals, http.StatusOK)
	c.Assert(len(lresp.Files()), chk.Equals, 1)
	c.Assert(len(lresp.Directories()), chk.Equals, 0)
	c.Assert(lresp.ETag(), chk.Equals, "")
	c.Assert(lresp.LastModified(), chk.Equals, "")
	c.Assert(lresp.XMsRequestID(), chk.Not(chk.Equals), "")
	c.Assert(lresp.XMsVersion(), chk.Not(chk.Equals), "")
	c.Assert(lresp.Date(), chk.Not(chk.Equals), "")
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"fmt"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
)
// upload related
// retry policy for upload requests
const UploadMaxTries = 20
const UploadTryTimeout = time.Minute * 15
const UploadRetryDelay = time.Second * 1
const UploadMaxRetryDelay = time.Second * 60

// download related
// MaxRetryPerDownloadBody bounds retries while reading a download response body
const MaxRetryPerDownloadBody = 5

// TODO: consider to unify the retry options.
const DownloadTryTimeout = time.Minute * 15
const DownloadRetryDelay = time.Second * 1
const DownloadMaxRetryDelay = time.Second * 60

// pacer related
// PacerTimeToWaitInMs is expressed in milliseconds
const PacerTimeToWaitInMs = 50
//////////////////////////////////////////////////////////////////////////////////////////////////////////
// These types define the STE Coordinator
// newJobXfer is the canonical transfer-starter signature used by the coordinator
type newJobXfer func(jptm IJobPartTransferMgr, pipeline pipeline.Pipeline, pacer *pacer)

// same as newJobXfer, but with an extra parameter
type newJobXferWithDownloaderFactory = func(jptm IJobPartTransferMgr, pipeline pipeline.Pipeline, pacer *pacer, df downloaderFactory)
type newJobXferWithSenderFactory = func(jptm IJobPartTransferMgr, pipeline pipeline.Pipeline, pacer *pacer, sf senderFactory, sipf sourceInfoProviderFactory)
// Takes a multi-purpose download function, and makes it ready to use with a specific type of downloader
func parameterizeDownload(targetFunction newJobXferWithDownloaderFactory, df downloaderFactory) newJobXfer {
	// capture the downloader factory in a closure matching the plain newJobXfer signature
	return func(jptm IJobPartTransferMgr, pipeline pipeline.Pipeline, pacer *pacer) {
		targetFunction(jptm, pipeline, pacer, df)
	}
}
// Takes a multi-purpose send function, and makes it ready to use with a specific type of sender
func parameterizeSend(targetFunction newJobXferWithSenderFactory, sf senderFactory, sipf sourceInfoProviderFactory) newJobXfer {
	// capture the sender and source-info-provider factories in a closure matching the plain newJobXfer signature
	return func(jptm IJobPartTransferMgr, pipeline pipeline.Pipeline, pacer *pacer) {
		targetFunction(jptm, pipeline, pacer, sf, sipf)
	}
}
// the xfer factory is generated based on the type of source and destination
// Panics on a from-to combination it does not recognize.
func computeJobXfer(fromTo common.FromTo, blobType common.BlobType) newJobXfer {
	switch fromTo {
	case common.EFromTo.BlobLocal(): // download from Azure Blob to local file system
		return parameterizeDownload(remoteToLocal, newBlobDownloader)
	case common.EFromTo.LocalBlob(): // upload from local file system to Azure blob
		// the concrete uploader depends on the requested blob type;
		// note: an unlisted blobType falls out of this inner switch and hits the panic below
		switch blobType {
		case common.EBlobType.None(),
			common.EBlobType.BlockBlob():
			// None is treated the same as BlockBlob
			return parameterizeSend(anyToRemote, newBlockBlobUploader, newLocalSourceInfoProvider)
		case common.EBlobType.PageBlob():
			return parameterizeSend(anyToRemote, newPageBlobUploader, newLocalSourceInfoProvider)
		case common.EBlobType.AppendBlob():
			return parameterizeSend(anyToRemote, newAppendBlobUploader, newLocalSourceInfoProvider)
		}
	case common.EFromTo.BlobTrash():
		return DeleteBlobPrologue
	case common.EFromTo.FileLocal(): // download from Azure File to local file system
		return parameterizeDownload(remoteToLocal, newAzureFilesDownloader)
	case common.EFromTo.LocalFile(): // upload from local file system to Azure File
		return parameterizeSend(anyToRemote, newAzureFilesUploader, newLocalSourceInfoProvider)
	case common.EFromTo.FileTrash():
		return DeleteFilePrologue
	case common.EFromTo.LocalBlobFS():
		return parameterizeSend(anyToRemote, newBlobFSUploader, newLocalSourceInfoProvider)
	case common.EFromTo.BlobFSLocal():
		return parameterizeDownload(remoteToLocal, newBlobFSDownloader)
	case common.EFromTo.BlobBlob():
		return parameterizeSend(anyToRemote, newURLToBlobCopier, newBlobSourceInfoProvider)
	case common.EFromTo.FileBlob():
		return parameterizeSend(anyToRemote, newURLToBlobCopier, newFileSourceInfoProvider)
	case common.EFromTo.S3Blob():
		return parameterizeSend(anyToRemote, newURLToBlobCopier, newS3SourceInfoProvider)
	}
	panic(fmt.Errorf("Unrecognized from-to: %q", fromTo.String()))
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package common
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net"
"net/http"
"os"
"strconv"
"strings"
"time"
"github.com/Azure/go-autorest/autorest/adal"
)
// ApplicationID represents 1st party ApplicationID for AzCopy.
//const ApplicationID = "a45c21f4-7066-40b4-97d8-14f4313c3caa" // 3rd party test ApplicationID for AzCopy.
const ApplicationID = "579a7132-0e58-4d80-b1e1-7a1e2d337859"

// Resource used in azure storage OAuth authentication
const Resource = "https://storage.azure.com"
const DefaultTenantID = "common"
const DefaultActiveDirectoryEndpoint = "https://login.microsoftonline.com"

// IMDSAPIVersion is the API version string sent to the Azure Instance Metadata Service.
const IMDSAPIVersion = "2018-02-01"

// MSIEndpoint is the IMDS token endpoint used for managed-identity authentication.
const MSIEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token"

// DefaultTokenExpiryWithinThreshold is how close to expiry a token may get before it is considered due for refresh.
var DefaultTokenExpiryWithinThreshold = time.Minute * 10
// UserOAuthTokenManager for token management.
// It acquires tokens via the interactive device-code flow, from MSI, from an
// environment variable (testing only), or from the on-disk credential cache.
type UserOAuthTokenManager struct {
	oauthClient *http.Client // HTTP client used for AAD/device-code requests
	credCache   *CredCache   // persistent on-disk token cache
}
// NewUserOAuthTokenManagerInstance creates a token manager instance backed by
// a fresh HTTP client and a credential cache built from the given options.
func NewUserOAuthTokenManagerInstance(credCacheOptions CredCacheOptions) *UserOAuthTokenManager {
	manager := new(UserOAuthTokenManager)
	manager.oauthClient = newAzcopyHTTPClient()
	manager.credCache = NewCredCache(credCacheOptions)
	return manager
}
// newAzcopyHTTPClient builds the HTTP client used for OAuth/MSI traffic,
// tuned for heavy connection reuse (large per-host idle pool, compression off).
func newAzcopyHTTPClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
			// We use Dial instead of DialContext as DialContext has been reported to cause slower performance.
			Dial /*Context*/ : (&net.Dialer{
				Timeout:   30 * time.Second,
				KeepAlive: 30 * time.Second,
				DualStack: true,
			}).Dial, /*Context*/
			MaxIdleConns:           0, // No limit
			MaxIdleConnsPerHost:    1000,
			IdleConnTimeout:        180 * time.Second,
			TLSHandshakeTimeout:    10 * time.Second,
			ExpectContinueTimeout:  1 * time.Second,
			DisableKeepAlives:      false,
			DisableCompression:     true, // bodies are tokens/JSON; compression buys little here
			MaxResponseHeaderBytes: 0,
			//ResponseHeaderTimeout:  time.Duration{},
			//ExpectContinueTimeout:  time.Duration{},
		},
	}
}
// GetTokenInfo gets token info, following this rule:
// 1. If a token is passed through the environment variable (testing purposes
// only), that token is used and overrides the cache.
// 2. Otherwise, the token is loaded from the on-disk cache.
// The method either successfully returns a token, or returns an error.
func (uotm *UserOAuthTokenManager) GetTokenInfo(ctx context.Context) (*OAuthTokenInfo, error) {
	// The env var (unattended-test scenario) always wins over the cache.
	tokenInfo, err := uotm.getTokenInfoFromEnvVar(ctx)
	if err != nil && IsErrorEnvVarOAuthTokenInfoNotSet(err) {
		// Env var absent entirely: fall back to session-mode cached token.
		tokenInfo, err = uotm.getCachedTokenInfo(ctx)
	}
	if err != nil {
		// Either the env var existed but its token was unusable,
		// or loading/refreshing the cached token failed.
		return nil, err
	}

	if tokenInfo == nil || tokenInfo.IsEmpty() {
		return nil, errors.New("invalid state, cannot get valid token info")
	}

	return tokenInfo, nil
}
// MSILogin tries to get a token from MSI (managed identity); persist indicates
// whether to cache the token on local disk for later sessions.
func (uotm *UserOAuthTokenManager) MSILogin(ctx context.Context, identityInfo IdentityInfo, persist bool) (*OAuthTokenInfo, error) {
	// at most one of client ID / object ID / MSI resource ID may be specified
	if err := identityInfo.Validate(); err != nil {
		return nil, err
	}

	oAuthTokenInfo := &OAuthTokenInfo{
		Identity:     true,
		IdentityInfo: identityInfo,
	}
	token, err := oAuthTokenInfo.GetNewTokenFromMSI(ctx)
	if err != nil {
		return nil, err
	}
	oAuthTokenInfo.Token = *token

	if persist {
		err = uotm.credCache.SaveToken(*oAuthTokenInfo)
		if err != nil {
			return nil, err
		}
	}

	return oAuthTokenInfo, nil
}
// UserLogin interactively logs in with the specified tenantID and activeDirectoryEndpoint
// using the OAuth device-code flow; persist indicates whether to cache the token on local disk.
func (uotm *UserOAuthTokenManager) UserLogin(tenantID, activeDirectoryEndpoint string, persist bool) (*OAuthTokenInfo, error) {
	// Use default tenant ID and active directory endpoint, if nothing specified.
	if tenantID == "" {
		tenantID = DefaultTenantID
	}
	if activeDirectoryEndpoint == "" {
		activeDirectoryEndpoint = DefaultActiveDirectoryEndpoint
	}

	// Init OAuth config
	oauthConfig, err := adal.NewOAuthConfig(activeDirectoryEndpoint, tenantID)
	if err != nil {
		return nil, err
	}

	// Acquire the device code
	deviceCode, err := adal.InitiateDeviceAuth(
		uotm.oauthClient,
		*oauthConfig,
		ApplicationID,
		Resource)
	if err != nil {
		return nil, fmt.Errorf("failed to login with tenantID %q, Azure directory endpoint %q, %v",
			tenantID, activeDirectoryEndpoint, err)
	}

	// Display the authentication message (contains the code the user must enter)
	fmt.Println(*deviceCode.Message + "\n")

	// NOTE(review): tenantID can no longer be "" at this point (it was defaulted
	// above), so in practice only the comparison with "common" triggers this.
	if tenantID == "" || tenantID == "common" {
		fmt.Println("INFO: Logging in under the \"Common\" tenant. This will log the account in under its home tenant.")
		fmt.Println("INFO: If you plan to use AzCopy with a B2B account (where the account's home tenant is separate from the tenant of the target storage account), please sign in under the target tenant with --tenant-id")
	}

	// Wait here until the user is authenticated
	// TODO: check if adal Go SDK has new method which supports context, currently ctrl-C can stop the login in console interactively.
	token, err := adal.WaitForUserCompletion(uotm.oauthClient, deviceCode)
	if err != nil {
		return nil, fmt.Errorf("failed to login with tenantID %q, Azure directory endpoint %q, %v",
			tenantID, activeDirectoryEndpoint, err)
	}

	oAuthTokenInfo := OAuthTokenInfo{
		Token:                   *token,
		Tenant:                  tenantID,
		ActiveDirectoryEndpoint: activeDirectoryEndpoint,
	}

	if persist {
		err = uotm.credCache.SaveToken(oAuthTokenInfo)
		if err != nil {
			return nil, err
		}
	}

	return &oAuthTokenInfo, nil
}
// getCachedTokenInfo gets a fresh token from the local disk cache.
// If the access token is expired, it is refreshed; if the refresh token is
// expired, the method fails and returns the failure reason. Any refreshed
// token is persisted back so subsequent runs pick it up.
func (uotm *UserOAuthTokenManager) getCachedTokenInfo(ctx context.Context) (*OAuthTokenInfo, error) {
	hasToken, err := uotm.credCache.HasCachedToken()
	switch {
	case err != nil:
		return nil, fmt.Errorf("no cached token found, please log in with azcopy's login command, %v", err)
	case !hasToken:
		return nil, errors.New("no cached token found, please log in with azcopy's login command")
	}

	tokenInfo, err := uotm.credCache.LoadToken()
	if err != nil {
		return nil, fmt.Errorf("get cached token failed, %v", err)
	}

	refreshed, err := tokenInfo.Refresh(ctx)
	if err != nil {
		return nil, fmt.Errorf("get cached token failed to ensure token fresh, please log in with azcopy's login command again, %v", err)
	}

	// Persist back to the cache only when the refresh actually changed something.
	tokenChanged := refreshed.AccessToken != tokenInfo.AccessToken ||
		refreshed.RefreshToken != tokenInfo.RefreshToken
	if tokenChanged {
		tokenInfo.Token = *refreshed
		if err := uotm.credCache.SaveToken(*tokenInfo); err != nil {
			return nil, err
		}
	}

	return tokenInfo, nil
}
// HasCachedToken returns whether there is a cached token in the token manager's on-disk cache.
func (uotm *UserOAuthTokenManager) HasCachedToken() (bool, error) {
	return uotm.credCache.HasCachedToken()
}

// RemoveCachedToken deletes all cached tokens (e.g. on logout).
func (uotm *UserOAuthTokenManager) RemoveCachedToken() error {
	return uotm.credCache.RemoveCachedToken()
}
//====================================================================================
// EnvVarOAuthTokenInfo passes oauth token info into AzCopy through environment variable.
// Note: this is only used for testing, and not encouraged to be used in production environments.
const EnvVarOAuthTokenInfo = "AZCOPY_OAUTH_TOKEN_INFO"
// ErrorCodeEnvVarOAuthTokenInfoNotSet defines error code when environment variable AZCOPY_OAUTH_TOKEN_INFO is not set.
const ErrorCodeEnvVarOAuthTokenInfoNotSet = "environment variable AZCOPY_OAUTH_TOKEN_INFO is not set"
// EnvVarOAuthTokenInfoExists verifies if environment variable for OAuthTokenInfo is specified.
// The method returns true if the environment variable is set.
// Note: This is useful for only checking whether the env var exists, please use getTokenInfoFromEnvVar
// directly in the case getting token info is necessary.
func EnvVarOAuthTokenInfoExists() bool {
if os.Getenv(EnvVarOAuthTokenInfo) == "" {
return false
}
return true
}
// IsErrorEnvVarOAuthTokenInfoNotSet verifies if an error indicates environment variable AZCOPY_OAUTH_TOKEN_INFO is not set.
func IsErrorEnvVarOAuthTokenInfoNotSet(err error) bool {
if err != nil && strings.Contains(err.Error(), ErrorCodeEnvVarOAuthTokenInfoNotSet) {
return true
}
return false
}
// getTokenInfoFromEnvVar gets token info from the environment variable (testing only).
func (uotm *UserOAuthTokenManager) getTokenInfoFromEnvVar(ctx context.Context) (*OAuthTokenInfo, error) {
	rawToken := os.Getenv(EnvVarOAuthTokenInfo)
	if rawToken == "" {
		return nil, errors.New(ErrorCodeEnvVarOAuthTokenInfoNotSet)
	}

	// Remove the env var after successfully fetching once,
	// in case the env var would otherwise spread into child processes unexpectedly.
	os.Setenv(EnvVarOAuthTokenInfo, "")

	tokenInfo, err := jsonToTokenInfo([]byte(rawToken))
	if err != nil {
		return nil, fmt.Errorf("get token from environment variable failed to unmarshal token, %v", err)
	}

	// Tokens sourced from the token store are not refreshed here;
	// everything else is refreshed immediately to guarantee freshness.
	if tokenInfo.TokenRefreshSource != TokenRefreshSourceTokenStore {
		refreshedToken, err := tokenInfo.Refresh(ctx)
		if err != nil {
			return nil, fmt.Errorf("get token from environment variable failed to ensure token fresh, %v", err)
		}
		tokenInfo.Token = *refreshedToken
	}

	return tokenInfo, nil
}
//====================================================================================

// TokenRefreshSourceTokenStore indicates enabling azcopy oauth integration through tokenstore.
// Note: This should be only used for internal integrations.
const TokenRefreshSourceTokenStore = "tokenstore"

// OAuthTokenInfo contains the info necessary for refreshing OAuth credentials.
// It embeds adal.Token (access/refresh tokens, expiry); the additional fields
// are serialized under underscore-prefixed JSON keys.
type OAuthTokenInfo struct {
	adal.Token
	Tenant                  string `json:"_tenant"`
	ActiveDirectoryEndpoint string `json:"_ad_endpoint"`
	TokenRefreshSource      string `json:"_token_refresh_source"`
	Identity                bool   `json:"_identity"` // true when the token comes from MSI
	IdentityInfo            IdentityInfo

	// Note: ClientID should be only used for internal integrations through env var with refresh token.
	// It indicates the Application ID assigned to your app when you registered it with Azure AD.
	// In this case AzCopy refreshes the token on behalf of the caller.
	// For more details, please refer to
	// https://docs.microsoft.com/en-us/azure/active-directory/develop/v1-protocols-oauth-code#refreshing-the-access-tokens
	ClientID string `json:"_client_id"`
}
// IdentityInfo contains info for MSI.
type IdentityInfo struct {
	ClientID string `json:"_identity_client_id"`
	ObjectID string `json:"_identity_object_id"`
	MSIResID string `json:"_identity_msi_res_id"`
}

// Validate validates identity info: at most one of clientID, objectID or
// MSI resource ID may be set. Returns nil when zero or one field is set.
func (identityInfo *IdentityInfo) Validate() error {
	// Count how many fields are set.
	// BUG FIX: the previous implementation collected the non-empty VALUES into
	// a set, so two different fields holding the same string were incorrectly
	// accepted as a single identity specifier. We must count fields, not
	// distinct values.
	fieldsSet := 0
	if identityInfo.ClientID != "" {
		fieldsSet++
	}
	if identityInfo.ObjectID != "" {
		fieldsSet++
	}
	if identityInfo.MSIResID != "" {
		fieldsSet++
	}
	if fieldsSet > 1 {
		return errors.New("client ID, object ID and MSI resource ID are mutually exclusive")
	}
	return nil
}
// Refresh gets a new token appropriate to this credential's source:
// token store (internal integrations), MSI, or user-credential refresh.
func (credInfo *OAuthTokenInfo) Refresh(ctx context.Context) (*adal.Token, error) {
	switch {
	case credInfo.TokenRefreshSource == TokenRefreshSourceTokenStore:
		return credInfo.GetNewTokenFromTokenStore(ctx)
	case credInfo.Identity:
		return credInfo.GetNewTokenFromMSI(ctx)
	default:
		return credInfo.RefreshTokenWithUserCredential(ctx)
	}
}
// msiTokenHTTPClient is the shared client used for IMDS (MSI) token requests.
var msiTokenHTTPClient = newAzcopyHTTPClient()

// Single instance token store credential cache shared by entire azcopy process.
// The key/account names embed the PID so that concurrent azcopy processes do not collide.
var tokenStoreCredCache = NewCredCacheInternalIntegration(CredCacheOptions{
	KeyName:     "azcopy/aadtoken/" + strconv.Itoa(os.Getpid()),
	ServiceName: "azcopy",
	AccountName: "aadtoken/" + strconv.Itoa(os.Getpid()),
})
// GetNewTokenFromTokenStore gets a token from the token store (Credential
// Manager on Windows, keyring on Linux, keychain on macOS).
// Note: This approach should only be used in internal integrations.
func (credInfo *OAuthTokenInfo) GetNewTokenFromTokenStore(ctx context.Context) (*adal.Token, error) {
	if hasToken, err := tokenStoreCredCache.HasCachedToken(); err != nil || !hasToken {
		return nil, fmt.Errorf("no cached token found in Token Store Mode(SE), %v", err)
	}

	cached, err := tokenStoreCredCache.LoadToken()
	if err != nil {
		return nil, fmt.Errorf("get cached token failed in Token Store Mode(SE), %v", err)
	}

	return &cached.Token, nil
}
// GetNewTokenFromMSI gets a token from the Azure Instance Metadata Service identity endpoint.
// For details, please refer to https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview
func (credInfo *OAuthTokenInfo) GetNewTokenFromMSI(ctx context.Context) (*adal.Token, error) {
	// Prepare request to get token from Azure Instance Metadata Service identity endpoint.
	req, err := http.NewRequest("GET", MSIEndpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create request, %v", err)
	}
	params := req.URL.Query()
	params.Set("resource", Resource)
	params.Set("api-version", IMDSAPIVersion)
	// At most one of the identity specifiers is set (enforced by IdentityInfo.Validate).
	if credInfo.IdentityInfo.ClientID != "" {
		params.Set("client_id", credInfo.IdentityInfo.ClientID)
	}
	if credInfo.IdentityInfo.ObjectID != "" {
		params.Set("object_id", credInfo.IdentityInfo.ObjectID)
	}
	if credInfo.IdentityInfo.MSIResID != "" {
		params.Set("msi_res_id", credInfo.IdentityInfo.MSIResID)
	}
	req.URL.RawQuery = params.Encode()
	req.Header.Set("Metadata", "true")

	// Attach the caller's context so the request honors cancellation/deadlines.
	// BUG FIX: Request.WithContext returns a shallow copy; the previous code
	// discarded the return value, so the context was never actually applied.
	req = req.WithContext(ctx)

	// Send request
	resp, err := msiTokenHTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("please check whether MSI is enabled on this PC, to enable MSI please refer to https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/qs-configure-portal-windows-vm#enable-system-assigned-identity-on-an-existing-vm. (Error details: %v)", err)
	}
	defer func() { // resp and Body should not be nil
		// Drain the body so the connection can be reused by the client.
		io.Copy(ioutil.Discard, resp.Body)
		resp.Body.Close()
	}()

	// Check if the status code indicates success
	// The request returns 200 currently, add 201 and 202 as well for possible extension.
	if !(HTTPResponseExtension{Response: resp}).IsSuccessStatusCode(http.StatusOK, http.StatusCreated, http.StatusAccepted) {
		return nil, fmt.Errorf("failed to get token from msi, status code: %v", resp.StatusCode)
	}

	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	result := &adal.Token{}
	if len(b) > 0 {
		// Strip any UTF-8 BOM before unmarshalling.
		b = ByteSliceExtension{ByteSlice: b}.RemoveBOM()
		if err := json.Unmarshal(b, result); err != nil {
			return nil, fmt.Errorf("failed to unmarshal response body, %v", err)
		}
	} else {
		return nil, errors.New("failed to get token from msi")
	}

	return result, nil
}
// RefreshTokenWithUserCredential gets a new token with user credential through refresh.
func (credInfo *OAuthTokenInfo) RefreshTokenWithUserCredential(ctx context.Context) (*adal.Token, error) {
	oauthConfig, err := adal.NewOAuthConfig(credInfo.ActiveDirectoryEndpoint, credInfo.Tenant)
	if err != nil {
		return nil, err
	}

	// ClientID in credInfo is optional which is used for internal integration only.
	// Use AzCopy's 1st party applicationID for refresh by default.
	spt, err := adal.NewServicePrincipalTokenFromManualToken(
		*oauthConfig,
		IffString(credInfo.ClientID != "", credInfo.ClientID, ApplicationID),
		Resource,
		credInfo.Token)
	if err != nil {
		return nil, err
	}

	// force an immediate refresh so the returned token is current
	if err := spt.RefreshWithContext(ctx); err != nil {
		return nil, err
	}

	newToken := spt.Token()
	return &newToken, nil
}
// IsEmpty reports whether this OAuthTokenInfo carries no useful information:
// no tenant, no AD endpoint, a zero token, and not a managed identity.
func (credInfo OAuthTokenInfo) IsEmpty() bool {
	return credInfo.Tenant == "" &&
		credInfo.ActiveDirectoryEndpoint == "" &&
		credInfo.Token.IsZero() &&
		!credInfo.Identity
}
// toJSON converts OAuthTokenInfo to its JSON representation
// (the inverse of jsonToTokenInfo).
func (credInfo OAuthTokenInfo) toJSON() ([]byte, error) {
	return json.Marshal(credInfo)
}
// jsonToTokenInfo converts JSON bytes into an OAuthTokenInfo
// (the inverse of toJSON).
func jsonToTokenInfo(b []byte) (*OAuthTokenInfo, error) {
	var tokenInfo OAuthTokenInfo
	if err := json.Unmarshal(b, &tokenInfo); err != nil {
		return nil, err
	}
	return &tokenInfo, nil
}
//====================================================================================

// TestOAuthInjection controls variables for OAuth testing injections.
type TestOAuthInjection struct {
	DoTokenRefreshInjection bool          // when true, artificial token refreshes are injected
	TokenRefreshDuration    time.Duration // interval used for the injected refreshes
}

// GlobalTestOAuthInjection is the global setting for OAuth testing injection control.
var GlobalTestOAuthInjection = TestOAuthInjection{
	DoTokenRefreshInjection: false,
	TokenRefreshDuration:    time.Second * 10,
}

// TODO: Add pipeline policy for token refreshing validating.
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"context"
"encoding/json"
"fmt"
"github.com/Azure/azure-pipeline-go/pipeline"
"time"
"net/url"
"strings"
"sync/atomic"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-azcopy/ste"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/spf13/cobra"
)
// a max is set because we cannot buffer infinite amount of destination file info in memory
const MaxNumberOfFilesAllowedInSync = 10000000

// rawSyncCmdArgs holds the sync command's arguments and flag values exactly as
// the user supplied them, before validation/transformation by cook().
type rawSyncCmdArgs struct {
	src       string
	dst       string
	recursive bool

	// options from flags
	blockSizeMB         uint32
	logVerbosity        string
	include             string
	exclude             string
	followSymlinks      bool
	putMd5              bool
	md5ValidationOption string
	// this flag indicates the user agreement with respect to deleting the extra files at the destination
	// which do not exists at source. With this flag turned on/off, users will not be asked for permission.
	// otherwise the user is prompted to make a decision
	deleteDestination string
}

// blockSizeInBytes converts the user-facing MiB figure into bytes
// (internally byte counts are used; the command line takes MB for convenience).
func (raw *rawSyncCmdArgs) blockSizeInBytes() uint32 {
	const bytesPerMiB = 1024 * 1024
	return raw.blockSizeMB * bytesPerMiB
}

// parsePatterns splits a semicolon-separated pattern list, dropping empty entries.
func (raw *rawSyncCmdArgs) parsePatterns(pattern string) (cookedPatterns []string) {
	cookedPatterns = make([]string, 0)
	for _, p := range strings.Split(pattern, ";") {
		if p != "" {
			cookedPatterns = append(cookedPatterns, p)
		}
	}
	return
}
// separateSasFromURL parses a URL and splits off its SAS portion, returning the
// clean URL (no SAS, no trailing path separator) and the encoded SAS string.
// On an unparseable URL it returns the input unchanged with an empty SAS.
func (raw *rawSyncCmdArgs) separateSasFromURL(rawURL string) (cleanURL string, sas string) {
	fromUrl, err := url.Parse(rawURL)
	if err != nil {
		// BUG FIX: the parse error used to be discarded with `_`, and the nil
		// *url.URL was then dereferenced below, panicking on malformed input.
		// Degrade gracefully instead: no SAS can be extracted from a URL we
		// cannot parse.
		return rawURL, ""
	}

	// TODO add support for other service URLs
	blobParts := azblob.NewBlobURLParts(*fromUrl)
	sas = blobParts.SAS.Encode()

	// get clean URL without SAS and trailing / in the path
	blobParts.SAS = azblob.SASQueryParameters{}
	bUrl := blobParts.URL()
	bUrl.Path = strings.TrimSuffix(bUrl.Path, common.AZCOPY_PATH_SEPARATOR_STRING)
	cleanURL = bUrl.String()

	return
}
// cook validates and transforms the raw input into cooked input.
// Only Local<->Blob transfers are supported by sync at this point.
func (raw *rawSyncCmdArgs) cook() (cookedSyncCmdArgs, error) {
	cooked := cookedSyncCmdArgs{}

	// the transfer direction is inferred from the argument shapes (local path vs URL)
	cooked.fromTo = inferFromTo(raw.src, raw.dst)
	if cooked.fromTo == common.EFromTo.Unknown() {
		return cooked, fmt.Errorf("Unable to infer the source '%s' / destination '%s'. ", raw.src, raw.dst)
	} else if cooked.fromTo == common.EFromTo.LocalBlob() {
		cooked.source = cleanLocalPath(raw.src)
		cooked.destination, cooked.destinationSAS = raw.separateSasFromURL(raw.dst)
	} else if cooked.fromTo == common.EFromTo.BlobLocal() {
		cooked.source, cooked.sourceSAS = raw.separateSasFromURL(raw.src)
		cooked.destination = cleanLocalPath(raw.dst)
	} else {
		return cooked, fmt.Errorf("source '%s' / destination '%s' combination '%s' not supported for sync command ", raw.src, raw.dst, cooked.fromTo)
	}

	// generate a new job ID
	cooked.jobID = common.NewJobID()

	cooked.blockSize = raw.blockSizeInBytes()
	cooked.followSymlinks = raw.followSymlinks
	cooked.recursive = raw.recursive

	// determine whether we should prompt the user to delete extra files
	err := cooked.deleteDestination.Parse(raw.deleteDestination)
	if err != nil {
		return cooked, err
	}

	// parse the semicolon-separated filter patterns
	cooked.include = raw.parsePatterns(raw.include)
	cooked.exclude = raw.parsePatterns(raw.exclude)

	err = cooked.logVerbosity.Parse(raw.logVerbosity)
	if err != nil {
		return cooked, err
	}

	// MD5 options are only valid for certain directions; validate both of them
	cooked.putMd5 = raw.putMd5
	if err = validatePutMd5(cooked.putMd5, cooked.fromTo); err != nil {
		return cooked, err
	}
	err = cooked.md5ValidationOption.Parse(raw.md5ValidationOption)
	if err != nil {
		return cooked, err
	}
	if err = validateMd5Option(cooked.md5ValidationOption, cooked.fromTo); err != nil {
		return cooked, err
	}

	return cooked, nil
}
// cookedSyncCmdArgs is the validated, fully-resolved form of the sync command's input.
type cookedSyncCmdArgs struct {
	// NOTE: for the 64 bit atomic functions to work on a 32 bit system, we have to guarantee the right 64-bit alignment
	// so the 64 bit integers are placed first in the struct to avoid future breaks
	// refer to: https://golang.org/pkg/sync/atomic/#pkg-note-BUG

	// defines the number of files listed at the source and compared.
	atomicSourceFilesScanned uint64
	// defines the number of files listed at the destination and compared.
	atomicDestinationFilesScanned uint64
	// defines the scanning status of the sync operation.
	// 0 means scanning is in progress and 1 means scanning is complete.
	atomicScanningStatus uint32
	// defines whether first part has been ordered or not.
	// 0 means first part is not ordered and 1 means first part is ordered.
	atomicFirstPartOrdered uint32
	// deletion count keeps track of how many extra files from the destination were removed
	atomicDeletionCount uint32

	// resolved endpoints; SAS portions are carried separately from the clean URLs
	source         string
	sourceSAS      string
	destination    string
	destinationSAS string
	fromTo         common.FromTo
	credentialInfo common.CredentialInfo

	// filters
	recursive      bool
	followSymlinks bool
	include        []string
	exclude        []string

	// options
	putMd5              bool
	md5ValidationOption common.HashValidationOption
	blockSize           uint32
	logVerbosity        common.LogLevel

	// commandString hold the user given command which is logged to the Job log file
	commandString string

	// generated
	jobID common.JobID

	// variables used to calculate progress
	// intervalStartTime holds the last time value when the progress summary was fetched
	// the value of this variable is used to calculate the throughput
	// it gets updated every time the progress summary is fetched
	intervalStartTime        time.Time
	intervalBytesTransferred uint64

	// used to calculate job summary
	jobStartTime time.Time

	// this flag is set by the enumerator
	// it is useful to indicate whether we are simply waiting for the purpose of cancelling
	// this is set to true once the final part has been dispatched
	isEnumerationComplete bool

	// this flag indicates the user agreement with respect to deleting the extra files at the destination
	// which do not exists at source. With this flag turned on/off, users will not be asked for permission.
	// otherwise the user is prompted to make a decision
	deleteDestination common.DeleteDestination
}
// incrementDeletionCount atomically bumps the count of destination files deleted.
func (cca *cookedSyncCmdArgs) incrementDeletionCount() {
	atomic.AddUint32(&cca.atomicDeletionCount, 1)
}

// getDeletionCount atomically reads the count of destination files deleted.
func (cca *cookedSyncCmdArgs) getDeletionCount() uint32 {
	return atomic.LoadUint32(&cca.atomicDeletionCount)
}

// setFirstPartOrdered sets the value of atomicFirstPartOrdered to 1
func (cca *cookedSyncCmdArgs) setFirstPartOrdered() {
	atomic.StoreUint32(&cca.atomicFirstPartOrdered, 1)
}

// firstPartOrdered returns the value of atomicFirstPartOrdered.
func (cca *cookedSyncCmdArgs) firstPartOrdered() bool {
	return atomic.LoadUint32(&cca.atomicFirstPartOrdered) > 0
}

// setScanningComplete sets the value of atomicScanningStatus to 1.
func (cca *cookedSyncCmdArgs) setScanningComplete() {
	atomic.StoreUint32(&cca.atomicScanningStatus, 1)
}

// scanningComplete returns the value of atomicScanningStatus.
func (cca *cookedSyncCmdArgs) scanningComplete() bool {
	return atomic.LoadUint32(&cca.atomicScanningStatus) > 0
}
// waitUntilJobCompletion wraps the call to the lifecycle manager to wait for the job to complete.
// if blocking is specified to true, then this method will never return
// if blocking is specified to false, then another goroutine spawns and waits out the job
func (cca *cookedSyncCmdArgs) waitUntilJobCompletion(blocking bool) {
	// print initial message to indicate that the job is starting
	glcm.Init(common.GetStandardInitOutputBuilder(cca.jobID.String(), fmt.Sprintf("%s/%s.log", azcopyLogPathFolder, cca.jobID)))

	// initialize the times necessary to track progress (throughput is computed per interval)
	cca.jobStartTime = time.Now()
	cca.intervalStartTime = time.Now()
	cca.intervalBytesTransferred = 0

	// hand over control to the lifecycle manager if blocking
	if blocking {
		glcm.InitiateProgressReporting(cca, true)
		glcm.SurrenderControl()
	} else {
		// non-blocking, return after spawning a go routine to watch the job
		glcm.InitiateProgressReporting(cca, true)
	}
}
// Cancel aborts the running sync job. While source enumeration is still in
// progress the user is asked to confirm, because an unfinished sync job
// cannot be resumed.
func (cca *cookedSyncCmdArgs) Cancel(lcm common.LifecycleMgr) {
	// prompt for confirmation, except when enumeration is complete
	if !cca.isEnumerationComplete {
		answer := lcm.Prompt("The source enumeration is not complete, cancelling the job at this point means it cannot be resumed. Please confirm with y/n: ")

		// read a line from stdin, if the answer is not yes, then abort cancel by returning
		if !strings.EqualFold(answer, "y") {
			return
		}
	}

	err := cookedCancelCmdArgs{jobID: cca.jobID}.process()
	if err != nil {
		lcm.Error("error occurred while cancelling the job " + cca.jobID.String() + ". Failed with error " + err.Error())
	}
}
// scanningProgressJsonTemplate is the JSON shape emitted while scanning is still in progress.
type scanningProgressJsonTemplate struct {
	FilesScannedAtSource      uint64
	FilesScannedAtDestination uint64
}
// reportScanningProgress prints (as text or JSON, depending on output format)
// how many files have been scanned on each side so far, plus throughput once
// the first transfer part has been ordered.
func (cca *cookedSyncCmdArgs) reportScanningProgress(lcm common.LifecycleMgr, throughput float64) {

	lcm.Progress(func(format common.OutputFormat) string {
		srcScanned := atomic.LoadUint64(&cca.atomicSourceFilesScanned)
		dstScanned := atomic.LoadUint64(&cca.atomicDestinationFilesScanned)

		if format == common.EOutputFormat.Json() {
			jsonOutputTemplate := scanningProgressJsonTemplate{
				FilesScannedAtSource:      srcScanned,
				FilesScannedAtDestination: dstScanned,
			}
			outputString, err := json.Marshal(jsonOutputTemplate)
			common.PanicIfErr(err)
			return string(outputString)
		}

		// text output; throughput is only meaningful once transfers have started
		throughputString := ""
		if cca.firstPartOrdered() {
			throughputString = fmt.Sprintf(", 2-sec Throughput (Mb/s): %v", ste.ToFixed(throughput, 4))
		}
		return fmt.Sprintf("%v Files Scanned at Source, %v Files Scanned at Destination%s",
			srcScanned, dstScanned, throughputString)
	})
}
// getJsonOfSyncJobSummary marshals the job summary to JSON, substituting the
// locally-tracked deletion count for the delete-transfer figures.
func (cca *cookedSyncCmdArgs) getJsonOfSyncJobSummary(summary common.ListSyncJobSummaryResponse) string {
	// TODO figure out if deletions should be done by the enumeration engine or not
	// TODO if not, remove this so that we get the proper number from the ste
	summary.DeleteTotalTransfers = cca.getDeletionCount()
	summary.DeleteTransfersCompleted = cca.getDeletionCount()
	jsonOutput, err := json.Marshal(summary)
	common.PanicIfErr(err)
	return string(jsonOutput)
}
// ReportProgressOrExit is invoked periodically by the lifecycle manager. It
// prints the scanning spinner while enumeration runs, then transfer progress,
// and, once the job is done, the final summary before exiting with the
// appropriate exit code.
func (cca *cookedSyncCmdArgs) ReportProgressOrExit(lcm common.LifecycleMgr) {
	duration := time.Now().Sub(cca.jobStartTime) // report the total run time of the job
	var summary common.ListSyncJobSummaryResponse
	var throughput float64
	var jobDone bool

	// fetch a job status and compute throughput if the first part was dispatched
	if cca.firstPartOrdered() {
		Rpc(common.ERpcCmd.ListSyncJobSummary(), &cca.jobID, &summary)
		jobDone = summary.JobStatus.IsJobDone()

		// compute the average throughput for the last time interval (Mb = megabits)
		bytesInMb := float64(float64(summary.BytesOverWire-cca.intervalBytesTransferred) * 8 / float64(1024*1024))
		timeElapsed := time.Since(cca.intervalStartTime).Seconds()
		throughput = common.Iffloat64(timeElapsed != 0, bytesInMb/timeElapsed, 0)

		// reset the interval timer and byte count
		cca.intervalStartTime = time.Now()
		cca.intervalBytesTransferred = summary.BytesOverWire
	}

	// first part not dispatched, and we are still scanning
	// so a special message is outputted to notice the user that we are not stalling
	if !cca.scanningComplete() {
		cca.reportScanningProgress(lcm, throughput)
		return
	}

	if jobDone {
		exitCode := common.EExitCode.Success()
		if summary.CopyTransfersFailed+summary.DeleteTransfersFailed > 0 {
			exitCode = common.EExitCode.Error()
		}

		// NOTE(review): this relies on lcm.Exit not returning; otherwise the
		// Progress call below would also run — confirm against LifecycleMgr.
		lcm.Exit(func(format common.OutputFormat) string {
			if format == common.EOutputFormat.Json() {
				return cca.getJsonOfSyncJobSummary(summary)
			}

			output := fmt.Sprintf(
				`
Job %s Summary
Files Scanned at Source: %v
Files Scanned at Destination: %v
Elapsed Time (Minutes): %v
Total Number Of Copy Transfers: %v
Number of Copy Transfers Completed: %v
Number of Copy Transfers Failed: %v
Number of Deletions at Destination: %v
Total Number of Bytes Transferred: %v
Total Number of Bytes Enumerated: %v
Final Job Status: %v
`,
				summary.JobID.String(),
				atomic.LoadUint64(&cca.atomicSourceFilesScanned),
				atomic.LoadUint64(&cca.atomicDestinationFilesScanned),
				ste.ToFixed(duration.Minutes(), 4),
				summary.CopyTotalTransfers,
				summary.CopyTransfersCompleted,
				summary.CopyTransfersFailed,
				// NOTE(review): direct read of atomicDeletionCount without atomic.Load;
				// consider cca.getDeletionCount() for consistency — confirm intent.
				cca.atomicDeletionCount,
				summary.TotalBytesTransferred,
				summary.TotalBytesEnumerated,
				summary.JobStatus)

			// log the summary to the job's log file as well
			jobMan, exists := ste.JobsAdmin.JobMgr(summary.JobID)
			if exists {
				jobMan.Log(pipeline.LogInfo, output)
			}
			return output
		}, exitCode)
	}

	lcm.Progress(func(format common.OutputFormat) string {
		if format == common.EOutputFormat.Json() {
			return cca.getJsonOfSyncJobSummary(summary)
		}

		// indicate whether constrained by disk or not
		perfString, diskString := getPerfDisplayText(summary.PerfStrings, summary.PerfConstraint, duration)

		return fmt.Sprintf("%v Done, %v Failed, %v Pending, %v Total%s, 2-sec Throughput (Mb/s): %v%s",
			summary.CopyTransfersCompleted+summary.DeleteTransfersCompleted,
			summary.CopyTransfersFailed+summary.DeleteTransfersFailed,
			summary.CopyTotalTransfers+summary.DeleteTotalTransfers-(summary.CopyTransfersCompleted+summary.DeleteTransfersCompleted+summary.CopyTransfersFailed+summary.DeleteTransfersFailed),
			summary.CopyTotalTransfers+summary.DeleteTotalTransfers, perfString, ste.ToFixed(throughput, 4), diskString)
	})
}
// process kicks off the sync job: resolves the credential type, builds the
// appropriate enumerator for the transfer direction, starts progress
// reporting, and runs the enumeration.
func (cca *cookedSyncCmdArgs) process() (err error) {
	ctx := context.WithValue(context.TODO(), ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)

	// verifies credential type and initializes credential info.
	// For sync, only one side need credential.
	cca.credentialInfo.CredentialType, err = getCredentialType(ctx, rawFromToInfo{
		fromTo:         cca.fromTo,
		source:         cca.source,
		destination:    cca.destination,
		sourceSAS:      cca.sourceSAS,
		destinationSAS: cca.destinationSAS,
	})
	if err != nil {
		return err
	}

	// For OAuthToken credential, assign OAuthTokenInfo to CopyJobPartOrderRequest properly,
	// the info will be transferred to STE.
	if cca.credentialInfo.CredentialType == common.ECredentialType.OAuthToken() {
		// Message user that they are using Oauth token for authentication,
		// in case a cached token is silently used without the user's awareness.
		glcm.Info("Using OAuth token for authentication.")

		uotm := GetUserOAuthTokenManagerInstance()
		// Get token from env var or cache.
		if tokenInfo, err := uotm.GetTokenInfo(ctx); err != nil {
			return err
		} else {
			cca.credentialInfo.OAuthTokenInfo = *tokenInfo
		}
	}

	// pick the enumerator matching the transfer direction
	var enumerator *syncEnumerator

	switch cca.fromTo {
	case common.EFromTo.LocalBlob():
		enumerator, err = newSyncUploadEnumerator(cca)
		if err != nil {
			return err
		}
	case common.EFromTo.BlobLocal():
		enumerator, err = newSyncDownloadEnumerator(cca)
		if err != nil {
			return err
		}
	default:
		return fmt.Errorf("the given source/destination pair is currently not supported")
	}

	// trigger the progress reporting (non-blocking; enumeration runs on this goroutine)
	cca.waitUntilJobCompletion(false)

	// trigger the enumeration
	err = enumerator.enumerate()
	if err != nil {
		return err
	}
	return nil
}
// init wires the `sync` command (aliases: sc, s) into the root cobra command
// and registers its flags. Cobra invokes Args to validate/capture the two
// positional arguments, then Run to cook the raw arguments and execute.
func init() {
	raw := rawSyncCmdArgs{}
	// syncCmd represents the sync command
	var syncCmd = &cobra.Command{
		Use:     "sync",
		Aliases: []string{"sc", "s"},
		Short:   syncCmdShortDescription,
		Long:    syncCmdLongDescription,
		Example: syncCmdExample,
		Args: func(cmd *cobra.Command, args []string) error {
			// Exactly two positionals are required: source then destination.
			if len(args) != 2 {
				return fmt.Errorf("2 arguments source and destination are required for this command. Number of commands passed %d", len(args))
			}
			raw.src = args[0]
			raw.dst = args[1]
			return nil
		},
		Run: func(cmd *cobra.Command, args []string) {
			cooked, err := raw.cook()
			if err != nil {
				// NOTE(review): execution continues to use `cooked` below, which
				// only makes sense if glcm.Error terminates the process — confirm.
				glcm.Error("error parsing the input given by the user. Failed with error " + err.Error())
			}

			cooked.commandString = copyHandlerUtil{}.ConstructCommandStringFromArgs()
			err = cooked.process()
			if err != nil {
				glcm.Error("Cannot perform sync due to error: " + err.Error())
			}

			// Hand control to the lifecycle manager (progress loop / exit).
			glcm.SurrenderControl()
		},
	}

	rootCmd.AddCommand(syncCmd)
	syncCmd.PersistentFlags().BoolVar(&raw.recursive, "recursive", true, "true by default, look into sub-directories recursively when syncing between directories.")
	syncCmd.PersistentFlags().Uint32Var(&raw.blockSizeMB, "block-size-mb", 0, "use this block size (specified in MiB) when uploading to/downloading from Azure Storage. Default is automatically calculated based on file size.")
	syncCmd.PersistentFlags().StringVar(&raw.include, "include", "", "only include files whose name matches the pattern list. Example: *.jpg;*.pdf;exactName")
	syncCmd.PersistentFlags().StringVar(&raw.exclude, "exclude", "", "exclude files whose name matches the pattern list. Example: *.jpg;*.pdf;exactName")
	syncCmd.PersistentFlags().StringVar(&raw.logVerbosity, "log-level", "INFO", "define the log verbosity for the log file, available levels: INFO(all requests/responses), WARNING(slow responses), ERROR(only failed requests), and NONE(no output logs).")
	syncCmd.PersistentFlags().StringVar(&raw.deleteDestination, "delete-destination", "false", "defines whether to delete extra files from the destination that are not present at the source. Could be set to true, false, or prompt. "+
		"If set to prompt, user will be asked a question before scheduling files/blobs for deletion.")
	syncCmd.PersistentFlags().BoolVar(&raw.putMd5, "put-md5", false, "create an MD5 hash of each file, and save the hash as the Content-MD5 property of the destination blob/file. (By default the hash is NOT created.) Only available when uploading.")
	syncCmd.PersistentFlags().StringVar(&raw.md5ValidationOption, "check-md5", common.DefaultHashValidationOption.String(), "specifies how strictly MD5 hashes should be validated when downloading. Only available when downloading. Available options: NoCheck, LogOnly, FailIfDifferent, FailIfDifferentOrMissing.")

	// TODO follow sym link is not implemented, clarify behavior first
	//syncCmd.PersistentFlags().BoolVar(&raw.followSymlinks, "follow-symlinks", false, "follow symbolic links when performing sync from local file system.")

	// TODO sync does not support any BlobAttributes, this functionality should be added
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"fmt"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"github.com/Azure/azure-storage-azcopy/common"
)
// localTraverser enumerates the local file system, handling either a single
// file or a directory tree rooted at fullPath.
type localTraverser struct {
	fullPath  string
	recursive bool

	// a generic function to notify that a new stored object has been enumerated
	incrementEnumerationCounter func()
}
// traverse enumerates the file(s) under t.fullPath, passes each discovered
// file through the given filters, and hands the survivors to processor.
// Directories themselves are never processed. The enumeration counter is
// bumped once per file considered (before filtering).
func (t *localTraverser) traverse(processor objectProcessor, filters []objectFilter) (err error) {
	singleFileInfo, isSingleFile, err := t.getInfoIfSingleFile()

	if err != nil {
		// BUG FIX: the original message ended mid-sentence ("...that it is a valid").
		return fmt.Errorf("cannot scan the path %s, please verify that it is a valid path", t.fullPath)
	}

	// if the path is a single file, then pass it through the filters and send to processor
	if isSingleFile {
		t.incrementEnumerationCounter()
		err = processIfPassedFilters(filters, newStoredObject(singleFileInfo.Name(),
			"", // relative path makes no sense when the full path already points to the file
			singleFileInfo.ModTime(), singleFileInfo.Size(), nil, blobTypeNA), processor)
		return

	} else {
		if t.recursive {
			err = filepath.Walk(t.fullPath, func(filePath string, fileInfo os.FileInfo, fileError error) error {
				// Log inaccessible entries and keep walking instead of aborting.
				if fileError != nil {
					glcm.Info(fmt.Sprintf("Accessing %s failed with error: %s", filePath, fileError))
					return nil
				}

				// skip the subdirectories
				if fileInfo.IsDir() {
					return nil
				}

				t.incrementEnumerationCounter()

				// the relative path needs to be computed from the full path
				computedRelativePath := strings.TrimPrefix(cleanLocalPath(filePath), t.fullPath)
				// leading path separators are trimmed away
				computedRelativePath = strings.TrimPrefix(computedRelativePath, common.AZCOPY_PATH_SEPARATOR_STRING)

				return processIfPassedFilters(filters, newStoredObject(fileInfo.Name(), computedRelativePath,
					fileInfo.ModTime(), fileInfo.Size(), nil, blobTypeNA), processor)
			})

			return
		} else {
			// if recursive is off, we only need to scan the files immediately under the fullPath
			files, err := ioutil.ReadDir(t.fullPath)
			if err != nil {
				return err
			}

			// go through the files and return if any of them fail to process
			for _, singleFile := range files {
				if singleFile.IsDir() {
					continue
				}

				t.incrementEnumerationCounter()
				err = processIfPassedFilters(filters, newStoredObject(singleFile.Name(), singleFile.Name(), singleFile.ModTime(), singleFile.Size(), nil, blobTypeNA), processor)

				if err != nil {
					return err
				}
			}
		}
	}

	return
}
// replacePathSeparators converts the platform's native path separator into
// AzCopy's canonical one; on platforms where they already match, the input
// is returned untouched.
func replacePathSeparators(path string) string {
	if os.PathSeparator == common.AZCOPY_PATH_SEPARATOR_CHAR {
		return path
	}
	return strings.Replace(path, string(os.PathSeparator), common.AZCOPY_PATH_SEPARATOR_STRING, -1)
}
// getInfoIfSingleFile stats t.fullPath. If the path denotes a regular file,
// its FileInfo and true are returned; for a directory the result is
// (nil, false, nil); a stat failure is returned as the error.
func (t *localTraverser) getInfoIfSingleFile() (os.FileInfo, bool, error) {
	info, statErr := os.Stat(t.fullPath)
	switch {
	case statErr != nil:
		return nil, false, statErr
	case info.IsDir():
		return nil, false, nil
	default:
		return info, true, nil
	}
}
// newLocalTraverser constructs a localTraverser rooted at the cleaned form
// of fullPath, with the given recursion setting and enumeration callback.
func newLocalTraverser(fullPath string, recursive bool, incrementEnumerationCounter func()) *localTraverser {
	return &localTraverser{
		fullPath:                    cleanLocalPath(fullPath),
		recursive:                   recursive,
		incrementEnumerationCounter: incrementEnumerationCounter,
	}
}
// cleanLocalPath normalizes a local path for internal use: separators become
// AzCopy's canonical '/', then the path is lexically cleaned. Two shapes need
// to be restored after path.Clean: a network share's doubled leading slash
// (which Clean collapses), and a bare drive root (which needs a trailing
// slash to actually mean the root on Windows).
func cleanLocalPath(localPath string) string {
	normalizedPath := path.Clean(replacePathSeparators(localPath))

	// detect if we are targeting a network share
	if strings.HasPrefix(localPath, "//") || strings.HasPrefix(localPath, `\\`) {
		// if yes, we have trimmed away one of the leading slashes, so add it back
		normalizedPath = common.AZCOPY_PATH_SEPARATOR_STRING + normalizedPath
	} else if len(localPath) == 3 && (strings.HasSuffix(localPath, `:\`) || strings.HasSuffix(localPath, ":/")) ||
		len(localPath) == 2 && strings.HasSuffix(localPath, ":") {
		// detect if we are targeting a drive (ex: either C:\ or C:)
		// note that on windows there must be a slash in order to target the root drive properly
		// otherwise we'd point to the path from where AzCopy is running (if AzCopy is running from the same drive)
		normalizedPath += common.AZCOPY_PATH_SEPARATOR_STRING
	}

	return normalizedPath
}
<file_sep>package ste
import (
"bytes"
"context"
"fmt"
"net/http"
"net/url"
"runtime"
"strings"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
)
// This file is copied and extended from Azure Storage Blob Go SDK.
// Because V10 SDK supports flexibility for injecting customized logging policy,
// and considering redact x-amz-signature's request header for logging is not a general demand for Azure Storage Blob Go SDK.
// TODO: Further discuss whether to add callback into RequestLogOptions for Azure Storage Blob Go SDK.
// RequestLogOptions configures the retry policy's behavior.
type RequestLogOptions struct {
// LogWarningIfTryOverThreshold logs a warning if a tried operation takes longer than the specified
// duration (-1=no logging; 0=default threshold).
LogWarningIfTryOverThreshold time.Duration
}
func (o RequestLogOptions) defaults() RequestLogOptions {
if o.LogWarningIfTryOverThreshold == 0 {
// It would be good to relate this to https://azure.microsoft.com/en-us/support/legal/sla/storage/v1_2/
// But this monitors the time to get the HTTP response; NOT the time to download the response body.
o.LogWarningIfTryOverThreshold = 3 * time.Second // Default to 3 seconds
}
return o
}
// NewRequestLogPolicyFactory creates a RequestLogPolicyFactory object configured using the specified options.
// Each policy instance counts its tries, logs outgoing requests at Info level,
// promotes slow tries to Warning and failures to Error, and appends a stack
// trace for non-HTTP errors.
func NewRequestLogPolicyFactory(o RequestLogOptions) pipeline.Factory {
	o = o.defaults() // Force defaults to be calculated
	return pipeline.FactoryFunc(func(next pipeline.Policy, po *pipeline.PolicyOptions) pipeline.PolicyFunc {
		// These variables are per-policy; shared by multiple calls to Do
		var try int32
		operationStart := time.Now() // If this is the 1st try, record the operation state time
		return func(ctx context.Context, request pipeline.Request) (response pipeline.Response, err error) {
			try++ // The first try is #1 (not #0)

			// Log the outgoing request as informational
			if po.ShouldLog(pipeline.LogInfo) {
				b := &bytes.Buffer{}
				fmt.Fprintf(b, "==> OUTGOING REQUEST (Try=%d)\n", try)
				pipeline.WriteRequestWithResponse(b, prepareRequestForLogging(request), nil, nil)
				po.Log(pipeline.LogInfo, b.String())
			}

			// Set the time for this particular retry operation and then Do the operation.
			tryStart := time.Now()
			response, err = next.Do(ctx, request) // Make the request
			tryEnd := time.Now()
			tryDuration := tryEnd.Sub(tryStart)
			opDuration := tryEnd.Sub(operationStart)

			logLevel, forceLog, httpError := pipeline.LogInfo, false, false // Default logging information

			// If the response took too long, we'll upgrade to warning.
			if o.LogWarningIfTryOverThreshold > 0 && tryDuration > o.LogWarningIfTryOverThreshold {
				// Log a warning if the try duration exceeded the specified threshold
				logLevel, forceLog = pipeline.LogWarning, true
			}

			if err == nil { // We got a response from the service
				sc := response.Response().StatusCode
				if ((sc >= 400 && sc <= 499) && sc != http.StatusNotFound && sc != http.StatusConflict && sc != http.StatusPreconditionFailed && sc != http.StatusRequestedRangeNotSatisfiable) || (sc >= 500 && sc <= 599) {
					logLevel, forceLog, httpError = pipeline.LogError, true, true // Promote to Error any 4xx (except those listed is an error) or any 5xx
				} else if sc == http.StatusNotFound || sc == http.StatusConflict || sc == http.StatusPreconditionFailed || sc == http.StatusRequestedRangeNotSatisfiable {
					// Expected "failure" statuses (e.g. existence probes): not an
					// error level, but remembered so no stack trace is logged below.
					httpError = true
				}
			} else { // This error did not get an HTTP response from the service; upgrade the severity to Error
				logLevel, forceLog = pipeline.LogError, true
			}

			if shouldLog := po.ShouldLog(logLevel); forceLog || shouldLog {
				// We're going to log this; build the string to log
				b := &bytes.Buffer{}
				slow := ""
				if o.LogWarningIfTryOverThreshold > 0 && tryDuration > o.LogWarningIfTryOverThreshold {
					slow = fmt.Sprintf("[SLOW >%v]", o.LogWarningIfTryOverThreshold)
				}
				fmt.Fprintf(b, "==> REQUEST/RESPONSE (Try=%d/%v%s, OpTime=%v) -- ", try, tryDuration, slow, opDuration)
				if err != nil { // This HTTP request did not get a response from the service
					fmt.Fprint(b, "REQUEST ERROR\n")
				} else {
					if logLevel == pipeline.LogError {
						fmt.Fprint(b, "RESPONSE STATUS CODE ERROR\n")
					} else {
						fmt.Fprint(b, "RESPONSE SUCCESSFULLY RECEIVED\n")
					}
				}

				pipeline.WriteRequestWithResponse(b, prepareRequestForLogging(request), response.Response(), err)
				//Dropping HTTP errors as grabbing the stack is an expensive operation & fills the log too much
				//for a set of harmless errors. HTTP requests ultimately will be retried.
				if logLevel <= pipeline.LogError && !httpError {
					b.Write(stack())
				}
				msg := b.String()

				if forceLog {
					pipeline.ForceLog(logLevel, msg)
				}
				if shouldLog {
					po.Log(logLevel, msg)
				}
			}

			return response, err
		}
	})
}
// prepareRequestForLogging returns a request that is safe to log: if the URL
// query string carries a "sig" (SAS signature) parameter, the signature is
// redacted on a copy, leaving the original request intact for sending. The
// result is then run through the service-specific redaction as well.
func prepareRequestForLogging(request pipeline.Request) *http.Request {
	req := request
	rawQuery := req.URL.RawQuery
	sigRedacted, rawQuery := common.RedactSecretQueryParam(rawQuery, "sig")

	if sigRedacted {
		// Make copy so we don't destroy the query parameters we actually need to send in the request
		req = request.Copy()
		req.Request.URL.RawQuery = rawQuery
	}
	return prepareRequestForServiceLogging(req)
}
func stack() []byte {
buf := make([]byte, 1024)
for {
n := runtime.Stack(buf, false)
if n < len(buf) {
return buf[:n]
}
buf = make([]byte, 2*len(buf))
}
}
///////////////////////////////////////////////////////////////////////////////////////
// Redact phase useful for blob and file service only. For other services,
// this method can directly return request.Request.
///////////////////////////////////////////////////////////////////////////////////////

// prepareRequestForServiceLogging redacts secrets carried in blob/file
// service-specific headers before a request is logged. CopyBlob and
// PutBlockFromURL/PutPageFromURL/AppendBlobFromURL carry the source URL (and
// possibly its SAS or AWS signature) in x-ms-copy-source, so that header's
// "sig" and "x-amz-signature" query parameters are redacted on a copy.
func prepareRequestForServiceLogging(request pipeline.Request) *http.Request {
	req := request
	if exist, key := doesHeaderExistCaseInsensitive(req.Header, xMsCopySourceHeader); exist {
		req = request.Copy()
		// IDIOM FIX: the local was previously named `url`, shadowing the
		// imported net/url package; renamed to srcURL for clarity.
		srcURL, err := url.Parse(req.Header.Get(key))
		if err == nil {
			rawQuery := srcURL.RawQuery
			sigRedacted, rawQuery := common.RedactSecretQueryParam(rawQuery, "sig")
			xAmzSignatureRedacted, rawQuery := common.RedactSecretQueryParam(rawQuery, "x-amz-signature")

			if sigRedacted || xAmzSignatureRedacted {
				srcURL.RawQuery = rawQuery
				req.Header.Set(xMsCopySourceHeader, srcURL.String())
			}
		}
	}
	return req.Request
}
const xMsCopySourceHeader = "x-ms-copy-source"
func doesHeaderExistCaseInsensitive(header http.Header, key string) (bool, string) {
for keyInHeader := range header {
if strings.EqualFold(keyInHeader, key) {
return true, keyInHeader
}
}
return false, ""
}
<file_sep>// Copyright © 2017 Microsoft <<EMAIL>>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package ste
import (
"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/azblob"
)
// appendBlobUploader uploads a local file to an append blob. It extends the
// shared append-blob sender base with a channel that delivers the file's
// computed MD5 hash for use in the epilogue.
type appendBlobUploader struct {
	appendBlobSenderBase

	md5Channel chan []byte
}
// newAppendBlobUploader builds an uploader on top of the common append-blob
// sender base, adding a fresh MD5 channel for the epilogue to consume.
func newAppendBlobUploader(jptm IJobPartTransferMgr, destination string, p pipeline.Pipeline, pacer *pacer, sip ISourceInfoProvider) (ISenderBase, error) {
	senderBase, err := newAppendBlobSenderBase(jptm, destination, p, pacer, sip)
	if err != nil {
		return nil, err
	}

	return &appendBlobUploader{appendBlobSenderBase: *senderBase, md5Channel: newMd5Channel()}, nil
}
// Md5Channel exposes the send side of the MD5 channel; the reading side
// pushes the whole-file hash here once it has consumed the file.
func (u *appendBlobUploader) Md5Channel() chan<- []byte {
	return u.md5Channel
}
// GenerateUploadFunc returns the chunkFunc that appends one chunk of the
// local file to the destination append blob. The append carries an
// IfAppendPositionEqual precondition at the chunk's file offset, so blocks
// can only land in order; a failure marks the active upload as failed.
func (u *appendBlobUploader) GenerateUploadFunc(id common.ChunkID, blockIndex int32, reader common.SingleChunkReader, chunkIsWholeFile bool) chunkFunc {

	appendBlockFromLocal := func() {
		u.jptm.LogChunkStatus(id, common.EWaitReason.Body())
		// Wrap the reader so the pacer can throttle upload bandwidth.
		body := newLiteRequestBodyPacer(reader, u.pacer)
		_, err := u.destAppendBlobURL.AppendBlock(u.jptm.Context(), body,
			azblob.AppendBlobAccessConditions{
				AppendPositionAccessConditions: azblob.AppendPositionAccessConditions{IfAppendPositionEqual: id.OffsetInFile},
			}, nil)
		if err != nil {
			u.jptm.FailActiveUpload("Appending block", err)
			return
		}
	}

	return u.generateAppendBlockToRemoteFunc(id, appendBlockFromLocal)
}
// Epilogue finalizes the append-blob upload. While the transfer is still in a
// non-failed state (TransferStatus() > 0 — presumably "success"; TODO confirm
// against the status enum), it waits for the MD5 hash on md5Channel and
// re-PUTs the blob's HTTP headers with Content-MD5 attached, then delegates
// to the shared sender-base epilogue.
func (u *appendBlobUploader) Epilogue() {
	jptm := u.jptm

	// set content MD5 (only way to do this is to re-PUT all the headers, this time with the MD5 included)
	if jptm.TransferStatus() > 0 {
		tryPutMd5Hash(jptm, u.md5Channel, func(md5Hash []byte) error {
			epilogueHeaders := u.headersToApply
			epilogueHeaders.ContentMD5 = md5Hash
			_, err := u.destAppendBlobURL.SetHTTPHeaders(jptm.Context(), epilogueHeaders, azblob.BlobAccessConditions{})
			return err
		})
	}

	u.appendBlobSenderBase.Epilogue()
}
<file_sep>package cmd
import (
"os"
chk "gopkg.in/check.v1"
)
// localTraverserTestSuite groups the unit tests for the local-filesystem
// traverser helpers (currently cleanLocalPath).
type localTraverserTestSuite struct{}

// Register the suite with gocheck so its Test* methods are discovered.
var _ = chk.Suite(&localTraverserTestSuite{})
// TestCleanLocalPath checks cleanLocalPath against POSIX-style inputs:
// trailing and doubled slashes collapse, "./" prefixes drop, and parent-dir
// references are preserved.
func (s *localTraverserTestSuite) TestCleanLocalPath(c *chk.C) {
	testCases := map[string]string{
		"/user/foo/bar":    "/user/foo/bar", // regular unix path with no change
		"/user/foo/bar/":   "/user/foo/bar", // regular unix path with extra slash
		"/user//foo//bar/": "/user/foo/bar", // regular unix path with double slashes
		"./foo/bar":        "foo/bar",       // relative unix path
		"../foo/bar":       "../foo/bar",    // relative unix path with parent dir
		"foo/bar":          "foo/bar",       // shorthand relative unix path
	}

	for orig, expected := range testCases {
		c.Assert(cleanLocalPath(orig), chk.Equals, expected)
	}
}
// TestCleanLocalPathForWindows checks the Windows-specific behavior of
// cleanLocalPath: backslashes are converted to '/', network-share prefixes
// keep their double slash, and bare drive roots gain a trailing slash.
func (s *localTraverserTestSuite) TestCleanLocalPathForWindows(c *chk.C) {
	// ignore these tests when not running on Windows
	// as the path cleaning behavior depends on the platform
	if os.PathSeparator != '\\' {
		c.Skip("not running since the test applies to Windows only")
	}

	testCases := map[string]string{
		`C:\foo\bar`:  `C:/foo/bar`, // regular windows path with no change
		`C:\foo\bar\`: `C:/foo/bar`, // regular windows path with extra slash
		`.\foo\bar`:   `foo/bar`,    // relative windows path
		`..\foo\bar`:  `../foo/bar`, // relative windows path with parent dir
		`foo\bar`:     `foo/bar`,    // shorthand relative windows path
		`\\foo\bar\`:  `//foo/bar`,  // network share
		`C:\`:         `C:/`,        // special case, the slash after colon is actually required
		`D:`:          `D:/`,        // special case, the slash after colon is actually required
	}

	for orig, expected := range testCases {
		c.Assert(cleanLocalPath(orig), chk.Equals, expected)
	}
}
| 2d807ff33097afe3d61f316ec86a3cba6389ce19 | [
"YAML",
"Markdown",
"TOML",
"Makefile",
"Python",
"Go Module",
"Go",
"Dockerfile"
] | 91 | Go | zezha-msft/azure-storage-azcopy-1 | e94999941f9e980d7a44f3b61e7028ba776016f4 | 5e093f4bad8729aa8ed215feea8b3636bd813ca1 |
refs/heads/master | <file_sep>#include <iostream>
using namespace std;
int h;    // height in inches (set by getHeight)
int w;    // weight in pounds (set by getWeight)
int age;  // age in years (set by getAge; not used by the hat-size formula)
// Prompts for and reads the user's height in inches into the global `h`.
void getHeight(){
    std::cout << "Please enter your Height in inches: ";
    std::cin >> h;
}
// Prompts for and reads the user's weight in pounds into the global `w`.
void getWeight() {
    std::cout << "Please enter your Weight in pounds: ";
    std::cin >> w;
}
// Prompts for and reads the user's age in years into the global `age`.
void getAge(){
    std::cout << "Please enter your Age in years: ";
    std::cin >> age;
}
// Computes and prints the hat size from the previously-read weight and
// height: (weight / height) * 2.9, evaluated in floating point.
void hatsize(){
    // BUG FIX: `(w/h)` was integer division, truncating the ratio before
    // scaling (e.g. 150/70 -> 2 instead of 2.142...). Cast to double first.
    double hat = (static_cast<double>(w) / h) * 2.9;
    cout << "Your hat size is: " << hat << endl;
}
// Entry point: gather the user's measurements, then print their hat size.
int main(){
    getHeight();
    getWeight();
    getAge();   // age is collected but not used by hatsize()
    hatsize();
    return 0;
}
"C++"
] | 1 | C++ | hamza-zoumhani/Training1 | 5d596ce8c164e7df796e5dc30c7878e7e4217fa5 | e16a357279de93efff79acfb022f5863321bf83f |
refs/heads/master | <file_sep>import cv2
import numpy as np
import time
import copy
import os
import glob
import bgs
import sys
import multiprocessing as mpr
from datetime import datetime
from kalman_filter import KalmanFilter
from tracker import Tracker
if __name__ == '__main__':
    #FPS = 30
    # Distance (in miles) from the bottom of the frame to the detection line.
    ROAD_DIST_MILES = 0.025

    # Speed limits used to flag speeding vehicles.
    HIGHWAY_SPEED_LIMIT_MPH = 65
    HIGHWAY_SPEED_LIMIT_KMH = 100

    history = 100

    # Background-subtraction algorithm (static frame differencing).
    algorithm = bgs.StaticFrameDifference()

    # Font for on-frame text overlays.
    font = cv2.FONT_HERSHEY_PLAIN
    centers = []

    # Y coordinate (pixels) of the speed-detection line.
    Y_THRESH = 400

    # Minimum blob size for distant objects (above the detection line).
    blob_min_width_far = 25
    blob_min_height_far = 25

    # Minimum blob size for nearby objects (below the detection line).
    blob_min_width_near = 30
    blob_min_height_near = 30

    frame_start_time = None

    # Initialize a Tracker.
    # NOTE(review): the meaning of (80, 3, 2, 1) is not visible here --
    # confirm parameter order against the Tracker constructor.
    tracker = Tracker(80, 3, 2, 1)

    cap = cv2.VideoCapture('/home/zxl/文档/speed-detector/TestVideo/t23.mp4')

    if (cap.isOpened() == False):
        # Message text (runtime string): "Failed to open the video!"
        print('视频打开失败!')
        sys.exit()

    # Video frame width.
    frame_width = round(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    # Video frames per second.
    frame_FPS = round(cap.get(cv2.CAP_PROP_FPS))
    # Per-frame wait time (ms) so playback matches the source frame rate.
    pauseTime = round(1000 / frame_FPS)
    print("FPS: ",frame_FPS)
    print("pauseTime: ",pauseTime)

    while True:
        centers = []
        frame_start_time = datetime.utcnow()
        ret, frame = cap.read()

        if ret == False:
            break

        orig_frame = copy.copy(frame)

        # Draw the speed-detection line.
        cv2.line(frame, (0, Y_THRESH), (frame_width, Y_THRESH), (0, 139, 139), 2)

        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        gray = cv2.GaussianBlur(gray,(5,5,),0)
        #hsv = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)
        #frame_H,frame_S,frame_V = cv2.split(hsv)
        #frame_B,frame_G,frame_R = cv2.split(frame)
        #fgmask = fgbg.apply(gray)
        # Obtain the foreground mask from the background model.
        fgmask = algorithm.apply(frame)
        # kernel = np.ones((4,4),np.uint8)
        # kernel_dilate = np.ones((5,5),np.uint8)
        # opening = cv2.morphologyEx(fgmask, cv2.MORPH_OPEN, kernel)
        # dilation = cv2.morphologyEx(opening, cv2.MORPH_OPEN, kernel_dilate)
        #fgmask = cv2.adaptiveThreshold(fgmask,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,5,0)

        # Morphological clean-up: erode noise, then dilate to merge blobs.
        kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))
        erode = cv2.erode(fgmask,kernel)
        #erode = cv2.erode(erode, kernel)
        dilation = cv2.dilate(erode, kernel,None,None,3)
        #dilation = cv2.dilate(dilation, kernel)
        #dilation = cv2.dilate(dilation, kernel)
        #dilation = cv2.dilate(dilation, kernel)

        # Find contours of the remaining foreground blobs.
        _, contours, hierarchy = cv2.findContours(dilation, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

        # Collect the center of each detected vehicle into `centers`.
        for cnt in contours:
            x, y, w, h = cv2.boundingRect(cnt)
            # debug
            #print("矩形: ","x:", x, "y:", y, "w:" ,w , "h:", h)
            if y > Y_THRESH:
                # Below the line: use the "near" blob-size thresholds.
                if w >= blob_min_width_near and h >= blob_min_height_near:
                    center = np.array([[x + w / 2], [y + h / 2]])
                    centers.append(np.round(center))

                    cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
                    # debug
                    #cv2.imshow("blob_min_width_near",frame)
                    #cv2.waitKey(0)
            else:
                # Above the line: use the "far" blob-size thresholds.
                if w >= blob_min_width_far and h >= blob_min_height_far:
                    center = np.array([[x + w / 2], [y + h / 2]])
                    centers.append(np.round(center))
                    cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
                    # debug
                    #cv2.imshow("blob_min_width_far", frame)
                    #cv2.waitKey(0)

        if centers:
            print('centers.length',centers.__len__())
            tracker.update(centers)

            for vehicle in tracker.tracks:
                if len(vehicle.trace) > 1:
                    for j in range(len(vehicle.trace) - 1):
                        # Draw the tracking trace.
                        x1 = vehicle.trace[j][0][0]
                        y1 = vehicle.trace[j][1][0]
                        x2 = vehicle.trace[j + 1][0][0]
                        y2 = vehicle.trace[j + 1][1][0]
                        # Connect the previous detection's center to the current one.
                        cv2.line(frame, (int(x1), int(y1)), (int(x2), int(y2)), (255, 0, 0), 2)
                        # debug
                        #print("x1:",int(x1),"y1:",int(y1),"x2:",int(x2),"y2:",int(y2))
                        #print("-----------------------------------------")

                    try:
                        trace_i = len(vehicle.trace) - 1

                        trace_x = vehicle.trace[trace_i][0][0]
                        trace_y = vehicle.trace[trace_i][1][0]

                        # Vehicle is crossing the detection line (within +/-5 px)
                        # for the first time: compute its speed.
                        if trace_y <= Y_THRESH + 5 and trace_y >= Y_THRESH - 5 and not vehicle.passed:
                            cv2.putText(frame, 'I PASSED!', (int(trace_x), int(trace_y)), font, 1, (0, 255, 255), 1,
                                        cv2.LINE_AA)
                            vehicle.passed = True

                            # load_lag: processing overhead accumulated since this
                            # frame started, subtracted from the travel time.
                            load_lag = (datetime.utcnow() - frame_start_time).total_seconds()
                            print("-------------------------------------------------------")
                            print("frame_start_time:", frame_start_time,"datetime.utcnow():",datetime.utcnow(),"load_lag",load_lag)
                            time_dur = (datetime.utcnow() - vehicle.start_time).total_seconds() - load_lag
                            print("datetime.utcnow():",datetime.utcnow(),"vehicle.start_time:",vehicle.start_time,"load_lag:",load_lag)
                            print("time_dur",time_dur)
                            # Convert seconds -> hours for the mph computation.
                            time_dur /= 60
                            time_dur /= 60

                            vehicle.mph = ROAD_DIST_MILES / time_dur
                            vehicle.kmh = vehicle.mph * 1.61

                            if vehicle.kmh > HIGHWAY_SPEED_LIMIT_KMH:
                                # Message text (runtime string): "Speeding!"
                                print('超速了!')
                                cv2.circle(orig_frame, (int(trace_x), int(trace_y)), 20, (0, 0, 255), 2)
                                #cv2.putText(orig_frame, 'MPH: %s' % int(vehicle.mph), (int(trace_x), int(trace_y)), font, 1,
                                #            (0, 0, 255), 1, cv2.LINE_AA)
                                cv2.putText(orig_frame, 'KMH: %s' % int(vehicle.kmh), (int(trace_x), int(trace_y)),
                                            font, 1, (0, 0, 255), 1, cv2.LINE_AA)
                                cv2.imwrite('speeding_%s.png' % vehicle.track_id, orig_frame)
                                # Message text (runtime string): "Speeding snapshot saved!"
                                print('超速照片已保存!')

                        if vehicle.passed:
                            cv2.putText(frame, 'KMH: %s' % int(vehicle.kmh), (int(trace_x), int(trace_y)), font, 1,
                                        (0, 255, 255), 1, cv2.LINE_AA)
                        else:
                            cv2.putText(frame, 'ID: ' + str(vehicle.track_id), (int(trace_x), int(trace_y)), font, 1,
                                        (255, 255, 255), 1, cv2.LINE_AA)
                    except:
                        pass

        cv2.imshow('original', frame)
        cv2.imshow('opening/erode', erode)
        cv2.imshow('opening/dilation', dilation)
        cv2.imshow('background subtraction', fgmask)

        # Wait (1000 / FPS) ms between frames.
        keyboard = cv2.waitKey(pauseTime)
        # Exit on ESC (27). (Original comment claimed 'q'.)
        if keyboard == 27:
            break
        # Pause on Space (32) until any key is pressed.
        if keyboard == 32:
            cv2.waitKey(0)
        #time.sleep(1.0 / FPS)

    # Release resources.
    cap.release()
    cv2.destroyAllWindows()

    # Delete the screenshots produced during the run.
    for file in glob.glob('speeding_*.png'):
        os.remove(file)
| cc2435972fdda1b7b6e76d5c7404873c67a75a46 | [
"Python"
] | 1 | Python | limeflavour/speed-detector2 | a56043794e393b4ab7e952b55e8bdc187d3cd399 | 1162764fa4a4288fcd2f7bf92c6f61b1f8549aed |
refs/heads/master | <repo_name>Z4P0/csv-land<file_sep>/readme.txt
update config.py to point to the proper locations
then run:
$ python cmi-csv-import
<file_sep>/config.py
# Input CSV file to import.
CSV_FILE = "cmi-sample.csv"
# Destination file for the converted JSON output.
OUTPUT_FILE = "cmi-sample.json"
| c5e2d252ac5fb61474a69fd17239ef7dd419e5df | [
"Python",
"Text"
] | 2 | Text | Z4P0/csv-land | 461f3afba1568641304c9c5e198db18e1448a481 | c54cded561373b4969bef5b6a57d4eef9619efd6 |
refs/heads/master | <repo_name>franaln/pyslides<file_sep>/aur/PKGBUILD
_pkgname=pyslides
pkgname=pyslides-git
pkgver=20180913
pkgrel=1
pkgdesc=""
arch=('any')
url="https://github.com/franaln/pyslides"
license=('GPL')
depends=('python')
makedepends=('git')
provides=(pyslides)
conflicts=(pyslides)
source=("git://github.com/franaln/pyslides.git")
md5sums=('SKIP')
pkgver() {
date +%Y%m%d
}
package() {
cd "${srcdir}/${_pkgname}"
python setup.py -q install --root="$pkgdir" --optimize=1
}
<file_sep>/pyslides/__init__.py
from pyslides.pyslides import *
<file_sep>/pyslides/pyslides.py
def preamble():
    """Print the LaTeX beamer preamble (documentclass + theme).

    NOTE(review): this function was originally also named ``bdoc`` and was
    silently shadowed by the second ``bdoc`` definition below, making the
    preamble unreachable. Renamed so it can actually be called; the module's
    existing ``bdoc`` behavior is unchanged.
    """
    print(r"""\documentclass[center,10pt,cm,aspectratio=169]{beamer}
\usetheme{minicasual}
""")
def bdoc():
    """Print the opening of the LaTeX document body."""
    body_open = r"""\begin{document}
\centering
"""
    print(body_open)
def edoc():
    """Print the closing of the LaTeX document."""
    print("\\end{document}")
def bframe(title=''):
    """Print the opening of a beamer frame with the given title,
    surrounded by blank lines for readable output."""
    print()
    print('\\begin{frame}{%s}' % title)
    print()
def eframe():
    """Print the closing of a beamer frame, surrounded by blank lines."""
    print()
    print('\\end{frame}')
    print()
def spc():
    """Print a single blank line (vertical spacing between slide elements)."""
    print()
def img(width, path):
    """Print an ``\\includegraphics`` line for ``path``, scaled to
    ``width`` times ``\\textwidth``."""
    line = '\\includegraphics[width=%s\\textwidth]{%s}' % (width, path)
    print(line)
<file_sep>/setup.py
from setuptools import setup
# Package metadata for pyslides (installed via `python setup.py install`).
setup(
    name='pyslides',
    version='0.1',
    description='',
    url='https://github.com/franaln/pyslides',
    author='<NAME>',
    author_email='<EMAIL>',
    license='',
    packages=['pyslides',],
)
| efc136d8a8b9c256b4d354083fd6fd5dff3c100c | [
"Python",
"Shell"
] | 4 | Shell | franaln/pyslides | 4f5554b080d0eaed6557616c7ef4d2659da4d1e2 | f9d74b55d399c2bdc03ecf0df53a043c9a6c2525 |
refs/heads/master | <repo_name>youssefmaged95/Kinect2-Motion-Anaylser<file_sep>/src/components/pages/KinectPreviewPage/KinectNotFound.js
import React, { Component } from 'react';
import { Icon } from 'antd';
import './KinectPreviewPage.css';
class KinectNotFound extends Component {
render() {
return (
<div>
<Icon style={{ marginTop: window.innerHeight / 4 }} type="loading" className="loader" spin />
<p className="not-found-error" >Waiting For Kinect </p>
</div>
);
}
}
export default KinectNotFound;<file_sep>/src/components/Header/Header.js
import React, { Component } from 'react';
import './Header.css';
// NOTE(review): module-level mutable variable used by Header.render(); being
// module-scoped it is shared across Header instances and persists between
// renders — consider making it local to render().
var datasetSelector = null;
class Header extends Component {
constructor() {
super();
this.state = {
screenWidth: 0,
datasetName: "",
};
this.updateWindowDimensions = this.updateWindowDimensions.bind(this);
this.updateDataset = this.updateDataset.bind(this);
}
updateWindowDimensions() {
this.setState({ screenWidth: window.innerWidth });
}
componentWillUnmount() {
window.removeEventListener('resize', this.updateWindowDimensions);
}
componentDidMount() {
this.updateWindowDimensions();
window.addEventListener('resize', this.updateWindowDimensions);
}
updateDataset(datasetName) {
this.setState({ datasetName: datasetName });
}
render() {
if (this.props.selectorVisible) {
datasetSelector = (
<a onClick={() => this.props.chooseDatasetModal.open()} >
<div className="datasetSelector" href="#">{this.state.datasetName}</div>
</a>
);
}
return (
<div style={{ width: this.state.screenWidth - 70 }} className="header" >
<p className="title" >{this.props.title}</p>
{datasetSelector}
</div>
);
}
}
export default Header;<file_sep>/src/components/Analysers/LiveAnalyser.js
import React, { Component } from 'react';
import './Graphs.css';
import { Line, defaults } from 'react-chartjs-2';
// Disable chart.js animations so live updates render immediately.
defaults.global.animation = false;
// Number of samples kept in the rolling window (x-axis divisions).
const divs = 100;
// Live line chart of the "Motion Level" signal, keeping a rolling window of
// `divs` samples that callers extend via addData().
class LiveAnalyser extends Component {
  // Seed the chart with `divs` blank labels and a zero-filled dataset so the
  // line spans the full window from the first render.
  constructor() {
    super();

    let emptyLables = [];
    let zeroValues = [];

    for (let i = 0; i < divs; i++) {
      emptyLables.push("");
      zeroValues.push(0);
    }

    this.state = {
      labels: emptyLables,
      datasets: [{
        label: 'Motion Level',
        data: zeroValues,
        fill: true,
        lineTension: 0.1,
        backgroundColor: 'rgba(0,0,0,0)',
        borderColor: '#6355e096',
        borderCapStyle: 'butt',
        borderDash: [],
        borderDashOffset: 0.0,
        pointBackgroundColor: '#fff',
        pointRadius: 0,
        pointHitRadius: 0,
      }]
    }
    this.addData = this.addData.bind(this);
  }

  componentDidMount() {
    // this.addData(0);
  }

  // Returns a uniformly distributed integer in [0, max).
  getRandomInt(max) {
    let n = Math.floor(Math.random() * Math.floor(max));
    // console.log(n);
    return n;
  }

  // Appends a new motion-level sample and trims the series to the most
  // recent `divs` points.
  // NOTE(review): slice(Math.max(length - divs, 1)) always drops index 0
  // whenever the series is shorter than `divs`; Math.max(..., 0) looks like
  // the intended guard — confirm before changing.
  addData(n) {
    let newData = this.state.datasets
    let dataFile = newData[0].data;
    dataFile.push(n);
    newData[0].data = dataFile.slice(Math.max(dataFile.length - divs, 1))
    // console.log(newData);
    this.setState({ datasets: newData });
  }

  render() {
    return (
      <div className="card">
        <Line height={368} width={770} data={this.state}
          options={{
            maintainAspectRatio: true,
            scales: {
              yAxes: [{
                id: 'left-y-axis',
                type: 'linear',
                position: 'left',
                ticks: {
                  min: 0,
                  max: 2.5,
                }
              }]
            }
          }} />
      </div>
    );
  }
}
export default LiveAnalyser;<file_sep>/src/components/pages/KinectPreviewPage/KinectPreviewPage.js
import React, { Component } from 'react';
import Header from '../../Header/Header';
import JointAnimator from './JointAnimator';
import KinectNotFound from './KinectNotFound';
import ActionBar from './ActionBar';
import ChooseDatasetModal from './ChooseDatasetModal';
import LiveAnalyser from '../../Analysers/LiveAnalyser';
import SnatchAnalyser from '../../Analysers/SnatchAnalyser';
import { Row, notification } from 'antd';
import { getAllDatasets } from '../../../firebase/firestore';
import Steps from './Steps';
import './KinectPreviewPage.css';
const io = require('socket.io-client');
// JSX chosen in render() depending on Kinect connection state.
var content;
// Frames captured during the current recording session (shared with the
// socket handler and the class's recording methods).
var recordedMotion;
class KinectPreviewPage extends Component {
constructor(props) {
super(props);
recordedMotion = [];
this.state = {
kinectIsConnected: false,
isRecording: false,
isPlaying: false,
recordedFrames: 0,
workingDataset: {},
};
this.startConnection = this.startConnection.bind(this);
this.toggleRecording = this.toggleRecording.bind(this);
this.deleteRecordedMotion = this.deleteRecordedMotion.bind(this);
this.saveRecordedMotion = this.saveRecordedMotion.bind(this);
}
componentDidMount() {
this.startConnection();
getAllDatasets((res) => {
if (res.success) {
this.setState({ datasets: res.datasets });
}
});
}
startConnection = () => {
// Connect to kinect server
var socket = io.connect('127.0.0.1:8000');
socket.on('bodyFrame', function (bodyFrame) {
if (!this.state.kinectIsConnected) {
this.setState({ kinectIsConnected: true });
notification['success']({
message: 'Kinect Detected',
duration: 3
});
}
// Save body frame if user is recording
if (this.state.isRecording) {
this.setState({ recordedFrames: this.state.recordedFrames + 1 });
recordedMotion.push(bodyFrame);
}
if (!this.state.isPlaying) {
// Draw new body frame
this.refs.jointAnimator.drawBodyFrame(bodyFrame);
// Analayse body frame
this.refs.moveAnalyser.addFrame(bodyFrame);
}
}.bind(this));
}
toggleRecording = () => {
if (this.state.isRecording) {
this.setState({ isRecording: false, isPlaying: true });
this.refs.jointAnimator.playMotion(recordedMotion);
} else {
recordedMotion = [];
this.setState({ isRecording: true, isPlaying: false });
}
}
deleteRecordedMotion = () => {
// clear recorded motion array and frame count
recordedMotion = [];
// Stop motion player
this.setState({ isPlaying: false, recordedFrames: 0 });
}
saveRecordedMotion = () => {
if (this.refs.saveModal.requestSave(recordedMotion));
}
render() {
if (this.state.kinectIsConnected) {
content = (
<div>
<JointAnimator ref="jointAnimator"
playMotion={this.state.isPlaying}
title={this.state.isPlaying ? "Replay" : "Live Preview"} />
<div className="live-displacement">
<Steps ref="steps" />
<LiveAnalyser ref="liveGraphA" />
</div>
<SnatchAnalyser
steps={this.refs.steps}
ref="moveAnalyser"
liveGraphA={this.refs.liveGraphA}
recorder={this}
/>
</div>
)
} else {
content = (
<div>
{/* <JointAnimator ref="jointAnimator"
playMotion={this.state.isPlaying}
title={this.state.isPlaying ? "Replay" : "Live Preview"} />
<div className="live-displacement">
<Steps />
<LiveAnalyser ref="liveGraphA" />
</div>
<SnatchAnalyser
ref="moveAnalyser"
liveGraphA={this.refs.liveGraphA}
/> */}
<KinectNotFound />
</div>
)
}
return (
<Row type="flex" justify="center">
<Header ref="header" title="Kinect Live Preview" />
<ActionBar
disabled={!this.state.kinectIsConnected}
isRecording={this.state.isRecording}
frameCount={this.state.recordedFrames}
onRecordingToggled={this.toggleRecording}
onDelete={this.deleteRecordedMotion}
onSave={this.saveRecordedMotion} />
{content}
<ChooseDatasetModal ref='chooseDatasetModal' header={this.refs.header} animator={this.refs.jointAnimator} />
</Row>
);
}
}
export default KinectPreviewPage;<file_sep>/src/components/pages/KinectPreviewPage/JointAnimator.js
import React, { Component } from 'react';
// Canvas handles are cached at module level once the component mounts.
var ctx;
var canvas;
let activeJointColor = '#e03162';
// let inActiveJointColor = '#ddd';
// let otherColor = '#6355e0';
// Canvas dimensions in pixels; joint depthX/depthY are scaled by these.
let width = 400;
let height = 333;
var bodyFrame;
let selectedJoints = [];
/**
 * Draws Kinect body frames on a canvas, either live (drawBodyFrame) or as a
 * looping replay of a recorded motion (playMotion).
 */
class JointAnimator extends Component {
constructor() {
super();
this.state = {
selectedJoints: [0, 1, 2, 3],
currentJoint: 9,
}
}
componentDidMount() {
canvas = document.getElementById('bodyCanvas');
ctx = canvas.getContext('2d');
// NOTE(review): both methods are already arrow-function class properties,
// so these bind calls are redundant — confirm before removing.
this.playMotion = this.playMotion.bind(this);
this.drawBodyFrame = this.drawBodyFrame.bind(this);
}
// Replaces the module-level selected-joint list (used by commented-out code below).
setSelectedJoints(joints) {
selectedJoints = joints;
console.log(selectedJoints);
}
// Clears the canvas and paints every joint of every body as a 4x4 square.
drawBodyFrame = (bodyFrame) => {
ctx.clearRect(0, 0, canvas.width, canvas.height);
bodyFrame.bodies.forEach(function (body) {
for (let jointType in body.joints) {
// if (other.includes(parseInt(jointType))) {
let joint = body.joints[jointType];
ctx.fillStyle = activeJointColor;
ctx.fillRect(joint.depthX * width, joint.depthY * height, 4, 4);
// }
// else if (selectedJoints.includes(parseInt(jointType))) {
// let joint = body.joints[jointType];
// ctx.fillStyle = activeJointColor;
// ctx.fillRect(joint.depthX * width, joint.depthY * height, 6, 6);
// } else {
// let joint = body.joints[jointType];
// ctx.fillStyle = inActiveJointColor;
// ctx.fillRect(joint.depthX * width, joint.depthY * height, 6, 6);
// }
}
});
}
/**
 * Recursively plays back `motion` at ~33 fps (30 ms per frame).
 * Called without `frame` to start; loops while props.playMotion stays true,
 * otherwise stops at the last frame.
 */
playMotion = (motion, frame) => {
let that = this;
if (frame === undefined) {
alert("Start Playing");
this.setState({ isPlaying: true });
frame = 0;
}
if (frame === motion.length - 1) {
if (this.props.playMotion) {
frame = 0;
} else {
return;
}
}
bodyFrame = motion[frame];
if (this.props.playMotion) {
that.drawBodyFrame(bodyFrame);
}
// Schedule the next frame even while paused so playback can resume.
setTimeout(function () {
return that.playMotion(motion, frame + 1);
}, 30);
}
render() {
return (
<div title={this.props.title} className="joint-animator" >
<canvas id="bodyCanvas"
width={width.toString()}
height={height.toString()} />
</div>
);
}
}
export default JointAnimator;<file_sep>/src/components/pages/UserPage/UserPage.js
import React, { Component } from 'react';
import Header from '../../Header/Header';
import { Button, Row } from 'antd';
import NewUserForm from './NewUserForm';
import { getAllUsers } from '../../../firebase/firestore';
import './UserPage.css';
var content;
class UserPage extends Component {
constructor() {
super();
this.state = {
users: [],
}
this.getAllUsers = this.getAllUsers.bind(this);
}
componentDidMount() {
this.getAllUsers();
}
getAllUsers() {
getAllUsers((res) => {
if (res.success) {
this.setState({ users: res.users });
}
});
}
onCreateUser() {
this.getAllUsers();
}
render() {
if (this.state.users) {
content = (
this.state.users.map((item, index) => (
<a href={"/#/users/" + item.id + "/"} ><div key={index} className="user">
<img alt="" src={item.data.info.profile_picture} className="user-pic" />
<p className="user-name" >{item.data.info.name}</p>
{/* <div className="save-button" >{item.data.name}</div> */}
</div></a>
)))
}
return (
<div>
<Header title="Users" selectorVisible={false} />
<NewUserForm ref="newUserForm" onCreateUser={this.getAllUsers} />
<Button onClick={() => this.refs.newUserForm.setVisible(true)} type="danger" className="new-dataset-button">Create New
</Button>
<Row type="flex" justify="center" className="datasets-row" style={{ marginTop: 20, }}>
{content}
</Row>
</div>
);
}
}
export default UserPage;<file_sep>/src/firebase/firestore.js
import { firestore, storage } from './init';
// Fetches all datasets from the database ( with motion model references ).
// Invokes callback with { success, datasets } or { success: false, msg }.
function getAllDatasets(callback) {
    firestore.collection("datasets").get()
        .then((snapshot) => {
            const result = [];
            snapshot.forEach((doc) => {
                result.push({ id: doc.id, data: doc.data() });
            });
            callback({ 'success': true, 'datasets': result });
        })
        .catch((error) => {
            callback({ 'success': false, 'msg': error });
        });
}
// Creates a new dataset document from the given info object.
// Invokes callback with { success, id } or { success: false, msg }.
function createDataset(datasetInfo, callback) {
    firestore.collection("datasets")
        .add(datasetInfo)
        .then((docRef) => callback({ 'success': true, 'id': docRef.id }))
        .catch((error) => callback({ 'success': false, 'msg': error }));
}
// Uploads a new motion model to its relevant dataset folder in storage, then
// registers a reference to it on the dataset document.
// Invokes callback exactly once with { success, ... } or { success: false, msg }.
function uploadMotionModel(datasetId, motionModel, callback) {
    compressMotionModel(motionModel, (compressed) => {
        // Timestamp-based file name keeps uploads unique per dataset.
        const fileName = Date.now() + ".json";
        const fileRef = storage.ref().child(datasetId + '/' + fileName);
        const content = JSON.stringify({ motionModel: compressed });
        fileRef.putString(content)
            .then((snapshot) => {
                if (snapshot.state === "success") {
                    const newMotionModel = {
                        file_name: fileName,
                        date_created: Date.now(),
                        frame_count: motionModel.length
                    };
                    addMotionModelToDataset(datasetId, newMotionModel, (res) => {
                        callback(res);
                    });
                } else {
                    callback({ 'success': false, 'msg': "Failed to upload motion model." });
                }
            })
            // Fix: a rejected upload previously never invoked the callback,
            // leaving the caller waiting forever.
            .catch((error) => {
                callback({ 'success': false, 'msg': error });
            });
    });
}
// Adds a new motion model reference to its relevant dataset document.
// Reads the current motion_models array, appends, and writes it back.
function addMotionModelToDataset(datasetId, newMotionModel, callback) {
    const datasetRef = firestore.collection("datasets").doc(datasetId);
    datasetRef.get()
        .then((doc) => {
            const models = doc.data().motion_models;
            models.push(newMotionModel);
            datasetRef.update({ motion_models: models })
                .then(() => callback({ 'success': true }))
                .catch((error) => callback({ 'success': false, 'msg': error }));
        })
        .catch((error) => callback({ 'success': false, 'msg': error }));
}
// Converts a motion model to an array of compact frames ( smaller file size ).
// Each output frame maps joint index ("0", "1", ...) to { x, y } taken from
// the joint's depthX/depthY; bodies without a joints array are skipped.
function compressMotionModel(motionModel, callback) {
    const frames = [];
    motionModel.forEach((frame) => {
        frame.bodies.forEach((body) => {
            if (body.joints) {
                const compact = {};
                body.joints.forEach((position, i) => {
                    compact["" + i] = {
                        x: position.depthX,
                        y: position.depthY,
                    };
                });
                frames.push(compact);
            }
        });
    });
    callback(frames);
}
// Creates a new user document with the given display name.
// New users start with the placeholder avatar and empty statistics.
function createUser(name, callback) {
    const newUser = {
        info: {
            name: name,
            profile_picture: "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTzaLMnex1QwV83TBQgxLTaoDAQlFswsYy62L3mO4Su-CMkk3jX",
        },
        statistics: {
        }
    };
    firestore.collection("users")
        .add(newUser)
        .then((docRef) => callback({ 'success': true, 'id': docRef.id }))
        .catch((error) => callback({ 'success': false, 'msg': error }));
}
// Fetches all users from the database.
// Invokes callback with { success, users } or { success: false, msg }.
function getAllUsers(callback) {
    firestore.collection("users").get()
        .then((snapshot) => {
            const result = [];
            snapshot.forEach((doc) => {
                result.push({ id: doc.id, data: doc.data() });
            });
            callback({ 'success': true, 'users': result });
        })
        .catch((error) => {
            callback({ 'success': false, 'msg': error });
        });
}
export {
getAllDatasets, createDataset, uploadMotionModel, createUser, getAllUsers
};
// ================================================ Testing Functions =====================================================
// getAllDatasets((res) => {
// console.log(res);
// });
// createDataset("Default Dataset", (res) => {
// console.log(res);
// });
// updateDatasetMotionModels('lpAKm07bVbXi0DQc8ftt', (res) => {
// console.log(res);
// });
// deleteMoionModel('2FVAOMXi7cLdlEv646vQdO');
// // Fake motion model
// let frames = 150;
// let motionModel = [];
// for (let i = 0; i < frames; i++) {
// let bodyFrame = {};
// for (let j = 0; j < 25; j++) {
// bodyFrame["" + j] = {
// x: 1,
// y: 2,
// z: 3
// }
// }
// motionModel.push(bodyFrame);
// if (i === (frames - 1)) {
// // createMotionModel('H1kTpMro5AHU4TU7OuqB', { motionModel: motionModel }, (res) => {
// // console.log(res);
// // });
// // }
// console.log(motionModel);
// }
// }
<file_sep>/src/components/Analysers/SnatchAnalyser.js
import React, { Component } from 'react';
import { getYDistance, getMovementLevel } from '../Functions/FeatureExtractor';
let motion = [];
let barAnkleYDists = [];
class SnatchAnalyser extends Component {
constructor() {
super();
this.state = {
step: 0,
}
this.getBarLevel = this.getBarLevel.bind(this);
this.updateSnatchStep = this.updateSnatchStep.bind(this);
}
addFrame = (bodyFrame) => {
let newBodyFrame = {};
bodyFrame.bodies.forEach(function (body) {
for (let jointType in body.joints) {
let joint = body.joints[jointType];
newBodyFrame[jointType] = joint;
}
});
if (newBodyFrame[0] && newBodyFrame[0].cameraY) {
motion.push(newBodyFrame);
this.getMovementLevel();
this.updateSnatchStep(newBodyFrame);
}
}
updateSnatchStep(bodyFrame) {
switch (this.state.step) {
case 0: this.checkStance(bodyFrame); break;
case 1: this.checkFirstPull(bodyFrame); break;
case 2: this.checkSecondPull(bodyFrame); break;
case 3: this.checkTurnover(bodyFrame); break;
}
}
checkStance(bodyFrame) {
let dist = getYDistance(bodyFrame[18], bodyFrame[10]);
barAnkleYDists.push(dist);
let l = barAnkleYDists.length;
let correctStance = true;
if (l >= 50) {
for (let i = 0; i < 50; i++) {
if (barAnkleYDists[l - i - 1] >= 0.3) {
correctStance = false;
break;
}
if (correctStance) {
this.props.steps.setStep(1);
this.setState({ step: 1 });
}
}
}
}
checkFirstPull(bodyFrame) {
if (bodyFrame[10].cameraY > bodyFrame[16].cameraY) {
this.props.steps.setStep(2);
this.setState({ step: 2 });
}
return;
}
checkSecondPull(bodyFrame) {
if (bodyFrame[10].cameraY > bodyFrame[8].cameraY) {
this.props.steps.setStep(3);
this.setState({ step: 3 });
}
return;
}
checkTurnover(bodyFrame) {
let dist = getYDistance(bodyFrame[0], bodyFrame[18]);
if (dist < 0.37) {
this.props.steps.setStep(4);
this.setState({ step: 4 });
}
return;
}
getBarLevel = (bodyFrame) => {
let dist = getYDistance(bodyFrame[6], bodyFrame[10]);
return dist;
}
getMovementLevel() {
let l = motion.length;
if (l > 1) {
let diff = getMovementLevel(motion[l - 1], motion[l - 2]);
// console.log(diff);
if (this.props.liveGraphA) {
this.props.liveGraphA.addData(diff);
}
}
}
render() {
return (
<div>
</div>
);
}
}
export default SnatchAnalyser;<file_sep>/src/components/pages/DatasetsPage/DatasetsPage.js
import React, { Component } from 'react';
import Header from '../../Header/Header';
import { getAllDatasets } from '../../../firebase/firestore';
import NewDatasetModal from './NewDatasetModal';
import { Row, Button } from 'antd';
import './DatasetsPage.css';
let folder = require('../../../assets/folder.png');
let content = null;
class DatasetsPage extends Component {
constructor() {
super();
this.state = { datasets: [] };
this.addDataset = this.addDataset.bind(this);
}
componentDidMount() {
getAllDatasets((res) => {
if (res.success) {
this.setState({ datasets: res.datasets });
}
});
}
addDataset(datasetName) {
let newDatasets = this.state.datasets;
newDatasets.push({
data: {
name: datasetName,
motion_models: [],
}
});
this.setState({ datasets: newDatasets });
}
render() {
if (this.state.datasets) {
content = (
this.state.datasets.map((item, index) => (
<div key={index} className="dataset">
<img alt="" src={folder} className="folderIcon" />
<p className="dataset-name" >{item.data.name}</p>
<p className="motion-model-count" >{item.data.motion_models.length} Motion Models</p>
{/* <div className="save-button" >{item.data.name}</div> */}
</div>
)))
}
return (
<div >
<Header title="Datasets" />
<Button onClick={() => this.refs.newDatasetModal.setVisible(true)} type="danger" className="new-dataset-button">Create New
</Button>
<Row type="flex" justify="center" className="datasets-row" style={{ marginTop: 60, }}>
{content}
</Row>
<NewDatasetModal ref="newDatasetModal" onCreateDataset={(datasetName) => this.addDataset(datasetName)} />
</div >
);
}
}
export default DatasetsPage;
<file_sep>/src/components/pages/KinectPreviewPage/ActionBar.js
import React, { Component } from 'react';
import { Row, Button } from 'antd';
import './KinectPreviewPage.css';
let saveButtons;
class ActionBar extends Component {
constructor() {
super();
this.state = {
isRecording: false,
}
}
toggleRecording = () => {
this.props.onRecordingToggled();
this.setState({ isRecording: !this.state.isRecording });
}
render() {
if (!this.state.isRecording && this.props.frameCount > 0) {
saveButtons =
<div style={{ height: 35 }}>
<a onClick={() => this.props.onSave()}><div className="button"> Save</div></a>
<a onClick={() => this.props.onDelete()}><div className="button" >Delete</div></a>
</div>
} else {
saveButtons = null;
}
return (
<Row type="flex" justify="start" className="action-bar" >
<Button onClick={() => this.toggleRecording()} type="danger"
className="record-button"
disabled={this.props.disabled || (this.props.frameCount > 0 && !this.state.isRecording)}>
{this.state.isRecording || this.props.frameCount > 0 ? this.props.frameCount + " Frames" : "Record"}
</Button>
{saveButtons}
</Row >
);
}
}
export default ActionBar;<file_sep>/src/components/pages/DatasetsPage/NewDatasetModal.js
import React, { Component } from 'react';
import { Modal, Row, Button, notification, Icon } from 'antd';
import { createDataset } from '../../../firebase/firestore';
import BodyView from './BodyView';
var content = null;
class NewDatasetModal extends Component {
constructor() {
super();
this.state = { visible: false, datasetName: "", isCreating: false };
this.handleChange = this.handleChange.bind(this);
this.createDataset = this.createDataset.bind(this);
}
componentDidMount() {
}
setVisible(visiblity) {
this.setState({ visible: visiblity });
}
handleChange(text) {
let name = text.target.value;
this.setState({ datasetName: name });
}
createDataset() {
this.setState({ isCreating: true });
let datasetInfo = this.refs.bodyView.getSelectedJoints();
datasetInfo.name = this.state.datasetName;
datasetInfo.motion_models = [];
createDataset(datasetInfo, (res) => {
if (res.success) {
notification['success']({
message: 'Successfully Created Dataset',
duration: 3,
});
this.props.onCreateDataset(this.state.datasetName);
this.setVisible(false);
} else {
notification['error']({
message: 'Failed To Creat Dataset',
duration: 3,
});
}
this.setState({ isCreating: false });
});
}
render() {
if (this.state.isCreating) {
content = (
<div>
<Icon style={{ marginTop: 35 }} type="loading" className="loader" spin />
<p className="not-found-error" >Creating Dataset</p>
</div>
)
} else {
content = (
<div>
<input onChange={(e) => this.handleChange(e)} style={{ marginTop: 15, }} className="new-dataset-input" placeholder="Dataset Name" />
<BodyView ref="bodyView" />
<Button onClick={() => this.createDataset()} style={{ marginTop: 20, }} type="danger" className="save-dataset-button">
Save
</Button>
</div>
);
}
return (
<Modal
visible={this.state.visible}
footer={null}
onCancel={() => {
if (!this.state.isCreating) {
this.setVisible(false);
}
}}
>
<Row type="flex" justify="center" className="datasets-row" style={{ marginTop: 10, }}>
{content}
</Row>
</Modal>
);
}
}
export default NewDatasetModal;<file_sep>/src/components/pages/KinectPreviewPage/Steps.js
import React, { Component } from 'react';
import { Steps } from 'antd';
const Step = Steps.Step;
class moveSteps extends Component {
constructor() {
super();
this.state = {
step: 0,
}
this.setStep = this.setStep.bind(this);
}
setStep(n) {
this.setState({ step: n });
}
render() {
return (
<div className="card move-steps">
<Steps current={this.state.step}>
<Step title="Stance" />
<Step title="First Pull" />
<Step title="Second Pull" />
<Step title="Turnover" />
<Step title="Finish" />
</Steps>
</div>
);
}
}
export default moveSteps;<file_sep>/src/router.js
import React, { Component } from 'react';
import { HashRouter, Route } from 'react-router-dom'
import DatasetsPage from './components/pages/DatasetsPage/DatasetsPage';
import KinectView from './components/pages/KinectPreviewPage/KinectPreviewPage';
import UserPage from './components/pages/UserPage/UserPage';
import Navigator from './components/Navigator/Navigator'
/**
 * Top-level hash router: sidebar Navigator plus the three page routes.
 */
class RouterConfig extends Component {
render() {
return (
<HashRouter>
<div className="App">
<Navigator />
{/* <Switch> */}
<div style={{ marginLeft: 70, }}>
{/* NOTE(review): path="//" looks like a typo for path="/" — confirm
    which URLs this is meant to match before changing it. */}
<Route path="//" component={KinectView} />
<Route path="/datasets" component={DatasetsPage} />
<Route path="/users" component={UserPage} />
</div>
{/* </Switch> */}
</div>
</HashRouter>
);
}
}
export default RouterConfig; | 691a1110d81c8e8fc47d10b2b94847b3414be41f | [
"JavaScript"
] | 13 | JavaScript | youssefmaged95/Kinect2-Motion-Anaylser | 07cbc7811315ab5568400ce02a30f6476418d50e | 7d3f1b9e4a0486e2c486dc783d309cc2075848fb |
refs/heads/master | <file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { AddressesAddComponent } from './addresses-add/addresses-add.component';
import { AddressesDetailsComponent } from './addresses-details/addresses-details.component';
import { AddressesDetailsResolver } from './addresses-details/addresses-details.resolver';
import { AddressesComponent } from './addresses.component';
import { AddressesResolver } from './addresses.resolver';
// Child routes mounted under the lazily-loaded /addresses path:
// the empty path redirects to the list view; list/add share the
// AddressesResolver; details/:id pre-fetches one address with residents.
const routes: Routes = [
{
path: '',
children: [
{
path:'',
redirectTo: '/addresses/list',
pathMatch: 'full',
},
{
path: 'list',
component: AddressesComponent,
resolve: {
addressesList: AddressesResolver,
}
},
{
path: 'add',
component: AddressesAddComponent,
resolve: {
addressesList: AddressesResolver,
}
},
{
path: 'details/:id',
component: AddressesDetailsComponent,
resolve: {
addressDetails: AddressesDetailsResolver,
}
},
]}]
// Feature routing module: registers the routes above and provides both
// resolvers to this feature's injector.
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule],
providers: [
AddressesResolver,
AddressesDetailsResolver
]
})
export class AddressesRoutingModule {
}
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { Contact } from '../clients/models/contact';
@Component({
selector: 'app-contact',
templateUrl: './contact.component.html',
styleUrls: ['./contact.component.scss']
})
/**
 * Contact form: builds a reactive FormGroup with per-field validators and
 * exposes template helpers that report each validation error kind.
 */
export class ContactComponent implements OnInit {
// Prefill source for the form; always null here, so all fields start empty.
contact: Contact = null;
form: FormGroup;
constructor() { }
ngOnInit(): void {
// NOTE(review): the email pattern is a plain string, so '.' matches any
// character (it is not escaped) — confirm whether that is intended.
this.form = new FormGroup({
"name": new FormControl(this.contact?.name, [Validators.required, Validators.minLength(2), Validators.maxLength(25), Validators.pattern("[a-zA-Z]*")]),
"lastName": new FormControl(this.contact?.lastName,[Validators.required, Validators.minLength(3), Validators.maxLength(40), Validators.pattern("[a-zA-Z]*")]),
"phoneNumber": new FormControl(this.contact?.phoneNumber,[ Validators.minLength(9), Validators.maxLength(12), Validators.pattern("[0-9]*")]),
"email": new FormControl(this.contact?.email,[Validators.required, Validators.minLength(5), Validators.maxLength(35), Validators.pattern("^[a-z0-9._%+-]+@[a-z0-9.-]+\.[a-z]{2,4}$")]),
"subject": new FormControl(this.contact?.subject,[Validators.required, Validators.minLength(9), Validators.maxLength(15),]),
});
};
// True when the control is touched+dirty and failed the 'required' validator.
required(propName: string): boolean {
return (
this.form.get(propName)?.hasError('required') &&
this.form.get(propName).touched &&
this.form.get(propName).dirty
);
}
// True when the control is touched+dirty and is shorter than its minimum.
minLength(propName: string): boolean {
return (
this.form.get(propName)?.hasError('minlength') &&
this.form.get(propName).touched &&
this.form.get(propName).dirty
);
}
// True when the control is touched+dirty and exceeds its maximum length.
maxLength(propName: string): boolean {
return (
this.form.get(propName)?.hasError('maxlength') &&
this.form.get(propName).touched &&
this.form.get(propName).dirty
);
}
// True when the control is touched+dirty and failed its regex pattern.
pattern(propName: string): boolean {
return (
this.form.get(propName)?.hasError('pattern') &&
this.form.get(propName).touched &&
this.form.get(propName).dirty
);
}
}<file_sep>import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { HeaderComponent } from './header/header.component';
import { FooterComponent } from './footer/footer.component';
import { AlertModule } from 'ngx-bootstrap/alert';
import { FormsModule } from '@angular/forms';
import { ReactiveFormsModule } from '@angular/forms';
import { ToastrModule } from 'ngx-toastr';
import { AboutComponent } from './about/about.component';
import { NotFoundComponent } from './not-found/not-found.component';
import { ClientsModule } from './clients/clients.module';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
import { AddressesService } from './addresses/addresses.service';
import { AddressesModule } from './addresses/addresses.module';
import { ContactComponent } from './contact/contact.component';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
import { OrdersModule } from './orders/orders.module';
import { HomeComponent } from './home/home.component';
import { InProgressComponent } from './inProgress/inProgress.component';
import { LoginPanelComponent } from './loginPanel/loginPanel.component';
import { SignUpPanelComponent } from './signUpPanel/signUpPanel.component';
import { SignUpService } from './signUpPanel/signUp.service';
import { JwPaginationModule } from 'jw-angular-pagination';
import { PaginationComponent } from './shared/pagination/pagination.component';
// Root module: declares the app-shell components and wires up the feature
// modules (clients, addresses, orders) plus third-party UI/form libraries.
@NgModule({
declarations: [
AppComponent,
HeaderComponent,
FooterComponent,
AboutComponent,
NotFoundComponent,
ContactComponent,
HomeComponent,
InProgressComponent,
LoginPanelComponent,
SignUpPanelComponent,
// PaginationComponent
],
imports: [
BrowserModule,
BrowserAnimationsModule,
CommonModule,
AppRoutingModule,
FormsModule,
AlertModule.forRoot(),
ReactiveFormsModule,
ToastrModule.forRoot(),
ClientsModule,
NgbModule,
AddressesModule,
FontAwesomeModule,
OrdersModule,
JwPaginationModule
],
providers: [
AddressesService,
SignUpService,
],
bootstrap: [AppComponent]
})
export class AppModule { }
<file_sep>import { Injectable } from '@angular/core';
import { ActivatedRouteSnapshot, Resolve, RouterStateSnapshot } from '@angular/router';
import { Observable } from 'rxjs';
import { Address, AddressWithResidents } from 'src/app/clients/models/address';
import { AddressesService } from '../addresses.service';
@Injectable()
export class AddressesDetailsResolver implements Resolve<AddressWithResidents> {
    constructor(private addressesService: AddressesService) {}

    // Pre-fetches the address (with its residents) identified by the ':id'
    // route parameter before the details component activates.
    resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<AddressWithResidents> {
        const addressId: string = route.paramMap.get('id');
        return this.addressesService.getAddressWithResidents(addressId);
    }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { faTrash, faInfo, faPlusSquare } from '@fortawesome/free-solid-svg-icons';
import { ToastrService } from 'ngx-toastr';
import { Address } from '../clients/models/address';
import { MyPager, OrderedItem } from '../shared/models/shared.models';
import { AddressesService } from './addresses.service';
@Component({
selector: 'app-addresses',
templateUrl: './addresses.component.html',
styleUrls: ['./addresses.component.scss']
})
/**
 * Address list page: renders the resolver-provided list, supports deletion
 * and client-side pagination.
 */
export class AddressesComponent implements OnInit {
// Full list delivered by AddressesResolver via route data.
addressesList: Address[] = [];
// Slice of the list for the current pagination page.
orderedAddresses: OrderedItem[];
id: number = 1;
faTrash = faTrash;
faInfo = faInfo;
faPlusSquare = faPlusSquare;
constructor(
private toastr: ToastrService,
private route: ActivatedRoute,
private addressService: AddressesService
) {
}
ngOnInit(): void {
this.route.data.subscribe(data =>
this.addressesList = data['addressesList']);
console.log(this.addressesList);
}
// Deletes the address on the server, then removes it from the local list.
addressDelete(addressId:string){
this.addressService.delete(addressId)
.subscribe(response => {
let index = this.addressesList.findIndex(address => address.id == addressId)
this.addressesList.splice(index ,1);
});
}
// Pagination callback: swap in the items for the newly selected page.
onPageChanged(event: MyPager) {
this.orderedAddresses = event.pageOfItems;
}
}<file_sep>import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { faArrowLeft, faSave, faUserAlt } from '@fortawesome/free-solid-svg-icons';
import { ToastrService } from 'ngx-toastr';
import { AddressesService } from 'src/app/addresses/addresses.service';
import { MyFormGroup } from 'src/app/shared/extentions/myFormGroup';
import { ClientsService } from '../clients.service';
import { AddressForCreation, Country } from '../models/address';
import { Customer } from '../models/customer';
import { CustomerForCreation } from '../models/customerForCreation';
@Component({
selector: 'app-edit-customer',
templateUrl: './edit-customer.component.html',
styleUrls: ['./edit-customer.component.scss'],
})
/**
 * Create/edit form for a customer. The resolver delivers either an existing
 * customer or a placeholder with id '0', which switches the component into
 * "adding" mode.
 */
export class EditCustomerComponent implements OnInit {
// @Input("newCustomerInput") newCustomer: Customer;
@Output() addNewCustomerEventEmitter = new EventEmitter<Customer>();
// Snapshot of the customer as loaded; compared against form values on save.
copyOfCustomer: Customer;
isDisabled: boolean = true;
// True when the route delivered the placeholder id '0' (new customer).
addingMode: boolean = false;
faSave = faSave;
faUserAlt = faUserAlt;
faArrowLeft = faArrowLeft;
address: AddressForCreation = {
countryId: '',
city: '',
street: '',
zipCode: '',
};
form: MyFormGroup = null;
customerExist: boolean = true;
countries: Country[] = [];
polandId: string;
constructor(
private route: ActivatedRoute,
private clientsService: ClientsService,
private addressService: AddressesService,
private toastr: ToastrService
) {
// Pull the resolved customer and decide between add/edit mode.
// NOTE(review): the `=== null` branch is unreachable after dereferencing
// copyOfCustomer.id above — a null customer would already have thrown.
this.route.data.subscribe((value) => {
this.copyOfCustomer = value['customer'];
console.log('in component: ', this.copyOfCustomer);
if (this.copyOfCustomer.id === '0') {
this.addingMode = true;
} else if (this.copyOfCustomer === null) {
this.customerExist = false;
}
});
// Load the country dropdown and remember Poland's id (used elsewhere as a default).
this.addressService.getAllCountries().subscribe((response) => {
this.countries = response;
console.log('countires', this.countries);
this.polandId = this.countries.find((x) => x.name === 'Poland')?.id;
});
// Build the reactive form, pre-filled from the loaded customer (empty for a
// new one, since the placeholder has no values).
this.form = new MyFormGroup({
name: new FormControl(this.copyOfCustomer.name, [
Validators.required,
Validators.minLength(2),
Validators.maxLength(30),
Validators.pattern('[a-zA-Z ]*'),
]),
lastName: new FormControl(this.copyOfCustomer.lastName, [
Validators.required,
Validators.minLength(3),
Validators.maxLength(30),
]),
age: new FormControl(this.copyOfCustomer.age, [
Validators.required,
Validators.maxLength(3),
Validators.pattern('[0-9]*'),
]),
address: new FormGroup({
countryId: new FormControl(this.copyOfCustomer.address?.country.id, [
Validators.required,
]),
zipCode: new FormControl(this.copyOfCustomer.address?.zipCode, [
Validators.required,
Validators.minLength(4),
Validators.maxLength(30),
]),
city: new FormControl(this.copyOfCustomer.address?.city, [
Validators.required,
Validators.minLength(3),
Validators.maxLength(30),
]),
street: new FormControl(this.copyOfCustomer.address?.street, [
Validators.required,
Validators.minLength(3),
Validators.maxLength(40),
]),
}),
gender: new FormControl(this.copyOfCustomer.gender, [
Validators.required,
]),
phoneNumber: new FormControl(this.copyOfCustomer.phoneNumber, [
Validators.required,
Validators.minLength(9),
Validators.maxLength(15),
]),
email: new FormControl(this.copyOfCustomer.email, [
Validators.required,
Validators.minLength(5),
Validators.maxLength(35),
Validators.pattern('^[a-z0-9._%+-]+@[a-z0-9.-]+.[a-z]{2,4}$'),
]),
});
}
ngOnInit() {}
/**
 * Persists the form: updates the customer when it has a real id, creates it
 * when the id is the placeholder '0'. Refuses to call the API if nothing
 * changed (checkCompliance).
 * Fixes: the two branches duplicated the payload-building code, and the
 * success toasts were swapped (update said 'Customer added!', create said
 * 'Customer update!').
 */
saveCustomer() {
// Collect the current form values into the shape the API expects.
const customerToSave: CustomerForCreation = {
name: this.form.value.name,
lastName: this.form.value.lastName,
age: this.form.value.age,
countryId: this.form.value.address.countryId,
city: this.form.value.address.city,
zipCode: this.form.value.address.zipCode,
street: this.form.value.address.street,
phoneNumber: this.form.value.phoneNumber,
email: this.form.value.email,
gender: this.form.value.gender,
};
// Nothing was edited: surface an error and bail out.
if (!this.checkCompliance()) {
this.toastr.error('Data cannot be saved because it has not changed', 'Error!');
return false;
}
if (this.copyOfCustomer.id !== '0') {
// Existing customer -> update.
this.clientsService.updateCustomer(customerToSave).subscribe(() => {
this.toastr.success('Customer updated!', 'Success!');
});
} else {
// Placeholder id '0' marks a brand-new customer -> create.
this.clientsService.create(customerToSave).subscribe(() => {
this.toastr.success('Customer added!', 'Success!');
});
}
}
// updataCustomer() {
// console.log('first step saving update', this.form.value);
// let customerToCreate: CustomerForCreation = {
// name: this.form.value.name,
// lastName: this.form.value.lastName,
// age: this.form.value.age,
// countryId: this.form.value.address.countryId,
// city: this.form.value.address.city,
// zipCode: this.form.value.address.zipCode,
// street: this.form.value.address.street,
// phoneNumber: this.form.value.phoneNumber,
// email: this.form.value.email,
// gender: this.form.value.gender,
// };
// let isVary = this.checkCompliance();
// console.log("nasz wynik:", isVary);
// if(isVary){
// console.log('saving', customerToCreate);
// this.clientsService.updateCustomer(customerToCreate).subscribe((response) => {
// this.toastr.success('Customer update!', 'Success!');
// });
// }else{
// this.toastr.error('Data cannot be saved because it has not changed','Error!');
// return false;
// }
// }
/**
 * Returns true when any tracked form field differs from the customer
 * snapshot taken when the form was opened — i.e. there is something
 * worth saving.
 */
checkCompliance():boolean{
  const current = this.form.value;
  const original = this.copyOfCustomer;
  // Short-circuit OR is equivalent to the original if/else-if chain:
  // the first differing field yields true, otherwise false.
  return (
    original.name !== current.name ||
    original.lastName !== current.lastName ||
    original.age !== current.age ||
    original.address.country.id !== current.address.countryId ||
    original.address.city !== current.address.city ||
    original.address.zipCode !== current.address.zipCode ||
    original.address.street !== current.address.street ||
    original.phoneNumber !== current.phoneNumber ||
    original.email !== current.email ||
    original.gender !== current.gender
  );
}
/** Shows a success toast confirming a customer was added. */
showSuccess() {
this.toastr.success('Customer added!', 'Success!');
}
/**
 * Shows a "data cleared" toast.
 * NOTE(review): only the toast is shown here; the form itself is not
 * reset in this method — confirm the caller (template) clears it.
 */
clean() {
this.toastr.success('Data cleared!', 'Success!');
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { faArrowLeft, faCity, faGlobeAmericas, faIdCard, faMailBulk, faRoad } from '@fortawesome/free-solid-svg-icons';
import { ToastrService } from 'ngx-toastr';
import { ClientsService } from 'src/app/clients/clients.service';
import { Address, AddressWithResidents } from 'src/app/clients/models/address';
import { Customer } from 'src/app/clients/models/customer';
import { AddressesService } from '../addresses.service';
@Component({
selector: 'app-addresses-details',
templateUrl: './addresses-details.component.html',
styleUrls: ['./addresses-details.component.scss']
})
/**
 * Read-only detail view for a single address, including the customers
 * who live there. The address is resolved by the route before the
 * component activates (see the 'addressDetails' resolver key).
 */
export class AddressesDetailsComponent implements OnInit {
// Populated from route data in ngOnInit.
address: AddressWithResidents = null;
// NOTE(review): never assigned in this class — presumably bound in the
// template or dead; confirm before removing.
customer: Customer[];
// FontAwesome icons exposed to the template.
faGlobaleAmericas = faGlobeAmericas;
faCity = faCity;
faMailBulk = faMailBulk;
faRoad = faRoad;
faIdCard = faIdCard;
faArrowLeft = faArrowLeft;
// NOTE(review): toastr and addressesService are injected but unused here.
constructor(
private toastr: ToastrService,
private route: ActivatedRoute,
private addressesService: AddressesService
) {
}
// Pulls the pre-resolved address off the route's data stream.
ngOnInit(): void {
this.route.data.subscribe(data =>
this.address = data['addressDetails']);
}
}
<file_sep>
export class SortHelper {
// static sortingOfElements(sortOfOrderList: any[], propertyName: string, isAsc: boolean = true){
// let ascOrDescNumber: number;
// if (isAsc) {
// ascOrDescNumber = 1;
// }
// else {
// ascOrDescNumber = -1;
// }
// sortOfOrderList.sort(
// (a, b) => {
// if(a[propertyName] > b[propertyName]) {
// return 1 * ascOrDescNumber;
// }
// if(a[propertyName] < b[propertyName]) {
// return -1 * ascOrDescNumber;
// }
// return 0;
// });
// console.log(sortOfOrderList);
// return sortOfOrderList;
// }
/**
 * Sorts the given list in place by the named property and also returns
 * it. Ascending by default; pass isAsc=false for descending.
 * Equal values keep their relative order (comparator returns 0).
 */
static sortingOfElements(sortOfOrderList: any[], propertyName: string, isAsc: boolean = true){
  const direction = isAsc ? 1 : -1;
  sortOfOrderList.sort((left, right) => {
    const a = left[propertyName];
    const b = right[propertyName];
    if (a > b) {
      return direction;
    }
    if (a < b) {
      return -direction;
    }
    return 0;
  });
  return sortOfOrderList;
}
// static sortingOfElements2(sortOfOrderList: any[], propertyName: string, isAsc: boolean = true){
// sortOfOrderList.sort(
// (a, b) =>
// (a[propertyName] > b[propertyName]) ? (isAsc ? 1 : -1) :
// ((a[propertyName] < b[propertyName]) ? (isAsc ? -1 : 1) : 0));
// return sortOfOrderList;
// }
};<file_sep>import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Observable, of } from 'rxjs';
import { catchError, tap } from 'rxjs/operators';
import { environment } from 'src/environments/environment';
import { Address, AddressForCreation, AddressWithResidents, Country } from '../clients/models/address';
@Injectable()
export class AddressesService {
baseUrl: string = environment.apiBaseUrl + "addresses";
headers: Headers = null;
options;
// NOTE(review): `Headers` here is the legacy DOM type, not Angular's
// HttpHeaders, and `options` is never passed to any HttpClient call in
// this class — likely dead code carried over from the old Http module;
// confirm before removing.
constructor(private httpClient:HttpClient) {
this.headers = new Headers({ 'Content-Type': 'application/json' });
this.options = { headers: this.headers };
}
/**
 * Fetches every address from the API. Logs the payload and converts
 * transport errors into an undefined emission via handleError.
 */
getAllAddresses() : Observable<Address[]> {
  const url = `${this.baseUrl}/GetAll`;
  return this.httpClient.get<Address[]>(url).pipe(
    tap(list => {
      console.log("From services:", list);
    }),
    catchError(this.handleError<Address[]>("getAllAddresses"))
  );
}
/**
 * Fetches a single address by id.
 * Errors are swallowed by handleError (emits undefined).
 */
getById(id) : Observable<Address> {
return this.httpClient.get<Address>(this.baseUrl + "/getById/" + id).pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<Address>("getById"))
);
}
/**
 * Fetches an address together with the customers registered at it.
 * Used by the addresses-details resolver.
 */
getAddressWithResidents(id) : Observable<AddressWithResidents> {
return this.httpClient.get<AddressWithResidents>(this.baseUrl + "/GetAddressWithResidents/" + id).pipe(
tap(response =>
{
console.log("AddressWithResidents from services:", response);
}),
catchError(this.handleError<AddressWithResidents>("AddressWithResidents"))
);
}
/** Fetches the country lookup list (used to populate select controls). */
getAllCountries() : Observable<Country[]>{
return this.httpClient.get<Country[]>(this.baseUrl + "/GetAllCountries").pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<Country[]>("GetAllCountries"))
);
}
/** Deletes the address with the given id on the server. */
delete(id: string) : Observable<any> {
return this.httpClient.delete<any>(this.baseUrl + "/delete/" + id).pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<any>("delete"))
);
}
/** Posts a new address DTO to the API. */
create(addressForCreation: AddressForCreation) : Observable<any>{
return this.httpClient.post<any>(this.baseUrl + "/Create", addressForCreation).pipe(
tap(response =>
{
console.log("Create address", response);
}),
catchError(this.handleError<any>("create"))
);
}
/**
 * Returns a catchError handler that logs the failed operation name and
 * the error, then emits `result` (undefined unless provided) so the
 * stream completes instead of erroring.
 */
handleError<T>(operation, result?: T){
return (error: any): Observable<T> => {
console.error(operation);
console.error(error);
return of(result as T);
};
}
}<file_sep>import { HttpClient, HttpParams } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/internal/Observable';
import { of } from 'rxjs/internal/observable/of';
import { tap, catchError } from 'rxjs/operators';
import { Order, OrdersForCreation, OrderStatus, OrderStatusChangeParameters } from '../clients/models/orders';
import { environment } from './../../environments/environment';
@Injectable()
export class OrdersService {
baseUrl: string = environment.apiBaseUrl + "orders/";
headers: Headers = null;
options: { headers: Headers; };
// NOTE(review): legacy DOM `Headers`, not Angular HttpHeaders; `options`
// is never used by the HttpClient calls below — likely dead code.
constructor(private httpClient:HttpClient) {
this.headers = new Headers({ 'Content-Type': 'application/json' });
this.options = { headers: this.headers };
}
/** Fetches the order list items for the orders grid. */
getAllListItems() : Observable<Order[]> {
return this.httpClient.get<Order[]>(this.baseUrl + "GetAllListItems").pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<Order[]>("getAllCustomers"))
);
}
/** Posts a new order DTO to the API. */
create(ordersForCreation: OrdersForCreation) : Observable<any>{
return this.httpClient.post<any>(this.baseUrl + "CreateOrder", ordersForCreation).pipe(
tap(response =>
{
console.log("Create orders", response);
}),
catchError(this.handleError<any>("CreateOrder"))
);
}
/**
 * Fetches all order statuses.
 * Note: deliberately hits the `orderStatuses` controller directly
 * (environment.apiBaseUrl), not this service's `orders/` baseUrl.
 */
getAllOrderStatus() : Observable<OrderStatus[]>{
return this.httpClient.get<OrderStatus[]>(environment.apiBaseUrl + "orderStatuses/getAll").pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<OrderStatus[]>("getAllOrderStatus"))
);
};
/**
 * Updates an order's status via PUT. The two ids are sent as query
 * parameters (OrderId, NewOrderStatusId) with an empty body.
 */
changeOrderStatus(orderStatusChangeParameters: OrderStatusChangeParameters): Observable<any>{
let params = new HttpParams();
params = params.append("OrderId", orderStatusChangeParameters.orderId);
params = params.append('NewOrderStatusId', orderStatusChangeParameters.newOrderStatusId);
return this.httpClient.put<any>(this.baseUrl + "ChangeOrderStatus", null, { params: params }).pipe(
tap(response =>
{
console.log("Save Status", response);
}),
catchError(this.handleError<any>("putOrderStatus"))
);
};
/**
 * Formats the absolute difference between two dates as
 * "<years> years <months> months <days> days".
 *
 * Fixes vs. previous version:
 *  - uses UTC accessors so the result no longer depends on the local
 *    time zone of the machine rendering it;
 *  - subtracts 1 from the day component: the epoch-diff trick maps a
 *    zero difference to 1970-01-01, whose day-of-month is 1, so the
 *    old code reported "1 days" for identical dates.
 *
 * NOTE(review): month/year lengths are approximated by the calendar
 * starting at Jan 1970 — confirm this precision is acceptable for the UI.
 */
getDays(lastUpdateDate: Date, theNumberOfDays: Date){
  const diff = new Date(Math.abs(lastUpdateDate.getTime() - theNumberOfDays.getTime()));
  return (diff.getUTCFullYear() - 1970) + " years "
    + diff.getUTCMonth() + " months "
    + (diff.getUTCDate() - 1) + " days";
};
/**
 * Returns a catchError handler: logs the operation name and error,
 * then emits `result` (undefined unless provided) so callers keep running.
 */
handleError<T>(operation, result?: T){
return (error: any): Observable<T> => {
// TODO: send the error to remote logging infrastructure
console.error(operation);
console.error(error); // log to console instead
// Let the app keep running by returning an empty result.
return of(result as T);
};
}
}<file_sep>import { Component, OnInit } from '@angular/core';
import { faFontAwesome } from '@fortawesome/free-brands-svg-icons';
import { faSort, faObjectGroup, faFilter, faListOl, faMapMarkedAlt, faSearch, faCheckSquare, faSquare } from '@fortawesome/free-solid-svg-icons';
@Component({
selector: 'app-home',
templateUrl: './home.component.html',
styleUrls: ['./home.component.scss']
})
/**
 * Static landing page. Holds no state — only FontAwesome icon
 * references consumed by the template.
 */
export class HomeComponent implements OnInit {
faFontAwesome = faFontAwesome;
faSort = faSort;
faObjectGroup = faObjectGroup;
faFilter = faFilter;
faListOl = faListOl;
faMapMarkedAlt = faMapMarkedAlt;
faSearch = faSearch;
faCheckSquare = faCheckSquare;
faSquare = faSquare;
constructor() { }
// No initialization needed for this static page.
ngOnInit() {
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { NgForm } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import {
faEdit,
faInfo,
faPlusCircle,
faPlusSquare,
faSort,
faSortDown,
faSortUp,
faTrash,
IconDefinition,
} from '@fortawesome/free-solid-svg-icons';
import { ToastrService } from 'ngx-toastr';
import { SortHelper } from 'src/app/shared/helpers/sort-helper';
import { MyPager, OrderedItem } from 'src/app/shared/models/shared.models';
import { ClientsService } from '../clients.service';
import { Customer } from '../models/customer';
@Component({
selector: 'app-client-list',
templateUrl: './client-list.component.html',
styleUrls: ['./client-list.component.scss'],
})
/**
 * Paged, sortable customer grid. The customer list is pre-fetched by
 * ClientListResolver; sorting is done client-side via SortHelper and a
 * pager component feeds orderedCustomers through onPageChanged.
 */
export class ClientListComponent implements OnInit {
// UI mode flags toggled from the template.
isDetailsClicked: boolean = false;
isAddingMode: boolean = false;
newCustomer: Customer = null;
// Seed for locally generated customer ids (pre-incremented on use).
id: number = 11114;
// Full customer list (sorted in place by sort()).
customerObjectList: Customer[] = [];
// Customers visible on the current page (set by the pager).
orderedCustomers: OrderedItem[] = [];
// Current sort direction and column.
isAsc: boolean;
selectedColumnName: SortColumnsInClients;
// Exposes the enum to the template for column-header comparisons.
SortColumns: typeof SortColumnsInClients = SortColumnsInClients;
// NOTE(review): these three flags are never assigned in this class.
upArrowIcon: boolean;
downArrowIcon: boolean;
bothArrowIcon: boolean;
// FontAwesome icons for the template.
faTrash = faTrash;
faEdit = faEdit;
faInfo = faInfo;
faPlusSquare = faPlusSquare;
faSortUp = faSortUp;
faSortDown = faSortDown;
faSort = faSort;
constructor(
private toastr: ToastrService,
private route: ActivatedRoute,
private clientService: ClientsService
) {
}
// Reads the resolver output and applies the default sort (Age, asc).
ngOnInit() {
this.route.data.subscribe((value) => {
this.customerObjectList = value['customerList'];
this.selectedColumnName = SortColumnsInClients.Age;
this.isAsc = true;
this.sort();
});
}
// Deletes the customer server-side.
// NOTE(review): the customer is NOT removed from customerObjectList
// here — confirm the list is refreshed elsewhere or the row lingers.
deleteCustomer(customerId: string) {
this.clientService.delete(customerId).subscribe((response) => {
console.log('delete');
this.toastr.info('Customer has been deleted!', 'Info');
});
}
// Toggles add mode and prepares an empty Customer with a fresh local id.
addCustomer() {
this.isAddingMode = !this.isAddingMode;
this.isDetailsClicked = false;
this.newCustomer = new Customer(
(++this.id).toString(),
'',
'',
0,
0,
null,
'',
'',
''
);
}
// Template-driven form submit hook (currently only logs).
onSubmit(addingUser: NgForm) {
console.log(addingUser.value);
}
// Assigns a local id to a customer emitted by a child component.
onCustomerAdd(newCustomer: Customer) {
newCustomer.id = (++this.id).toString();
this.toastr.success('A new customer has been added!', 'New Customer');
}
// Receives the current page's items from the pager component.
onPageChanged(event: MyPager) {
this.orderedCustomers = event.pageOfItems;
console.log("ordered: ", this.orderedCustomers);
}
// Picks the header icon: up/down for the active sort column, a neutral
// sort glyph otherwise.
getArrowClass(columnNameClicked: SortColumnsInClients): IconDefinition {
if (columnNameClicked === this.selectedColumnName) {
if (this.isAsc === false) {
return this.faSortUp;
} else {
return this.faSortDown;
}
} else {
return this.faSort;
}
}
// Clicking the active column flips direction; a new column resets to asc.
sortList(columnNameClicked: SortColumnsInClients) {
if (columnNameClicked === this.selectedColumnName) {
this.isAsc = !this.isAsc;
} else {
this.selectedColumnName = columnNameClicked;
this.isAsc = true;
}
this.sort();
}
// Sorts the list by the selected column. The JSON round-trip produces a
// new array reference so Angular change detection picks up the reorder.
// NOTE(review): the City case sorts by the whole 'address' object, whose
// relational comparison is not meaningful — verify intended behavior.
sort() {
switch (this.selectedColumnName) {
case SortColumnsInClients.FullName:
this.customerObjectList = JSON.parse(JSON.stringify(
SortHelper.sortingOfElements(this.customerObjectList, 'name', this.isAsc)));
break;
case SortColumnsInClients.Sex:
this.customerObjectList = JSON.parse(JSON.stringify(
SortHelper.sortingOfElements(this.customerObjectList, 'gender', this.isAsc )));
break;
case SortColumnsInClients.Age:
let copyOfCustomers = JSON.parse(JSON.stringify(this.customerObjectList));
this.customerObjectList = JSON.parse(JSON.stringify(
SortHelper.sortingOfElements(copyOfCustomers, 'age', this.isAsc )));
break;
case SortColumnsInClients.City:
this.customerObjectList = JSON.parse(JSON.stringify(
SortHelper.sortingOfElements(this.customerObjectList, 'address', this.isAsc)));
break;
default:
this.customerObjectList = JSON.parse(JSON.stringify(
SortHelper.sortingOfElements(this.customerObjectList, 'name', this.isAsc)));
break;
}
}
}
export enum SortColumnsInClients{
Id = 0,
FullName,
Sex,
Age,
City,
}<file_sep>import { Injectable } from '@angular/core';
import { ActivatedRouteSnapshot, Resolve, RouterStateSnapshot } from '@angular/router';
import { Observable, of } from 'rxjs';
import { Order } from '../clients/models/orders';
import { OrdersService } from './orders.service';
@Injectable()
/**
 * Route resolver that pre-fetches the full order list before the orders
 * view activates. The commented code shows the former hard-coded stub.
 */
export class OrdersResolver implements Resolve<Order[]>{
constructor(private ordersService: OrdersService) {}
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<Order[]>{
// NOTE(review): `id` is read but unused — the endpoint returns all orders.
const id: string = route.paramMap.get('id');
// let orders: Order[] = [ {id: "1", orderedByCustomerId: "<NAME>", price: 123, orderedDate: new Date(2020, 10, 1), lastUpdateDate: new Date(2020, 10, 1), status: "ready"}]
return this.ordersService.getAllListItems();
// return of(orders);
}
}
<file_sep>import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Observable, of } from 'rxjs';
import { catchError, tap } from 'rxjs/operators';
import { environment } from 'src/environments/environment';
import { Address, AddressForCreation, AddressWithResidents } from '../clients/models/address';
import { UserAccountForCreation } from '../clients/models/signUp';
@Injectable()
export class SignUpService {
baseUrl: string = environment.apiBaseUrl + "addresses";
headers: Headers = null;
options;
constructor(private httpClient:HttpClient) {
this.headers = new Headers({ 'Content-Type': 'application/json' });
this.options = { headers: this.headers };
}
/**
 * Posts a new user account to the API.
 *
 * Fix: the URL previously concatenated baseUrl + "create" with no
 * separator, producing ".../addressescreate".
 * TODO(review): baseUrl is apiBaseUrl + "addresses" — a copy-paste from
 * AddressesService; confirm the real sign-up endpoint (likely a
 * user-accounts controller) and adjust the base resource.
 */
create(userAccountForCreation: UserAccountForCreation) : Observable<any>{
  return this.httpClient.post<any>(this.baseUrl + "/create", userAccountForCreation).pipe(
    tap(response =>
    {
      console.log("Create user", response);
    }),
    catchError(this.handleError<any>("create"))
  );
}
handleError<T>(operation, result?: T){
return (error: any): Observable<T> => {
// TODO: send the error to remote logging infrastructure
console.error(operation);
console.error(error); // log to console instead
// Let the app keep running by returning an empty result.
return of(result as T);
};
}
}<file_sep>import { Injectable } from '@angular/core';
import { Resolve } from '@angular/router';
import { Observable } from 'rxjs';
import { Address } from '../clients/models/address';
import { AddressesService } from './addresses.service';
@Injectable()
/**
 * Route resolver that pre-fetches the full address list before an
 * addresses view activates.
 */
export class AddressesResolver implements Resolve<Address[]> {
constructor(private addressesService: AddressesService) {
}
resolve(): Observable<Address[]> {
return this.addressesService.getAllAddresses();
}
};
<file_sep>import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Customer } from './models/customer';
import { catchError, tap } from 'rxjs/operators';
import { of } from 'rxjs/internal/observable/of';
import { Observable } from 'rxjs';
import { CustomerForCreation } from './models/customerForCreation';
import { environment } from 'src/environments/environment';
@Injectable()
/**
 * HTTP gateway for the customers API. All calls log their payloads and
 * route failures through handleError, which swallows the error and
 * emits undefined so subscribers keep running.
 */
export class ClientsService {
baseUrl: string = environment.apiBaseUrl + "customers";
// NOTE(review): legacy DOM Headers (not HttpHeaders); `options` is never
// passed to any HttpClient call — likely dead code.
headers: Headers = null;
options;
constructor(private httpClient:HttpClient) {
this.headers = new Headers({ 'Content-Type': 'application/json' });
this.options = { headers: this.headers };
}
/** Fetches all customers. */
getAllClients() : Observable<Customer[]> {
return this.httpClient.get<Customer[]>(this.baseUrl + "/getall").pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<Customer[]>("getAllCustomers"))
);
}
/** Fetches one customer by id. */
getById(id: string) : Observable<Customer> {
return this.httpClient.get<Customer>(this.baseUrl + "/getById/" + id).pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<Customer>("getById"))
);
}
/** Creates a new customer from the flat DTO. */
create(customerForCreation: CustomerForCreation) : Observable<any>{
return this.httpClient.post<any>(this.baseUrl + "/CreateCustomer", customerForCreation).pipe(
tap(response =>
{
console.log("Create customer", response);
}),
catchError(this.handleError<any>("create"))
);
}
// Updates an existing customer.
// NOTE(review): uses POST (matching the backend route name) and the DTO
// carries no id — confirm how the server identifies the target customer.
updateCustomer(customerForCreation: CustomerForCreation) : Observable<any>{
return this.httpClient.post<any>(this.baseUrl + "/UpdateCustomer", customerForCreation).pipe(
tap(response =>
{
console.log("Update Customer", response);
}),
catchError(this.handleError<any>("update"))
);
}
/** Deletes the customer with the given id. */
delete(customerId: string) : Observable<any> {
return this.httpClient.delete<any>(this.baseUrl + "/delete/" + customerId).pipe(
tap(response =>
{
console.log("From services:", response);
}),
catchError(this.handleError<any>("delete"))
);
}
/**
 * Returns a catchError handler that logs and emits `result`
 * (undefined unless provided) instead of erroring the stream.
 */
handleError<T>(operation, result?: T){
return (error: any): Observable<T> => {
// TODO: send the error to remote logging infrastructure
console.error(operation);
console.error(error); // log to console instead
// Let the app keep running by returning an empty result.
return of(result as T);
};
}
}
<file_sep>import { Injectable } from "@angular/core";
import { ActivatedRouteSnapshot, Resolve, RouterStateSnapshot } from '@angular/router';
import { Observable, of } from 'rxjs';
import { ClientsService } from './clients.service';
import { Address, Country } from './models/address';
import { Customer } from './models/customer';
@Injectable()
/**
 * Resolver for the customer edit route. The sentinel id '0' means
 * "create new": it yields a blank Customer instead of hitting the API;
 * any other id is fetched from the customers service.
 */
export class ClientEditResolver implements Resolve<Customer>{
constructor(private clientsService: ClientsService) {
}
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<Customer>{
const id: string = route.paramMap.get('id');
let customer: Customer = null;
if(id === "0"){
console.log("creating client from resolver...");
// Empty address so the edit form's nested controls have something to bind.
let address: Address = {
id: "",
city: "",
zipCode: 0,
street: "",
country: new Country,
};
customer = new Customer("", "", "", 0, 0, address, "", "", "");
}else{
console.log("fetching client from resolver...");
return this.clientsService.getById(id);
}
return of(customer);
}
}
<file_sep>import { Component, NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { AboutComponent } from './about/about.component';
import { AddressesResolver } from './addresses/addresses.resolver';
import { ContactComponent } from './contact/contact.component';
import { HomeComponent } from './home/home.component';
import { InProgressComponent } from './inProgress/inProgress.component';
import { LoginPanelComponent } from './loginPanel/loginPanel.component';
import { NotFoundComponent } from './not-found/not-found.component';
import { SignUpPanelComponent } from './signUpPanel/signUpPanel.component';
/**
 * Top-level application routes; feature areas (orders, clients,
 * addresses) are lazy-loaded modules.
 *
 * Fix: the '404' route is now declared BEFORE the '**' wildcard. The
 * Router matches first-wins, so with the wildcard first, navigating to
 * '/404' itself matched '**' and redirected back to '/404' — an
 * infinite redirect loop that also made NotFoundComponent unreachable.
 */
const routes: Routes = [
  {
    path: '',
    children: [
      {
        path:'',
        redirectTo: 'home',
        pathMatch: 'full',
      },
      {
        path: 'about',
        component: AboutComponent,
      },
      {
        path: 'contact',
        component: ContactComponent,
      },
      {
        path: 'home',
        component: HomeComponent,
      },
      {
        path: 'orders',
        loadChildren: () => import('./orders/orders.module').then(m => m.OrdersModule),
      },
      {
        path: 'loginPanel',
        component: LoginPanelComponent,
      },
      {
        path: 'signUpPanel',
        component: SignUpPanelComponent,
      },
      {
        path: 'inProgress',
        component: InProgressComponent,
      },
      {
        path: 'clients',
        loadChildren: () => import('./clients/clients.module').then(m => m.ClientsModule),
      },
      {
        path:'addresses',
        loadChildren: () => import('./addresses/addresses.module').then(m => m.AddressesModule),
      },
      {
        path: '404',
        component: NotFoundComponent,
      },
      // Catch-all for unknown URLs — must remain last.
      {
        path: '**',
        redirectTo: '/404',
      },
    ]}
];
@NgModule({
imports: [RouterModule.forRoot(routes, {scrollPositionRestoration: 'enabled'})],
exports: [RouterModule],
providers: [
AddressesResolver
]
})
export class AppRoutingModule {}<file_sep>import { Component, OnInit } from '@angular/core';
import { faKey, faUser } from '@fortawesome/free-solid-svg-icons';
@Component({
selector: 'app-loginPanel',
templateUrl: './loginPanel.component.html',
styleUrls: ['./loginPanel.component.scss']
})
/**
 * Login form shell. Currently presentation-only: exposes icons for the
 * template and holds no authentication logic.
 */
export class LoginPanelComponent implements OnInit {
faKey = faKey;
faUser =faUser;
constructor() { }
ngOnInit() {
}
}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { EditCustomerComponent } from './edit-customer/edit-customer.component';
import { ClientDetailsComponent } from './client-details/client-details.component';
import { ClientListResolver } from './client-list.resolver';
import { ClientListComponent } from './client-list/client-list.component';
import { ClientsDetailsResolver } from './clients-details.resolver';
import { ClientEditResolver } from './client-edit.resolver';
/**
 * Routes for the lazy-loaded clients feature module, mounted under
 * '/clients'. List/edit/details views each pre-fetch data via resolvers.
 *
 * Fix: the empty-path and 'add' redirects are now RELATIVE ('list',
 * 'edit/0'). The previous absolute targets ('/list', '/edit/0')
 * resolved against the application root — where no such routes exist —
 * instead of '/clients/list' and '/clients/edit/0'.
 */
const routes: Routes = [
  {
    path: '',
    children: [
      {
        path:'',
        redirectTo: 'list',
        pathMatch: 'full',
      },
      {
        path:'list',
        component: ClientListComponent,
        resolve: {
          customerList: ClientListResolver,
        }
      },
      {
        // 'add' is an alias for editing the sentinel customer id 0.
        path:'add',
        redirectTo: 'edit/0',
        pathMatch: 'full',
      },
      {
        path: 'edit/:id',
        component: EditCustomerComponent,
        resolve: {
          customer: ClientEditResolver,
        }
      },
      {
        path: 'details/:id',
        component: ClientDetailsComponent,
        resolve: {
          customer: ClientsDetailsResolver,
        }
      },
    ]}
];
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule],
providers: [
ClientsDetailsResolver,
ClientListResolver,
ClientEditResolver
]
})
export class ClientsRoutingModule {
}
<file_sep>export const environment = {
production: true,
baseUrl: "https://api.kacper-berganski-portfolio.pl/",
apiBaseUrl: "https://api.kacper-berganski-portfolio.pl/api/"
};
<file_sep>import { Address } from './address';
import { Gender } from './customer';
// Flat DTO posted to the customers API for both create and update —
// the nested form address is flattened into countryId/city/zipCode/street.
export class CustomerForCreation{
name: string;
lastName: string;
age: number;
countryId: string;
city: string;
// NOTE(review): string here while Address.zipCode is a number elsewhere
// in the models — confirm the API contract.
zipCode: string;
street: string;
phoneNumber: string;
email: string;
// Numeric gender code — presumably maps to the Gender enum; verify.
gender: number;
}<file_sep>import { HttpClient } from '@angular/common/http';
import {
AfterViewInit,
Component,
ElementRef,
Input,
OnChanges,
OnInit,
SimpleChanges,
ViewChild,
ɵConsole,
} from '@angular/core';
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { faArrowLeft, faSave, faTimes } from '@fortawesome/free-solid-svg-icons';
import { ToastrService } from 'ngx-toastr';
import { Observable, of } from 'rxjs';
import { catchError, tap } from 'rxjs/operators';
import {
Address,
AddressForCreation,
Country,
} from 'src/app/clients/models/address';
import { MyFormGroup } from 'src/app/shared/extentions/myFormGroup';
import { AddressesService } from '../addresses.service';
@Component({
selector: 'app-addresses-add',
templateUrl: './addresses-add.component.html',
styleUrls: ['./addresses-add.component.scss'],
})
/**
 * Reactive form for creating a new address. Loads the country lookup on
 * init and defaults the country selection to Poland when the form has
 * no country yet.
 */
export class AddressesAddComponent implements OnInit, AfterViewInit, OnChanges {
@ViewChild('viewCountries') countrysChild: ElementRef;
// Provided by a parent component; not read in this class (template use?).
@Input() addressesList: Address[];
copyAddressList: AddressForCreation[] = [];
// Backing model used only to seed the form controls' initial values.
address: AddressForCreation = {
countryId: '',
city: '',
street: '',
zipCode: '',
};
form: MyFormGroup = null;
faSave = faSave;
faTimes = faTimes;
faArrowLeft = faArrowLeft;
// NOTE(review): hard-coded API URL duplicates environment.apiBaseUrl and
// is unused in this class — confirm before removing.
baseUrl: string = 'https://api.kacper-berganski-portfolio.pl/api/addresses';
countries: Country[] = [];
polandId: string;
initialCountry: string;
countryId: string;
// NOTE(review): httpClient and route are injected but never used here.
constructor(
private httpClient: HttpClient,
private toastr: ToastrService,
private addressService: AddressesService,
private route: ActivatedRoute
) {
let zipCodePattern = '[0-9]{2}-[0-9]{3}'; // XX-XXX
let cityPattern = '[a-zA-Z ]*';
this.form = new MyFormGroup({
countryId: new FormControl(this.address.countryId, [Validators.required]),
zipCode: new FormControl(this.address.zipCode, [
Validators.required,
Validators.minLength(5),
Validators.maxLength(7),
Validators.pattern(zipCodePattern),
]),
city: new FormControl(this.address.city, [
Validators.required,
Validators.minLength(3),
Validators.maxLength(30),
Validators.pattern(cityPattern)
]),
street: new FormControl(this.address.street, [
Validators.required,
Validators.minLength(3),
Validators.maxLength(40),
]),
});
}
ngAfterViewInit(): void {
//this.countrysChild.nativeElement.value = this.initialCountry;;
}
// Loads countries and defaults the countryId control to Poland when the
// form does not already carry a country.
ngOnInit() {
this.addressService.getAllCountries().subscribe((response) => {
this.countries = response;
this.polandId = this.countries.find((x) => x.name === 'Poland')?.id;
this.initialCountry =
this.address.countryId === '' || !this.address.countryId
? this.polandId
: this.address.countryId;
this.form.value.countryId = this.initialCountry;
this.form.controls["countryId"].setValue(this.initialCountry);
});
}
ngOnChanges(changes: SimpleChanges): void {
}
// Builds the DTO from the form and posts it to the addresses API.
// NOTE(review): the subscribe callback is empty — no success/error toast
// is shown on save; confirm whether feedback is expected here.
saveAddress() {
let addressToCreate: AddressForCreation = {
countryId: this.form.value.countryId,
city: this.form.value.city,
zipCode: this.form.value.zipCode,
street: this.form.value.street,
};
console.log("saving", addressToCreate);
this.addressService.create(addressToCreate).subscribe((response) => {});
}
showSuccess() {
this.toastr.success('Customer added!', 'Success!');
}
clean() {
this.toastr.success('Data cleared!', 'Success!');
}
// Swallows errors and emits `result` (undefined unless provided).
handleError<T>(operation, result?: T) {
return (error: any): Observable<T> => {
return of(result as T);
};
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { FaIconLibrary } from '@fortawesome/angular-fontawesome';
import { faEnvelope, faPhone, IconName, IconLookup, faCheckSquare, faSquare } from '@fortawesome/free-solid-svg-icons';
import { faStackOverflow, faGithub, faMedium, faFacebookSquare, faLinkedin, faGithubSquare } from '@fortawesome/free-brands-svg-icons';
import { fas } from '@fortawesome/free-solid-svg-icons';
import { isContinueStatement } from 'typescript';
@Component({
selector: 'app-footer',
templateUrl: './footer.component.html',
styleUrls: ['./footer.component.scss']
})
/**
 * Application footer: contact/social icons. Some icons are registered in
 * the FaIconLibrary for name-based lookup in the template, the rest are
 * exposed as fields.
 */
export class FooterComponent implements OnInit {
faPhone = faPhone;
faEnvelope = faEnvelope;
faFacebookSquare = faFacebookSquare;
faLinkedin = faLinkedin;
faGithubSquare = faGithubSquare;
// NOTE(review): faSquare/faCheckSquare are registered twice — harmless
// but redundant.
constructor(private library: FaIconLibrary) {
library.addIcons(faSquare, faCheckSquare, faSquare, faCheckSquare, faStackOverflow, faGithub, faMedium);
}
ngOnInit(): void {
}
}
<file_sep>import { FormGroup } from '@angular/forms';
export class MyFormGroup extends FormGroup{
/**
 * True when the named control carries the given validation error AND the
 * user has interacted with it (touched + dirty) — keeps error messages
 * hidden on pristine forms.
 * NOTE(review): the `?.` chains can yield undefined (falsy) when the
 * control is missing, despite the declared boolean return type.
 */
checkError(propName: string, errorType: string) : boolean {
return(
this.get(propName)?.invalid &&
this.get(propName)?.hasError(errorType) &&
this.get(propName)?.touched &&
this.get(propName)?.dirty
);
}
checkIsRequired(propName: string) : boolean {
return this.checkError(propName, 'required');
}
checkIsMinLength(propName: string) : boolean {
return this.checkError(propName, 'minlength');
}
checkIsMaxLength(propName: string) : boolean {
return this.checkError(propName, 'maxlength');
}
checkIsPattern(propName: string) : boolean {
return this.checkError(propName, 'pattern');
}
}<file_sep>export class Contact{
name: string;
lastName: string;
phoneNumber: string;
subject: string;
email:string;
}<file_sep>import { Injectable } from "@angular/core";
import { ActivatedRouteSnapshot, Resolve, RouterStateSnapshot } from '@angular/router';
import { Observable } from 'rxjs';
import { ClientsService } from './clients.service';
import { Customer } from './models/customer';
@Injectable()
/**
 * Resolver for the customer details route: fetches the customer whose
 * id appears in the route params before the view activates.
 */
export class ClientsDetailsResolver implements Resolve<Customer>{
constructor(private clientsService: ClientsService) {}
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<Customer>{
const id: string = route.paramMap.get('id');
return this.clientsService.getById(id);
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { faEnvelope, faExclamation, faKey, faUser } from '@fortawesome/free-solid-svg-icons';
import { UserAccountForCreation } from '../clients/models/signUp';
import { MyFormGroup } from '../shared/extentions/myFormGroup';
// import { FormHelpersService } from '../shared/helpers/formHelpers.service';
import { SignUpService } from './signUp.service';
@Component({
selector: 'app-signUpPanel',
templateUrl: './signUpPanel.component.html',
styleUrls: ['./signUpPanel.component.scss']
})
export class SignUpPanelComponent implements OnInit {
faExclamation = faExclamation;
faKey = faKey;
faEnvelope = faEnvelope;
faUser = faUser;
form: MyFormGroup = null;
userAccount: UserAccountForCreation = {
firstName: "",
lastName: "",
emailAddress: "",
password: ""
};
constructor(
private signUpService: SignUpService,
// public formHelper: FormHelpersService
) {
this.form = new MyFormGroup({
"firstName": new FormControl(this.userAccount.firstName,[
Validators.required,
Validators.minLength(3),
Validators.maxLength(25),
Validators.pattern('[a-zA-Z ]*')]),
"lastName": new FormControl(this.userAccount.lastName,[
Validators.required,
Validators.minLength(3),
Validators.maxLength(30),
Validators.pattern('[a-zA-Z]*')]),
"emailAddress": new FormControl(this.userAccount.emailAddress,[
Validators.required,
Validators.minLength(5),
Validators.maxLength(40),
Validators.pattern('^[a-z0-9._%+-]+@[a-z0-9.-]+\\.[a-z]{2,4}$')]),
"password": new FormControl(this.userAccount.password,[
Validators.required,
Validators.minLength(8),
Validators.maxLength(40)])
});
// this.formHelper.form = this.form;
}
ngOnInit() {
}
// checkIsRequired(propName: string) : boolean {
// return this.checkError(propName, 'required');
// }
// checkIsMinLength(propName: string) : boolean {
// return this.checkError(propName, 'minlength');
// }
// checkIsMaxLength(propName: string) : boolean {
// return this.checkError(propName, 'maxlength');
// }
// checkIsPattern(propName: string) : boolean {
// return this.checkError(propName, 'pattern');
// }
// checkError(propName: string, errorType: string): boolean{
// return (
// this.form.get(propName)?.invalid &&
// this.form.get(propName)?.hasError(errorType) &&
// this.form.get(propName).touched &&
// this.form.get(propName).dirty
// );
// }
/**
 * Builds the sign-up DTO from the reactive form and posts it via
 * SignUpService.
 *
 * Fix: the password field previously read `password: <PASSWORD>` — an
 * invalid placeholder (likely a repository-sanitization artifact) —
 * and is restored to read the value from the form like every other field.
 */
saveUserAccount(){
  console.log("first step saving", this.form.value);
  let userToCreate: UserAccountForCreation = {
    firstName: this.form.value.firstName,
    lastName: this.form.value.lastName,
    emailAddress: this.form.value.emailAddress,
    password: this.form.value.password
  }
  // Avoid logging the full DTO here — it contains the plaintext password.
  console.log("saving user account for", userToCreate.emailAddress)
  this.signUpService.create(userToCreate).subscribe(response => {
    console.log("Subscribe for creation")
  });
};
}<file_sep>import { CommonModule, CurrencyPipe } from '@angular/common';
import { HttpClientModule } from '@angular/common/http';
import { NgModule } from '@angular/core';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { AlertModule } from 'ngx-bootstrap/alert';
import { ToastrModule } from 'ngx-toastr';
import { AddressesAddComponent } from './addresses-add/addresses-add.component';
import { AddressesDetailsComponent } from './addresses-details/addresses-details.component';
import { AddressesRoutingModule } from './addresses-routing.module';
import { AddressesComponent } from './addresses.component';
import { CarouselModule } from 'ngx-bootstrap/carousel';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
import { SharedModule } from '../shared/shared.module';
// Feature module for the addresses area: list, add and details components
// plus their routing. Pulls in ngx-bootstrap (alerts, carousel),
// ngx-toastr and the app-wide SharedModule.
@NgModule({
declarations: [
AddressesComponent,
AddressesAddComponent,
AddressesDetailsComponent
],
imports: [
CommonModule,
FormsModule,
// NOTE(review): .forRoot() on Alert/Toastr/Carousel inside a feature
// module -- usually called once in the root module; confirm this is
// intentional and not duplicating providers.
AlertModule.forRoot(),
ReactiveFormsModule,
ToastrModule.forRoot(),
AddressesRoutingModule,
HttpClientModule,
CarouselModule.forRoot(),
FontAwesomeModule,
SharedModule
],
providers: [
// CurrencyPipe is provided so it can be injected into components/services.
CurrencyPipe
],
})
export class AddressesModule { }<file_sep>import { CurrencyPipe } from '@angular/common';
import { AfterContentInit, AfterViewInit, Component, ElementRef, OnInit, ViewChild } from '@angular/core';
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { faBroom, faExclamation, faSave, faTimes, faWindowClose } from '@fortawesome/free-solid-svg-icons';
import { BsModalRef, BsModalService, ModalDirective } from 'ngx-bootstrap/modal';
import { ToastrService } from 'ngx-toastr';
import { ClientsService } from 'src/app/clients/clients.service';
import { Customer } from 'src/app/clients/models/customer';
import {Order, OrdersForCreation } from 'src/app/clients/models/orders';
import { MyFormGroup } from 'src/app/shared/extentions/myFormGroup';
import { OrdersService } from '../orders.service';
@Component({
selector: 'app-orders-add-modal',
templateUrl: './orders-add-modal.html',
styleUrls: ['./orders-add-modal.scss']
})
export class OrdersAddModal implements OnInit, AfterViewInit{
@ViewChild('ordersAddModal' , { static: false }) modal: ModalDirective;
@ViewChild('price') priceChild: ElementRef;
ordersList: Order[] = [];
clientsList: Customer[];
ordersAddList: OrdersForCreation[];
form: MyFormGroup = null;
faTimes = faTimes;
faSave = faSave;
faExclamation = faExclamation;
order: OrdersForCreation ={
orderedByCustomerId: "",
price: 0,
};
constructor(
private toastr: ToastrService,
private ordersService: OrdersService,
private customerService: ClientsService,
private route: ActivatedRoute,
private modalService: BsModalService
) {
this.form = new MyFormGroup({
"price": new FormControl(this.order.price,[Validators.required, Validators.pattern("[0-9]*[.]?[0-9]+")]),
"orderedByCustomerId": new FormControl(this.order.orderedByCustomerId,[Validators.required, Validators.minLength(3), Validators.maxLength(40)])
});
this.clientsList = [];
}
ngOnInit() {
this.customerService.getAllClients().subscribe(clients =>
this.clientsList = clients);
}
ngAfterViewInit(): void {
this.priceChild.nativeElement.focus();
this.priceChild.nativeElement.setAttribute('placeholder', "Enter price");
this.priceChild.nativeElement.value = null;
}
/**
 * Reads price and customer id from the reactive form, posts a new order
 * via OrdersService, and closes the modal immediately (the modal does not
 * wait for the create call to complete).
 */
saveOrders(){
console.log("first step saving", this.form.value);
let ordersToCreate: OrdersForCreation = {
price: this.form.value.price,
orderedByCustomerId: this.form.value.orderedByCustomerId,
}
this.ordersService.create(ordersToCreate).subscribe(response => {
console.log("Subscribe for creation")
// NOTE(review): this assigns a single OrdersForCreation object to
// ordersList, which is declared as Order[] -- it was likely meant to
// append the created order (e.g. wrap in an array). TODO confirm intent.
this.ordersList = JSON.parse(JSON.stringify(ordersToCreate));
});
// Modal closes before the HTTP call resolves.
this.modal.hide();
};
showSuccess() {
this.toastr.success('Order added!','Success!');
}
clean() {
this.toastr.success('Data cleared!','Success!');
}
close(){
this.modal.hide();
}
show() {
this.modal.show();
}
}
<file_sep>import {
Component,
EventEmitter,
OnInit,
Output,
} from '@angular/core';
import {
faSave,
faTimes,
faTrash,
} from '@fortawesome/free-solid-svg-icons';
import {
BsModalRef,
} from 'ngx-bootstrap/modal';
import { ToastrService } from 'ngx-toastr';
import {
OrderStatus,
OrderStatusChangeParameters,
} from 'src/app/clients/models/orders';
import { OrderedItem } from 'src/app/shared/models/shared.models';
import { OrdersService } from '../orders.service';
@Component({
selector: 'app-orders-option-modal',
templateUrl: './orders-option-modal.html',
styleUrls: ['./orders-option-modal.scss'],
})
export class OrdersOptionModal implements OnInit {
selectedOrdersIdsFromParent: string[] = [];
selectedOrdersFromParent: OrderedItem[] = [];
orderStatuses: OrderStatus[] = [];
@Output() updateOrderListEvent = new EventEmitter<any>();
parametersRequests: OrderStatusChangeParameters[] = [];
faSave = faSave;
faTimes = faTimes;
faTrash = faTrash;
constructor(
public bsModalRef: BsModalRef,
private toastr: ToastrService,
private ordersService: OrdersService
) {
}
ngOnInit() {
this.ordersService
.getAllOrderStatus()
.subscribe((response) => (this.orderStatuses = response));
}
saveNewOrderStatus() {
this.parametersRequests.forEach((paramReq) => {
this.ordersService.changeOrderStatus(paramReq).subscribe(() => {
this.toastr.success('Order status changed!', 'Success');
this.ordersService.getAllListItems().subscribe((response) => {
this.updateOrderListEvent.emit(response);
});
this.bsModalRef.hide()
});
});
}
close() {
this.bsModalRef.hide();
}
deleteOrderFromList(orderId: string) {
let index = this.selectedOrdersFromParent.findIndex(
(order) => order.item.id == orderId
);
this.selectedOrdersFromParent.splice(index, 1);
console.log(this.selectedOrdersFromParent);
}
/**
 * True when a status with the given id exists and its name equals the
 * label supplied by the template.
 */
checkStatus(orderStatusId: string, cokolwiekCoPowiemywHtmlu: string) {
  const match = this.orderStatuses.find((status) => status.id == orderStatusId);
  return !!match && match.name === cokolwiekCoPowiemywHtmlu;
}
/** Resolves a status id to its display name; undefined when the id is unknown. */
getStatusName(statusId: string): string {
  const found = this.orderStatuses.find((s) => s.id == statusId);
  return found?.name;
}
/**
 * Applies the newly chosen status to every selected order (mutating the
 * copies held by this modal) and rebuilds the change-request list that
 * saveNewOrderStatus() will send.
 */
onSelectedStatus(newOrderStatusId: string) {
  this.parametersRequests = this.selectedOrdersFromParent.map((selected) => {
    selected.item.statusId = newOrderStatusId;
    return {
      orderId: selected.item.id,
      newOrderStatusId: newOrderStatusId,
    };
  });
}
}<file_sep>import { Injectable } from '@angular/core';
import { Resolve, ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router';
import { Observable } from 'rxjs';
import { ClientsService } from './clients.service';
import { Customer } from './models/customer';
/**
 * Route resolver that preloads the full client list before a route
 * activates. Delegates straight to ClientsService; the router subscribes
 * to the returned observable for us.
 */
@Injectable()
export class ClientListResolver implements Resolve<Customer[]> {
  // NOTE(review): never written in the visible code -- possibly dead.
  customers: Customer[];

  constructor(private clientsService: ClientsService) {}

  resolve(): Observable<Customer[]> {
    return this.clientsService.getAllClients();
  }
};
<file_sep>import { ClassGetter } from '@angular/compiler/src/output/output_ast';
import { Component, OnInit, ViewChild } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import {
faCheck,
faFilter,
faPlusSquare,
faSort,
faSortDown,
faSortUp,
IconDefinition,
} from '@fortawesome/free-solid-svg-icons';
import {
BsModalRef,
BsModalService,
ModalDirective,
} from 'ngx-bootstrap/modal';
import { ToastrService } from 'ngx-toastr';
import { BehaviorSubject, Observable } from 'rxjs';
import {
Order,
OrderStatus,
OrderStatusChangeParameters,
} from '../clients/models/orders';
import { SortHelper } from '../shared/helpers/sort-helper';
import { MyPager, OrderedItem } from '../shared/models/shared.models';
import { OrdersOptionModal } from './orders-option/orders-option-modal';
import { OrdersService } from './orders.service';
@Component({
selector: 'app-orders',
templateUrl: './orders.component.html',
styleUrls: ['./orders.component.scss'],
})
export class OrdersComponent implements OnInit {
ordersList: Order[] = [];
ordersListInitial: Order[] = [];
id: number = 1;
orderStatuses: OrderStatus[] = [];
selectedOrderStatus: OrderStatus;
optionDisabled: boolean = false;
faPlusSquare = faPlusSquare;
faSort: IconDefinition = faSort;
faSortUp: IconDefinition = faSortUp;
faSortDown: IconDefinition = faSortDown;
faFilter = faFilter;
faCheck = faCheck;
orderedOrders: BehaviorSubject<OrderedItem[]> = new BehaviorSubject<OrderedItem[]>(null);
selectedOrdersId: string[] = [];
selectedColumnName: SortColumnsBy;
selectedColumnStatus: SortStatus;
SortColumnsStatus: typeof SortStatus = SortStatus;
SortColumns: typeof SortColumnsBy = SortColumnsBy;
FilterColumnsBy : typeof FilterColumnsBy = FilterColumnsBy;
selectedColumnNameFilter : FilterColumnsBy = FilterColumnsBy.OrderedByCustomer;
statusList: string[] = [];
isAsc: boolean;
checkBoxSelect: boolean = false;
filterClick: boolean = false;
pageSizeFromOrders = 10;
upArrowIcon: boolean;
downArrowIcon: boolean;
bothArrowIcon: boolean;
myInputValue: string;
OrderedDateValue: Date;
LastUpdateDateValue: Date;
DaysOfLastUpdateValueOne: number;
DaysOfLastUpdateValueTwo: number;
PriceValueOne: number;
PriceValueTwo: number;
EmptyValue: any;
orderedOrdersToShow: Observable<OrderedItem[]>;
@ViewChild('addOrderModal') addOrderModalRef: ModalDirective;
optionOrderModalRef: BsModalRef;
constructor(
private toastr: ToastrService,
private route: ActivatedRoute,
private ordersService: OrdersService,
private modalService: BsModalService,
) {
// Orders arrive preloaded by the route resolver under the 'ordersList' key.
this.route.data.subscribe((value) => {
this.ordersListInitial = value['ordersList'];
// Normalize creationDate: keep only the date part (drop time-of-day) so
// whole-day comparisons in the date filters work.
// NOTE(review): .map() is used purely for its side effect; forEach would
// express the intent better.
this.ordersListInitial.map(order => {
let date = new Date(order.creationDate.toString().split('T')[0]);
order.creationDate = new Date(date.getFullYear(), date.getMonth(), date.getDate());
});
// Deep copy: ordersList is the mutable view, ordersListInitial stays
// pristine as the source for filtering.
this.ordersList = JSON.parse(JSON.stringify(this.ordersListInitial));
console.log(this.ordersList);
this.ordersService
.getAllOrderStatus()
.subscribe((response) => (this.orderStatuses = response));
});
}
ngOnInit() {
  this.orderedOrdersToShow = this.orderedOrders.asObservable();
  this.selectedColumnName = SortColumnsBy.Id;
  // A numeric TS enum object maps both name->value and value->name; keep
  // only the non-numeric keys, i.e. the human-readable status names.
  const statusNames = Object.keys(this.SortColumnsStatus).filter(
    (key) => isNaN(Number(key))
  );
  statusNames.forEach((name) => this.statusList.push(name));
}
ngAfterViewInit(): void {}
/**
 * Whole days elapsed since lastUpdateDate; -1 when the date is missing.
 * The value is re-parsed via toString(), so Date objects and date strings
 * both work.
 */
getDays(lastUpdateDate?: Date): number {
  if (lastUpdateDate == null) {
    return -1;
  }
  const MS_PER_DAY = 1000 * 3600 * 24;
  const elapsedMs = Date.now() - new Date(lastUpdateDate.toString()).getTime();
  return Math.floor(elapsedMs / MS_PER_DAY);
}
onSelectedStatus(newOrderStatusId: string, orderId: string) {
let parametersRequest: OrderStatusChangeParameters = {
orderId: orderId,
newOrderStatusId: newOrderStatusId,
};
this.ordersService
.changeOrderStatus(parametersRequest)
.subscribe((response) => {
this.ordersService.getAllListItems().subscribe((response) => {
this.ordersList = response;
});
this.toastr.success('Order status changed!', 'Success');
});
}
getSelected(orderId: string): OrderStatus {
return this.orderStatuses.find((status) => status.id == orderId);
}
/**
 * True when a status with the given id exists and its name equals the
 * supplied label.
 * NOTE(review): duplicated in OrdersOptionModal -- candidate for a shared
 * helper.
 */
checkStatus(orderStatusId: string, cokolwiekCoPowiemywHtmlu: string) {
  const match = this.orderStatuses.find((status) => status.id == orderStatusId);
  return !!match && match.name === cokolwiekCoPowiemywHtmlu;
}
onPageChanged(event: MyPager) {
this.checkBoxSelect = false;
this.selectedOrdersId = [];
this.orderedOrders.next(event.pageOfItems);
}
changePageSize(filterVal: number) {
this.pageSizeFromOrders = filterVal;
console.log(this.pageSizeFromOrders);
}
checkCheckList(orderId: string, orderNumber: number) {
if (this.selectedOrdersId.includes(orderId)) {
let index = this.selectedOrdersId.findIndex((id) => id == orderId);
this.selectedOrdersId.splice(index, 1);
console.log('usuń');
console.log(this.selectedOrdersId);
document.getElementById('button').style.display = 'none';
document.getElementById('row' + orderNumber).style.backgroundColor =
'white';
} else {
this.selectedOrdersId.push(orderId);
console.log('dodaj');
console.log(this.selectedOrdersId);
document.getElementById('button').style.display = 'block';
document.getElementById('row' + orderNumber).style.backgroundColor =
'AntiqueWhite';
}
}
isOrderSlected(orderId: string): boolean {
return this.selectedOrdersId.includes(orderId);
}
/**
 * Selects every order on the current page, or clears the selection when
 * everything is already selected, and toggles the bulk-action button.
 *
 * Fixes: removed a stray `.map(...)` call in the else-branch whose result
 * was discarded (dead code), and collapsed a redundant double assignment
 * of selectedOrdersId in the select-all branch.
 */
toggleAllCheckList() {
  if (this.selectedOrdersId.length != this.orderedOrders.getValue().length) {
    // Select all orders visible on the current page.
    this.selectedOrdersId = this.orderedOrders.getValue().map((order) => order.item.id);
    console.log('dodajemy wszystko');
    console.log(this.selectedOrdersId);
    // NOTE(review): direct DOM manipulation from a component; an *ngIf or
    // class binding would be the Angular-idiomatic way to toggle this.
    document.getElementById('button').style.display = 'block';
  } else {
    // Deselect everything.
    this.selectedOrdersId = [];
    console.log('usuwamy wszystko');
    console.log(this.selectedOrdersId);
    document.getElementById('button').style.display = 'none';
  }
}
showAddOrder() {
this.addOrderModalRef.show();
}
showOptionOrder() {
let filteredOrders = this.orderedOrders.getValue().filter((orderFromParent) =>
this.selectedOrdersId.includes(orderFromParent.item.id)
);
const initialState = {
selectedOrdersFromParent: JSON.parse(JSON.stringify(filteredOrders)),
selectedOrdersIdsFromParent: Array.from(this.selectedOrdersId),
};
this.optionOrderModalRef = this.modalService.show(OrdersOptionModal, {
initialState,
});
this.optionOrderModalRef.content.updateOrderListEvent.subscribe((data) => {
this.ordersList = data;
});
}
/** A column's filter row is hidden unless it is the selected column and the filter is open. */
isFilterHidden(columnName: FilterColumnsBy): Boolean {
  const openForThisColumn = this.selectedColumnNameFilter === columnName && this.filterClick;
  return !openForThisColumn;
}
/** Toggle the filter row for a column; clicking a different column always opens it. */
switchFilterShow(columnNameClicked: FilterColumnsBy) {
  if (columnNameClicked !== this.selectedColumnNameFilter) {
    this.selectedColumnNameFilter = columnNameClicked;
    this.filterClick = true;
    return;
  }
  this.filterClick = !this.filterClick;
}
// Picks the sort icon for a column header: the active column gets a
// directional arrow, inactive columns get the neutral double arrow.
// NOTE(review): isAsc === false maps to the UP arrow here, while
// sortList() logs the opposite labels -- the isAsc/direction semantics
// look inconsistent across this component; TODO confirm which is intended.
getArrowClass(columnNameClicked: SortColumnsBy): IconDefinition {
if (columnNameClicked === this.selectedColumnName) {
if (this.isAsc === false) {
return this.faSortUp;
} else {
return this.faSortDown;
}
} else {
return this.faSort;
}
}
// Header-click handler: clicking the already-selected column flips the
// sort direction; clicking a new column selects it ascending, then sort()
// re-orders the list.
// NOTE(review): the console labels use `isAsc ? 'descending' : 'ascending'`,
// i.e. isAsc=true is logged as "descending" -- that reads inverted relative
// to the flag's name (and to getArrowClass); TODO confirm intended meaning.
sortList(columnNameClicked: SortColumnsBy) {
if (columnNameClicked === this.selectedColumnName) {
this.isAsc = !this.isAsc;
console.log(
'sortowanie tej samej columny zgodnie z: ',
this.isAsc ? 'descending' : 'ascending'
);
} else {
this.selectedColumnName = columnNameClicked;
this.isAsc = true;
console.log(`sortowanie tej columny ${this.selectedColumnName}
zgodnie z: ${this.isAsc ? 'descending' : 'ascending'}`);
}
this.sort();
}
/**
 * Waits 500 ms after a keystroke before applying the column filter
 * (simple delay, not a true debounce -- each keystroke schedules its own
 * timer).
 * NOTE(review): param name `valueFornInput` carries a typo; kept for
 * template compatibility.
 */
onKeyUpEvent(valueFornInput, valueForInput2) {
  setTimeout(() => this.filter(valueFornInput, valueForInput2), 500);
}
/**
 * Applies the active column filter to the pristine order list and replaces
 * the displayed list with a deep copy of the matching rows.
 *
 * Depending on selectedColumnNameFilter, valueForInput/valueForInput2 carry
 * a text fragment, a numeric [from, to] pair, or (for the date columns) a
 * single [from, to] Date array from the range datepicker.
 *
 * Fix: the empty-result check compared the filtered array with a fresh
 * array literal (`=== []`), which is always false; it now tests `.length`.
 */
filter(valueForInput, valueForInput2) {
  switch (this.selectedColumnNameFilter) {
    case FilterColumnsBy.OrderedByCustomer:
      // Case-insensitive substring match on the customer's full name.
      let filterInColumnOne = this.ordersListInitial.filter(order => order.orderedByCustomerFullName.toLowerCase().includes(valueForInput.toLowerCase()))
      if (filterInColumnOne.length === 0) {
        console.log("nie ma nic")
      } else {
        console.log("jest")
        this.ordersList = JSON.parse(JSON.stringify(filterInColumnOne));
      }
      break;
    case FilterColumnsBy.Price:
      // Inclusive numeric range [fromPrice, toPrice].
      let fromPrice = valueForInput;
      let toPrice = valueForInput2;
      let filterInColumnTwo = this.ordersListInitial
        .filter(order => order.price >= fromPrice && order.price <= toPrice)
      this.ordersList = JSON.parse(JSON.stringify(filterInColumnTwo));
      break;
    case FilterColumnsBy.OrderedDate:
      // valueForInput is a [from, to] Date pair from the range datepicker.
      let from = valueForInput[0].getTime();
      let to = valueForInput[1].getTime();
      let filterInColumnThree = this.ordersListInitial
        .filter(order => order.creationDate.getTime() >= from && order.creationDate.getTime() <= to)
      this.ordersList = JSON.parse(JSON.stringify(filterInColumnThree));
      break;
    case FilterColumnsBy.LastUpdateDate:
      // lastUpdateDate may arrive as a string, so it is re-wrapped in Date.
      let fromLastUpdate = valueForInput[0].getTime();
      let toLastUpdate = valueForInput[1].getTime();
      let filterInColumnFour = this.ordersListInitial
        .filter(order => new Date(order.lastUpdateDate).getTime() >= fromLastUpdate && new Date(order.lastUpdateDate).getTime() <= toLastUpdate)
      this.ordersList = JSON.parse(JSON.stringify(filterInColumnFour));
      break;
    case FilterColumnsBy.DaysOfLastUpdate:
      // Inclusive range over "days since last update" (see getDays()).
      let fromDaysOfLastUpdate = valueForInput;
      let toDaysOfLastUpdate = valueForInput2;
      let filterInColumnFive = this.ordersListInitial
        .filter(order => this.getDays(order.lastUpdateDate) >= fromDaysOfLastUpdate && this.getDays(order.lastUpdateDate) <= toDaysOfLastUpdate)
      this.ordersList = JSON.parse(JSON.stringify(filterInColumnFive));
      break;
    default:
  }
}
// Re-orders the displayed list by the selected column. Each branch
// replaces ordersList with a deep copy so Angular change detection sees a
// new reference.
sort() {
switch (this.selectedColumnName) {
case SortColumnsBy.OrderedByCustomer:
if (this.isAsc) {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'orderedByCustomerFullName' )));
} else {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'orderedByCustomerFullName', false )));
}
break;
case SortColumnsBy.Price:
// Price sorts in place first, then deep-copies.
if (this.isAsc) {
this.ordersList.sort((a, b) => a.price - b.price);
} else {
this.ordersList.sort((a, b) => b.price - a.price);
}
this.ordersList = JSON.parse(JSON.stringify(this.ordersList));
break;
case SortColumnsBy.OrderedDate:
if (this.isAsc) {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'creationDate' )));
} else {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'creationDate', false )));
}
break;
case SortColumnsBy.LastUpdateDate:
if (this.isAsc) {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'lastUpdateDate' )));
} else {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'lastUpdateDate', false )));
}
break;
case SortColumnsBy.DaysOfLastUpdate:
// NOTE(review): identical to the LastUpdateDate branch. Since
// "days since update" is the inverse ordering of the update date,
// the direction here may need flipping -- TODO confirm.
if (this.isAsc) {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'lastUpdateDate' )));
} else {
this.ordersList = JSON.parse(JSON.stringify(SortHelper.sortingOfElements(this.ordersList, 'lastUpdateDate', false )));
}
break;
default:
}
}
//filterStatus
/**
 * Filters the displayed order list to the chosen status (or shows all).
 *
 * NOTE(review): `value` is typed SortStatus, but ngOnInit fills statusList
 * with the enum *names*, so the template passes a name string; indexing
 * `SortStatus[name]` and coercing with `+` recovers the numeric value.
 *
 * Fix (New case): the empty-list reset ran unconditionally after the `if`,
 * wiping the results even when matches were found; it is now the
 * else-branch. Also `length > 1` wrongly hid a single matching order --
 * any match (> 0) is shown.
 */
sortStatus(value: SortStatus) {
  this.selectedColumnStatus = value;
  switch (+SortStatus[this.selectedColumnStatus]) {
    case SortStatus.Delivered:
      let orderStatusDelivered = this.orderStatuses.find((status) => status.name == "Delivered").id;
      let delivered = this.ordersListInitial.filter(order => order.statusId === orderStatusDelivered)
      this.ordersList = JSON.parse(JSON.stringify(delivered));
      break;
    case SortStatus.Cancelled:
      let orderStatusCancelled = this.orderStatuses.find((status) => status.name == "Cancelled").id;
      let cancelled = this.ordersListInitial.filter(order => order.statusId === orderStatusCancelled)
      this.ordersList = JSON.parse(JSON.stringify(cancelled));
      break;
    case SortStatus.New:
      let orderStatusNew = this.orderStatuses.find((status) => status.name == "New").id;
      let newStatus = this.ordersListInitial.filter(order => order.statusId === orderStatusNew);
      if (newStatus.length > 0) {
        this.ordersList = JSON.parse(JSON.stringify(newStatus));
      } else {
        this.ordersList = [];
      }
      break;
    case SortStatus.OnItsWay:
      let orderStatusOnItsWay = this.orderStatuses.find((status) => status.name == "OnItsWay").id;
      let onItsWay = this.ordersListInitial.filter(order => order.statusId === orderStatusOnItsWay)
      this.ordersList = JSON.parse(JSON.stringify(onItsWay));
      break;
    case SortStatus.OutForDelivery:
      let orderStatusOutForDelivery = this.orderStatuses.find((status) => status.name == "OutForDelivery").id;
      let outForDelivery = this.ordersListInitial.filter(order => order.statusId === orderStatusOutForDelivery)
      this.ordersList = JSON.parse(JSON.stringify(outForDelivery));
      break;
    case SortStatus.Packing:
      let orderStatusPacking = this.orderStatuses.find((status) => status.name == "Packing").id;
      let packing = this.ordersListInitial.filter(order => order.statusId === orderStatusPacking)
      this.ordersList = JSON.parse(JSON.stringify(packing));
      break;
    case SortStatus.Confirmed:
      let orderStatusConfirmed = this.orderStatuses.find((status) => status.name == "Confirmed").id;
      let confirmed = this.ordersListInitial.filter(order => order.statusId === orderStatusConfirmed)
      this.ordersList = JSON.parse(JSON.stringify(confirmed));
      break;
    case SortStatus.ShowAll:
      // Reset to the pristine resolver-provided list.
      this.ordersList = JSON.parse(JSON.stringify(this.ordersListInitial));
      break;
    default:
  }
}
}
// Columns of the orders table that support sorting (see sort()/sortList()).
export enum SortColumnsBy {
Id = 0,
OrderedByCustomer,
Price,
OrderedDate,
LastUpdateDate,
DaysOfLastUpdate,
}
// Status filter options. Apart from ShowAll, the member names mirror the
// OrderStatus.name values matched in sortStatus() ("Delivered",
// "Cancelled", ...). Keep them in sync with the backend status names.
export enum SortStatus{
ShowAll,
Delivered,
Cancelled,
New,
OnItsWay,
OutForDelivery,
Packing,
Confirmed,
}
export enum FilterColumnsBy {
OrderedByCustomer,
Price,
OrderedDate,
LastUpdateDate,
DaysOfLastUpdate,
}<file_sep>
import { Component, OnInit } from '@angular/core';
import { faAngular } from '@fortawesome/free-brands-svg-icons';
import { faBiking, faBook, faCar, faCogs, faDrumstickBite, faDumbbell, faGamepad, faMeteor, faMicrochip, faMountain, faMusic, faRunning, faSkiing, faSkiingNordic, faSnowman, faStopwatch, faTools, faTree, faUtensils } from '@fortawesome/free-solid-svg-icons';
@Component({
selector: 'app-about',
templateUrl: './about.component.html',
styleUrls: ['./about.component.scss']
})
export class AboutComponent implements OnInit {
faDumbbell = faDumbbell;
faUtensils = faUtensils;
faStopwatch = faStopwatch;
faMountain = faMountain;
faMeteor = faMeteor;
faSkiingNordic = faSkiingNordic;
faBiking = faBiking;
faRunning = faRunning;
faSkiing = faSkiing;
faDrumstickBite = faDrumstickBite;
faBook = faBook;
faTree = faTree;
faMusic = faMusic;
faCar = faCar;
faTools = faTools;
faMicrochip = faMicrochip;
faGamepad = faGamepad;
faAngular = faAngular;
faCogs = faCogs;
constructor() { }
ngOnInit() {
}
}
<file_sep>import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import { MyPager, OrderedItem } from '../models/shared.models';
/**
 * Generic pager: wraps the parent's item list in 1-based OrderedItems,
 * slices out the current page, and emits a MyPager (page slice + paging
 * metadata) on every change.
 *
 * Fix: with an empty item list, setPage() emitted an "empty" pager and
 * then fell through and emitted a second, contradictory event; it now
 * returns after the first emission.
 */
@Component({
  selector: 'app-pagination',
  templateUrl: './pagination.component.html',
  styleUrls: ['./pagination.component.scss'],
})
export class PaginationComponent implements OnInit {
  /** Full, unpaged item list supplied by the parent. */
  @Input() items = [];
  /** Items shown per page. */
  @Input() pageSize = 10;
  @Input() startPage = 1;
  /** Notifies the parent of the current page slice and page counts. */
  @Output('pageChangedEventEmitter')
  pageChangedEventEmitter: EventEmitter<MyPager> = new EventEmitter<MyPager>();

  orderedItems: Array<OrderedItem> = [];
  page: number = 1;
  pageOfItems: Array<OrderedItem> = [];
  maxPages: number;
  pages: number[];

  constructor() {}

  ngOnInit() {
    this.setPage(1);
  }

  /** Re-paginate from page 1 whenever the @Input bindings change. */
  ngOnChanges() {
    this.setPage(1);
  }

  /** Wraps every item with its 1-based position in the full list. */
  setOrderedItems() {
    for (let i = 0; i < this.items.length; i++) {
      let item = new OrderedItem(i + 1, this.items[i]);
      this.orderedItems.push(item);
    }
  }

  /** Computes the page count; never less than one page. */
  countingMaxPages() {
    this.maxPages = Math.ceil(this.items.length / this.pageSize);
    if (this.maxPages < 1) {
      this.maxPages = 1;
    }
  }

  /** Builds [1, 2, ..., maxPages] for the page-number buttons. */
  fillingPagesArray() {
    this.pages = new Array(this.maxPages).fill(1).map((x, i) => ++i);
  }

  /** Extracts the slice of orderedItems belonging to the current page. */
  slicingOrderedItemsForOnePage() {
    let numberToSlice = (this.page - 1) * this.pageSize;
    this.pageOfItems = this.orderedItems.slice(
      numberToSlice,
      numberToSlice + this.pageSize
    );
  }

  /** Recomputes pagination for `page` and notifies the parent exactly once. */
  setPage(page: number) {
    if (this.items.length < 1) {
      let myPager = new MyPager(null, 1, 1);
      this.pageChangedEventEmitter.emit(myPager);
      return; // fix: previously fell through and emitted a second event
    }
    this.orderedItems = [];
    this.page = page;
    this.setOrderedItems();
    this.countingMaxPages();
    this.fillingPagesArray();
    this.slicingOrderedItemsForOnePage();
    let myPager = new MyPager(this.pageOfItems, this.page, this.maxPages);
    this.pageChangedEventEmitter.emit(myPager);
  }
}
<file_sep>export class OrderedItem {
orderNumber: number;
item: any;
constructor(orderNumber: number, item: any) {
this.orderNumber = orderNumber;
this.item = item;
}
}
// Payload emitted by PaginationComponent on every page change.
export class MyPager {
// Current page's wrapped items; PaginationComponent emits null here when
// the source list is empty.
pageOfItems: OrderedItem[];
currentPage: number;
totalPages: number;
constructor(pageOfItems: any[], currentPage: number, totalPages: number) {
this.pageOfItems = pageOfItems;
this.currentPage = currentPage;
this.totalPages = totalPages;
}
}<file_sep>import { CommonModule, CurrencyPipe } from '@angular/common';
import { HttpClientModule } from '@angular/common/http';
import { NgModule } from '@angular/core';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { AlertModule } from 'ngx-bootstrap/alert';
import { ToastrModule } from 'ngx-toastr';
import { CarouselModule } from 'ngx-bootstrap/carousel';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
import { OrdersComponent } from './orders.component';
import { OrdersAddModal } from './orders-add/orders-add-modal';
import { OrdersRoutingModule } from './orders-routing.module';
import { OrdersService } from './orders.service';
import { SharedModule } from '../shared/shared.module';
import { BsModalService, ModalModule } from 'ngx-bootstrap/modal';
import { BsDatepickerModule } from 'ngx-bootstrap/datepicker';
import { OrdersOptionModal } from './orders-option/orders-option-modal';
@NgModule({
declarations: [
OrdersComponent,
OrdersAddModal,
OrdersOptionModal
],
imports: [
CommonModule,
FormsModule,
AlertModule.forRoot(),
ReactiveFormsModule,
ToastrModule.forRoot(),
OrdersRoutingModule,
HttpClientModule,
CarouselModule.forRoot(),
FontAwesomeModule,
SharedModule,
ModalModule.forRoot() ,
BsDatepickerModule.forRoot(),
],
providers: [
OrdersService,
CurrencyPipe,
BsModalService
],
entryComponents: [
OrdersOptionModal
]
})
export class OrdersModule { }<file_sep>export interface Address{
id: string,
city: string,
zipCode: number,
street: string,
country: Country
}
export interface AddressForCreation {
countryId: string;
city: string;
street: string;
zipCode: string;
}
export interface AddressWithResidents {
id: string;
country: string;
city: string;
street: string;
zipCode: string;
residents: Resident[];
}
export interface Resident{
customerId: string;
name: string;
lastName: string;
email: string;
phoneNumber: string;
}
export class Country{
name: string;
code: string;
id: string;
}<file_sep>export interface Order{
id: string,
price: number,
creationDate: Date,
lastUpdateDate?: Date,
orderedByCustomerId: string,
orderedByCustomerFullName: string,
statusId: string
}
export interface OrderStatus{
id: string,
name: string
}
export enum Gender{
male = 0,
female = 1
}
export interface OrdersForCreation {
orderedByCustomerId: string,
price: number,
}
export interface OrderStatusChangeParameters {
orderId: string;
newOrderStatusId: string;
}<file_sep># CustomerManagementPanel
Here is the latest version of the website:
https://kacper-berganski-portfolio.pl/
This site was designed to present acquired skills and was created after consulting with my mentor.
Together we decided that the best approach would be to work on an imitation of a real-life example.
We chose to build a simple portal for managing clients and their orders.
<file_sep>import { Address } from './address';
/**
 * Client record used across the clients module.
 * `address` is the resolved Address object while `addressId` carries the
 * raw foreign key as delivered by the API.
 *
 * TypeScript parameter properties replace the hand-written field
 * declarations + constructor assignments; fields, order, visibility and
 * the constructor signature are unchanged.
 */
export class Customer {
  constructor(
    public id: string,
    public name: string,
    public lastName: string,
    public age: number,
    public gender: Gender,
    public address: Address,
    public addressId: string,
    public phoneNumber: string,
    public email: string
  ) {}
}
export enum Gender{
male = 0,
female = 1
}<file_sep>import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { faArrowLeft, faCity, faEnvelope, faGlobeAmericas, faMailBulk, faPhoneAlt, faRoad, faUserAlt, faVenusMars } from '@fortawesome/free-solid-svg-icons';
import { ToastrService } from 'ngx-toastr';
import { Customer } from '../models/customer';
@Component({
selector: 'app-client-details',
templateUrl: './client-details.component.html',
styleUrls: ['./client-details.component.scss']
})
export class ClientDetailsComponent implements OnInit {
faGlobaleAmericas = faGlobeAmericas;
faCity = faCity;
faMailBulk = faMailBulk;
faRoad = faRoad;
faUserAlt = faUserAlt;
faVenusMars = faVenusMars;
faPhoneAlt = faPhoneAlt;
faEnvelope = faEnvelope;
faArrowLeft = faArrowLeft;
chosenCustomer: Customer;
// @Input("chosenCustomerInput") chosenCustomer: Customer;
copyOfCustomer: Customer;
@Output() editedCustomerEventEmitter = new EventEmitter<Customer>();
constructor(
private toastr: ToastrService,
private route: ActivatedRoute
) {
}
ngOnInit(): void {
this.route.data.subscribe(data =>
this.copyOfCustomer = data['customer']);
console.log(this.copyOfCustomer);
}
saveName(copyOfCustomer){
if(copyOfCustomer.name == this.chosenCustomer.name) {
console.error(`Error: name cannot equals ${copyOfCustomer.name}`);
}else {
this.editedCustomerEventEmitter.emit(copyOfCustomer);
this.toastr.success('Changes were made to the name.', 'Saved');
};
}
saveLastName(copyOfCustomer){
if(copyOfCustomer.lastName == this.chosenCustomer.lastName) {
console.error(`Error: lastName cannot equals ${copyOfCustomer.lastName}`);
}else {
this.editedCustomerEventEmitter.emit(copyOfCustomer);
this.toastr.success('Changes were made to the last name.', 'Saved');
}
}
saveCity(copyOfCustomer){
if(copyOfCustomer.address.id == this.chosenCustomer.address.id) {
console.error(`Error: address cannot equals ${copyOfCustomer.address.id}`);
}else {
this.editedCustomerEventEmitter.emit(copyOfCustomer);
this.toastr.success('Changes were made to the address.', 'Saved');
}
}
saveGender(copyOfCustomer){
if(copyOfCustomer.gender == this.chosenCustomer.gender) {
console.error(`Error: gender cannot equals ${copyOfCustomer.gender}`);
}else {
this.editedCustomerEventEmitter.emit(copyOfCustomer);
this.toastr.success('Gender changes were made.', 'Saved');
}
}
/**
 * Emits the edited customer when the phone number differs from the
 * currently chosen customer's; otherwise logs an error (same guard style
 * as the sibling save* methods).
 *
 * Fix: the error message said "city" -- a copy/paste leftover -- and now
 * names the field actually being checked.
 * NOTE(review): `chosenCustomer` is never assigned in the visible code,
 * so this comparison may throw at runtime -- TODO confirm.
 */
savePhoneNumber(copyOfCustomer) {
  if (copyOfCustomer.phoneNumber == this.chosenCustomer.phoneNumber) {
    console.error(`Error: phoneNumber cannot equals ${copyOfCustomer.phoneNumber}`);
  } else {
    this.editedCustomerEventEmitter.emit(copyOfCustomer);
    this.toastr.success('Changes were made to the phone number.', 'Saved');
  }
}
/**
 * Emits the edited customer when the email differs from the currently
 * chosen customer's; otherwise logs an error (same guard style as the
 * sibling save* methods).
 *
 * Fix: the error message said "city" -- a copy/paste leftover -- and now
 * names the field actually being checked.
 */
saveMail(copyOfCustomer) {
  if (copyOfCustomer.email == this.chosenCustomer.email) {
    console.error(`Error: email cannot equals ${copyOfCustomer.email}`);
  } else {
    this.editedCustomerEventEmitter.emit(copyOfCustomer);
    this.toastr.success('Changes were made to the email.', 'Saved');
  }
}
}
| 921076dd55352c68d48cbb2d651a603b9c77ae85 | [
"Markdown",
"TypeScript"
] | 42 | TypeScript | AleksandraX/CustomerManagementPanel | 31ffe4be0eada422bfae6b73a4afd2488671c7e8 | ef76cac9e9c10cd21c3d5399b2d32d512118f0b3 |
refs/heads/main | <file_sep>package commands;
import utils.Logger;
import utils.TicketAsker;
import java.io.*;
import java.nio.file.Paths;
import java.util.Scanner;
import java.util.Stack;
import utils.Console;
/**
* Execute the script from file
* **/
public class ExecuteScriptCommand extends AbstractCommand{
private Console console;
private TicketAsker ticketAsker;
private Stack<String> scriptStack = new Stack<>();
public ExecuteScriptCommand(Console console, TicketAsker ticketAsker){
super("execute_script","read and execute script from specified file. " +
"Script contains same commands as user uses in interactive mode");
this.console = console;
this.ticketAsker = ticketAsker;
}
/**
* @see Executable
* @param filePath Путь к файлу
* **/
@Override
public boolean execute(String filePath) {
try{
for (String currentFile : scriptStack){
if (currentFile.equals(filePath)){
Logger.error("Recursion found in ExecuteScriptCommand");
return false;
}
}
scriptStack.addElement(filePath);
Scanner scriptScanner = new Scanner(Paths.get(filePath));
ticketAsker.setScanner(scriptScanner);
console.scriptMode(scriptScanner,console.getStatus());
scriptStack.pop();
return true;
} catch (IOException e){
Logger.error("No such file found");
return false;
}
}
}<file_sep>package DOM;
import utils.Logger;
import java.util.Date;
import java.util.Objects;
/**
* Main element in collection
* **/
/**
 * Main element in collection.
 * Invariants enforced by the constructor/setters: name is non-null and non-empty,
 * coordinates is non-null, price is strictly positive, comment is either null or
 * non-empty. Note: venue is NOT validated by the constructor and may be null.
 **/
public class Ticket {
    // Source of auto-generated ids; incremented on each construction.
    private static int idCount = 1;
    // Exclusive lower bound for a valid price (price must be strictly positive).
    // Made a shared constant (it is the same for every ticket) and used
    // consistently in both the constructor and setPrice.
    private static final double MIN_PRICE = 0D;

    private int id;
    private String name;
    private Coordinates coordinates;
    private Date creationDate;
    private double price;
    private String comment;
    private boolean refundable;
    private TicketType ticketType;
    private Venue venue;

    /**
     * Builds a ticket with an auto-incremented id and the current date.
     * Invalid data terminates the program (behavior kept for compatibility
     * with existing callers that rely on it).
     */
    public Ticket(String name, Coordinates coordinates, double price, String comment, boolean refundable, TicketType ticketType, Venue venue){
        if (name == null || name.isEmpty() || coordinates == null || price <= MIN_PRICE || (comment != null && comment.isEmpty())){
            Logger.error("incorrect data in Ticket constructor");
            System.exit(0);
        }
        this.id = idCount;
        idCount++;
        this.name = name;
        this.coordinates = coordinates;
        this.creationDate = new Date();
        this.price = price;
        this.comment = comment;
        this.refundable = refundable;
        this.ticketType = ticketType;
        this.venue = venue;
    }

    public int getId() {
        return id;
    }

    /** Rejects negative ids (terminates the program, matching constructor behavior). */
    public void setId(int id) {
        if (id < 0){
            Logger.error("negative id in Ticket setter");
            System.exit(0);
        }
        this.id = id;
    }

    public String getName() {
        return name;
    }

    /** Name must be non-null and non-empty. */
    public void setName(String name) {
        if(name == null || name.isEmpty()){
            Logger.error("name value is null or empty in Ticket setter");
            System.exit(0);
        }
        this.name = name;
    }

    public Coordinates getCoordinates() {
        return coordinates;
    }

    /** Coordinates must be non-null. */
    public void setCoordinates(Coordinates coordinates) {
        if(coordinates == null){
            Logger.error("coordinates value is null in Ticket setter");
            System.exit(0);
        }
        this.coordinates = coordinates;
    }

    public Date getCreationDate() {
        return creationDate;
    }

    public void setCreationDate(Date creationDate) {
        this.creationDate = creationDate;
    }

    public double getPrice() {
        return price;
    }

    /** Price must be strictly greater than MIN_PRICE. */
    public void setPrice(double price) {
        if(price <= MIN_PRICE){
            Logger.error("price is not bigger than 0 in Ticket setter");
            System.exit(0);
        }
        this.price = price;
    }

    public String getComment() {
        return comment;
    }

    /** Comment may be null, but an empty string is rejected (value left unchanged). */
    public void setComment(String comment) {
        if(comment!=null && comment.isEmpty()){
            Logger.error("comment is empty");
            return;
        }
        this.comment = comment;
    }

    public boolean isRefundable() {
        return refundable;
    }

    public void setRefundable(boolean refundable) {
        this.refundable = refundable;
    }

    public TicketType getTicketType() {
        return ticketType;
    }

    public void setTicketType(TicketType ticketType) {
        this.ticketType = ticketType;
    }

    public Venue getVenue() {
        return venue;
    }

    /** A null venue is rejected (value left unchanged), mirroring setComment. */
    public void setVenue(Venue venue) {
        if(venue == null){
            Logger.error("venue is null in Ticket setter");
            return;
        }
        this.venue = venue;
    }

    @Override
    public String toString() {
        // BUG FIX: venue may be null (the constructor never validates it), and the
        // previous venue.toString() call crashed with a NullPointerException.
        // String.valueOf prints "null" instead and is identical for non-null values.
        return "Ticket{" +
                "id=" + id +
                ", name='" + name + '\'' +
                ", coordinates=" + coordinates.toString() +
                ", creationDate=" + creationDate +
                ", price=" + price +
                ", comment='" + comment + '\'' +
                ", refundable=" + refundable +
                ", ticketType=" + ticketType +
                ", venue=" + String.valueOf(venue) +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Ticket)) return false;
        Ticket ticket = (Ticket) o;
        return getId() == ticket.getId() && Double.compare(ticket.getPrice(), getPrice()) == 0 && isRefundable() == ticket.isRefundable() && getName().equals(ticket.getName()) && getCoordinates().equals(ticket.getCoordinates()) && getCreationDate().equals(ticket.getCreationDate()) && Objects.equals(getComment(), ticket.getComment()) && getTicketType() == ticket.getTicketType() && Objects.equals(getVenue(), ticket.getVenue());
    }

    @Override
    public int hashCode() {
        return Objects.hash(getId(), getName(), getCoordinates(), getCreationDate(), getPrice(), getComment(), isRefundable(), getTicketType(), getVenue());
    }

    /** Orders tickets by the length of their name (shorter first). */
    public int compareTo(Ticket ticket){
        return this.getName().length() - ticket.getName().length();
    }

    /** Comparator-style helper delegating to {@link #compareTo(Ticket)}. */
    public int compare(Ticket ticket1, Ticket ticket2) {
        return ticket1.compareTo(ticket2);
    }
}
<file_sep>package commands;
import DOM.Ticket;
import utils.CollectionManager;
import utils.Console;
import utils.Logger;
import utils.TicketAsker;
/**
* Updates the element by its key
* **/
public class UpdateCommand extends AbstractCommand{
private CollectionManager collectionManager;
private TicketAsker ticketAsker;
private Console console;
public UpdateCommand(Console console, CollectionManager collectionManager, TicketAsker ticketAsker){
super("update","updates the element with specified id");
this.collectionManager = collectionManager;
this.ticketAsker = ticketAsker;
this.console = console;
}
/**
* @see Executable
* @param id that will be used to perform the update
* **/
@Override
public boolean execute(String id){
try{
if(id == null || id.isEmpty()) {
Logger.error("id is empty or null");
return false;
}
Ticket ticket = collectionManager.read(Integer.parseInt(id));
if(ticket == null) {
console.println("No ticket with such id exists");
}
if(ticketAsker.ask("Do you want to change the name?")) ticket.setName(ticketAsker.askName());
if(ticketAsker.ask("Do you want to change the coordinates?")){
if(ticketAsker.ask("Do you want to change the x coordinate?")) ticket.getCoordinates().setX(ticketAsker.askX());
if(ticketAsker.ask("Do you want to change the y coordinate?")) ticket.getCoordinates().setY(ticketAsker.askY());
}
if(ticketAsker.ask("Do you want to change the price?")) ticket.setPrice(ticketAsker.askPrice());
if(ticketAsker.ask("Do you want to change the comment?")) ticket.setComment(ticketAsker.askComment());
ticket.setRefundable(ticketAsker.ask("Is it refundable?"));
if(ticketAsker.ask("Do you want to change the ticket type?")) ticket.setTicketType(ticketAsker.askTicketType());
if(ticketAsker.ask("Do you want to change the venue?")){
if(ticketAsker.ask("Do you want to change the name of the venue?")) ticket.getVenue().setName(ticketAsker.askName());
if(ticketAsker.ask("Do you want to change the capacity of the venue?")) ticket.getVenue().setCapacity(ticketAsker.askCapacity());
if(ticketAsker.ask("Do you want to change the type of the venue?")) ticket.getVenue().setType(ticketAsker.askVenueType());
if(ticketAsker.ask("Do you want to change the address?")) {
if(ticketAsker.ask("Do you want to change the street of the address?")) ticket.getVenue().getAddress().setStreet(ticketAsker.askStreet());
if(ticketAsker.ask("Do you want to change the street of the zip code?")) ticket.getVenue().getAddress().setZipCode(ticketAsker.askZipCode());
if(ticketAsker.ask("Do you want to change the street of the location?")){
if(ticketAsker.ask("Do you want to change x coordinate?")) ticket.getVenue().getAddress().getTown().setX((int)ticketAsker.askX());
if(ticketAsker.ask("Do you want to change y coordinate?")) ticket.getVenue().getAddress().getTown().setY(ticketAsker.askLocY());
if(ticketAsker.ask("Do you want to change z coordinate?")) ticket.getVenue().getAddress().getTown().setZ(ticketAsker.askZ());
}
}
}
return true;
} catch (NumberFormatException e){
System.out.println("Key must be a number");
return false;
}
}
}<file_sep>package commands;
import DOM.Ticket;
import utils.CollectionManager;
import utils.Console;
import java.util.Arrays;
/**
* Show all refundable values in ascending order
* **/
/**
 * Prints every ticket's refundable flag in ascending boolean order:
 * all "false" lines first, then all "true" lines.
 **/
public class PrintFieldAscendingRefundable extends AbstractCommand{
    private Console console;
    private CollectionManager collectionManager;

    public PrintFieldAscendingRefundable(Console console, CollectionManager collectionManager){
        super("print_field_ascending_refundable", "show all refundable values in in ascending order");
        this.console = console;
        this.collectionManager = collectionManager;
    }

    /**
     * Emits one line per ticket ("false" lines before "true" lines).
     *
     * @see Executable
     * @param arg ignored
     * @return always true
     **/
    @Override
    public boolean execute(String arg){
        // First pass: print every non-refundable ticket immediately and
        // count how many refundable ones remain to be printed.
        int trueCount = 0;
        for (Ticket ticket : collectionManager.readAll()) {
            if (ticket.isRefundable()) {
                trueCount++;
            } else {
                console.forcePrintln("false");
            }
        }
        // Second pass: the rest of the output is all "true".
        for (int printed = 0; printed < trueCount; printed++) {
            console.forcePrintln("true");
        }
        return true;
    }
}
<file_sep>package commands;
import DOM.Ticket;
import utils.CollectionManager;
import utils.Console;
import java.util.Iterator;
import java.util.Map;
/**
* Command that show average price of all elements
* **/
/**
 * Command that shows the average price over all elements in the collection.
 **/
public class AverageOfPriceCommand extends AbstractCommand{
    private Console console;
    private CollectionManager collectionManager;

    public AverageOfPriceCommand(Console console, CollectionManager collectionManager){
        super("average_of_price", "show average price");
        this.console = console;
        this.collectionManager = collectionManager;
    }

    /**
     * Computes and prints the arithmetic mean of all ticket prices.
     *
     * @see Executable
     * @param arg ignored
     * @return false if the collection is empty, true otherwise
     ***/
    @Override
    public boolean execute(String arg){
        if(collectionManager.getSize() == 0){
            console.println("Collection is empty");
            return false;
        }
        // Sum over the typed readAll() snapshot instead of the raw-typed
        // Iterator the previous version walked, which required an unchecked
        // cast to Map.Entry<Integer, Ticket>.
        double total = 0;
        for (Ticket ticket : collectionManager.readAll()) {
            total += ticket.getPrice();
        }
        double avgPrice = total / collectionManager.getSize();
        console.forcePrintln("Average price is " + avgPrice);
        return true;
    }
}
<file_sep>package commands;
import utils.CommandManager;
import utils.Console;
import utils.Logger;
/**
* Prints the information about available commands that command manager contains
* **/
public class HelpCommand extends AbstractCommand{
private CommandManager commandManager;
private Console console;
public HelpCommand(Console console){
super("help","prints the information about available commands");
this.console = console;
}
/**
* @see Executable
* **/
public boolean execute(String arg){
if(commandManager == null){
Logger.error("CommandManager isn't set in HelpCommand");
return false;
}
AbstractCommand[] commands = commandManager.getCommands();
for (AbstractCommand command : commands){
console.forcePrintln(command.getName() + " - " + command.getDescription());
}
return true;
}
public void setCommandManager(CommandManager commandManager) {
this.commandManager = commandManager;
}
}<file_sep>
package commands;
import utils.CollectionManager;
import utils.FileManager;
/**
* Saves the collection to file
* **/
public class SaveCommand extends AbstractCommand{
private FileManager fileManager;
private CollectionManager collectionManager;
private String filePath;
public SaveCommand(FileManager fileManager, CollectionManager collectionManager){
super("save","save collection to file");
this.fileManager = fileManager;
this.collectionManager = collectionManager;
}
/**
* @see Executable
* **/
public boolean execute(String arg){
fileManager.save(collectionManager);
return true;
}
}<file_sep>package utils;
import DOM.Ticket;
import java.util.*;
/**
* Manages the collection
* **/
/**
 * Manages the collection of tickets, keyed by ticket id.
 **/
public class CollectionManager {
    // Backing storage supplied by the caller. (The previous version allocated a
    // TreeMap in the field initializer that the constructor immediately discarded.)
    private Map<Integer, Ticket> collection;
    // Moment this in-memory collection was created; never reassigned.
    private final Date creationDate = new Date();

    public CollectionManager(Map<Integer, Ticket> collection){
        this.collection = collection;
    }

    /** @return number of tickets currently stored **/
    public int getSize(){
        return this.collection.size();
    }

    /** @return runtime class of the backing map (typed Class<?> instead of a raw Class) **/
    public Class<?> getCollectionClass(){
        return this.collection.getClass();
    }

    /** @return the date this collection was created **/
    public Date getCreationDate(){
        return this.creationDate;
    }

    /**
     * Create new entry in collection with id generated automatically
     * @see Ticket
     * **/
    public void create(Ticket ticket){
        this.collection.put(ticket.getId(), ticket);
    }

    /**
     * Create new entry in collection with specified id
     * @see Ticket
     * **/
    public void create(Integer key, Ticket ticket){
        this.collection.put(key, ticket);
    }

    /**
     * Get the ticket by its id (null if absent)
     * @see Ticket
     * **/
    public Ticket read(Integer key){
        return this.collection.get(key);
    }

    /**
     * Get all tickets as an array snapshot
     * **/
    public Ticket[] readAll(){
        return this.collection.values().toArray(new Ticket[0]);
    }

    /**
     * Update information about ticket by its key (no-op if the key is absent)
     * **/
    public void update(Integer key, Ticket ticket){
        this.collection.replace(key, ticket);
    }

    /**
     * Delete ticket by its key (no-op if the key is absent)
     * **/
    public void delete(Integer key){
        this.collection.remove(key);
    }

    /**
     * Clear the collection
     * **/
    public void deleteAll(){
        this.collection.clear();
    }

    /**
     * @return true if collection contains such key
     * **/
    public boolean contains(Integer key){
        return this.collection.containsKey(key);
    }

    /**
     * @return iterator over the (id, ticket) entries; typed so callers no longer
     * need the unchecked cast the previous raw Iterator forced on them
     * **/
    public Iterator<Map.Entry<Integer, Ticket>> getIterator(){
        return this.collection.entrySet().iterator();
    }
}
<file_sep>package commands;
import utils.CollectionManager;
import utils.Console;
import utils.Logger;
/**
* Removes the element from collections by its key
* **/
public class RemoveKeyCommand extends AbstractCommand{
private CollectionManager collectionManager;
private Console console;
public RemoveKeyCommand(CollectionManager collectionManager, Console console){
super("remove_key","remove the element from collection by its key");
this.collectionManager = collectionManager;
this.console = console;
}
/**
* @see Executable
* @param key that will be used to perform the remove
* **/
@Override
public boolean execute(String key) {
try{
if(key.isEmpty()) {
Logger.error("Key is empty in RemoveKeyCommand");
return false;
}
Integer resultKey = Integer.parseInt(key);
if (collectionManager.getSize() <= 0){
console.println("Collection is already empty");
return false;
}
collectionManager.delete(resultKey);
return true;
} catch (NumberFormatException e){
console.println("Key must be a number");
return false;
}
}
} | 5752c3155a757048bba33e3e983ff1f4c5b4b257 | [
"Java"
] | 9 | Java | revolalexander/reslab5 | 0eace9a02dae63012dcd46482f67812f91bccdd8 | ed6c0ef5d2d943785909ad094a4f633e729ddb96 |
refs/heads/master | <file_sep>package org.bihe.service.producerservices;
import org.bihe.service.HandleCommand;
import org.bihe.service.Service;
public class SendAlaramToCoordinator implements Service {
public SendAlaramToCoordinator() {
// TODO Auto-generated constructor stub
}
@Override
public void execute() {
HandleCommand.sendAlarmToCoordinator();
}
}
| b17ea75c41ff7fb175ae36a2bdb91c7a9584661c | [
"Java"
] | 1 | Java | kianhashemi/Client | e5f54593dc69832584f3745dadfb61c04ed996d6 | 6214df1c75a110bd5c9cf774ca0d03d13f0e4e6a |
refs/heads/master | <repo_name>neupok/EfirDataHubClient<file_sep>/src/ru/binbank/efirdatahub/client/DataHubClient.java
package ru.binbank.efirdatahub.client;
import org.apache.http.HttpEntity;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.NTCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustStrategy;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.util.EntityUtils;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
public abstract class DataHubClient {
private ClientConnectionSettings connectionSettings;
private String token;
DataHubClient(ClientConnectionSettings connectionSettings) {
this.connectionSettings = connectionSettings;
}
protected String postSync(String specificUri, String query) throws KeyStoreException, IOException, KeyManagementException, URISyntaxException, NoSuchAlgorithmException {
// Формирование URI
URI u = new URI(new StringBuilder().append(connectionSettings.baseURI).append(specificUri).toString());
// Формирование credentials
NTCredentials creds = new NTCredentials(connectionSettings.domainUser, connectionSettings.domainPwd, null, connectionSettings.domainName);
CredentialsProvider credsProvider = new BasicCredentialsProvider();
credsProvider.setCredentials(AuthScope.ANY,creds);
// Формирование конфигурации
RequestConfig config = RequestConfig.custom().setProxy(connectionSettings.proxy).setTargetPreferredAuthSchemes(Arrays.asList(AuthSchemes.NTLM)).build();
// Формирование тела сообщения
HttpEntity entity = new StringEntity(query, ContentType.APPLICATION_JSON);
// Сформировать запрос
HttpPost request = new HttpPost(u);
request.setEntity(entity);
request.setConfig(config);
request.addHeader("Accept", "application/json" );
// Создание http-клиента
SSLContext sslContext = SSLContextBuilder.create().
loadTrustMaterial(null, (TrustStrategy) (arg0, arg1) -> true
).build();
/*
create an SSL Socket Factory to use the SSLContext with the trust self signed certificate strategy
and allow all hosts verifier.
SSLConnectionSocketFactory connectionFactory = new SSLConnectionSocketFactory(sslContext, allowAllHosts);
*/
SSLConnectionSocketFactory connectionFactory = new SSLConnectionSocketFactory(
sslContext,
NoopHostnameVerifier.INSTANCE);
// Создание контекста
HttpClientContext clientContext = new HttpClientContext();
clientContext.setCredentialsProvider(credsProvider);
//CloseableHttpClient httpclient = WinHttpClients.custom().setSSLSocketFactory(connectionFactory).build();
CloseableHttpClient httpclient = HttpClients.custom().setSSLSocketFactory(connectionFactory)/*.setDefaultCredentialsProvider(credsProvider)*/.build();
// Вызов метода
CloseableHttpResponse response = httpclient.execute(request, clientContext);
try {
System.out.println("----------------------------------------");
System.out.println(response.getStatusLine());
System.out.println();
return EntityUtils.toString(response.getEntity());
} finally {
response.close();
}
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
}
<file_sep>/src/ru/binbank/efirdatahub/client/ClientConnectionSettings.java
package ru.binbank.efirdatahub.client;
import org.apache.http.HttpHost;
public class ClientConnectionSettings {
public String baseURI;
public HttpHost proxy;
public String domainUser;
public String domainPwd;
public String domainName;
}
<file_sep>/src/ru/binbank/efirdatahub/client/IDisposableResult.java
package ru.binbank.efirdatahub.client;
public interface IDisposableResult {
}
| 1e3adc462e034761417a4ceeb22dd833a3640767 | [
"Java"
] | 3 | Java | neupok/EfirDataHubClient | fa2e69bd4c20f0098c95cb2717142888b1d4971e | a5aa8275767423f2ba385e16c8e31c6f29bb783f |
refs/heads/master | <repo_name>JiaxianGu/waseda-AI-lecture<file_sep>/programming3/fnn.py
import numpy as np
# sigmoid: sigmoid function for matrices
# Parameters
# X: np.ndarray with shape [M, N]
# Output
# Y: np.ndarray with shape [M, N] s.t. Y[i,j] = sigmoid(X[i,j])
def sigmoid(X):
Y = X
# TODO: IMPLEMENT ME
pass
# dsigmoid: the derivative of sigmoid function for matrices
# Parameters
# X: np.ndarray with shape [M, N]
# Output
# Y: np.ndarray with shape [M, N] s.t.
# Y[i,j] is the derivative of sigmoid at X[i,j]
# C.f. the derivative of sigmoid is found at
# https://en.wikipedia.org/wiki/Activation_function
def dsigmoid(X):
# TODO: IMPLEMENT ME
pass
# softmax: softmax function for a bunch of data points
# Parameters
# X: np.ndarray with shape [# of data points, # of features]
# Output
# Y: np.ndarray with shape [# of data points, probabilies] s.t.
# Y[i,j] = softmax^j(X[i])
def softmax(X):
# TODO: IMPLEMENT ME
pass
class FeedforwardNeuralNetwork:
# Parameters
# n_neurons1: the # of neurons at the 1st hidden layer
# n_neurons2: the # of neurons at the 2nd hidden layer
# n_epochs: the # of iterations to run gradient descent
# lr = 0.001: learning rate
# print_loss: whether loss values during training are printed
def __init__(self, n_neurons1, n_neurons2, n_epochs, lr,
print_loss=False):
self.n_neurons1 = n_neurons1
self.n_neurons2 = n_neurons2
self.n_epochs = n_epochs
self.lr = lr
self.print_loss = print_loss
# fit: train this model on training inputs X and outputs Y
# Parameters
# X: training inputs
# np.ndarray (shape: [# of data points, # of features])
# Y: training outputs
# np.ndarray (shape: [# of data points])
def fit(self, X, Y):
n_features = X.shape[1]
n_classes = np.max(Y) + 1
n_data = len(Y)
self.W1 = np.random.randn(n_features, self.n_neurons1)
self.B1 = np.random.rand(1, self.n_neurons1)
self.W2 = np.random.randn(self.n_neurons1, self.n_neurons2)
self.B2 = np.random.rand(1, self.n_neurons2)
self.W3 = np.random.randn(self.n_neurons2, n_classes)
self.B3 = np.random.rand(1, n_classes)
for i in range(self.n_epochs):
A0 = X
# TODO: IMPLEMENT Z1, Z2, Z3
# Zi is the result of applying linear function with
# weights self.Wi and biases self.Bi to Ai-1
# Ai is the result of applying an activation function to Zi
# Activation functions for A1 and A2: sigmoid
# Activation functions for A3: softmax
Z1 = np.zeros((n_data, self.n_neurons1)) # dummy
A1 = sigmoid(Z1) # output from the 1st hidden layer
Z2 = np.zeros((n_data, self.n_neurons2)) # dummy
A2 = sigmoid(Z2) # outoput from the 2nd hidden layer
Z3 = np.zeros((n_data, self.n_classes)) # dummy
A3 = softmax(Z3) # outoupt from the output layer
# Convert class labels to one-hot vectors
expected = np.zeros((n_data, n_classes))
expected[np.arange(n_data), Y] = 1
if self.print_loss:
loss = - np.sum(np.log(A3) * expected) / n_data
print('loss', loss, '@', i, 'epoch')
# Update parameters by gradient descent
grad_Z3 = A3 - expected
grad_W3, grad_B3 = self.grad_params(grad_Z3, A2)
self.W3 -= self.lr * grad_W3
self.B3 -= self.lr * grad_B3
grad_Z2 = np.dot(grad_Z3, self.W3.T) * dsigmoid(Z2)
grad_W2, grad_B2 = self.grad_params(grad_Z2, A1)
self.W2 -= self.lr * grad_W2
self.B2 -= self.lr * grad_B2
grad_Z1 = np.dot(grad_Z2, self.W2.T) * dsigmoid(Z1)
grad_W1, grad_B1 = self.grad_params(grad_Z1, X)
self.W1 -= self.lr * grad_W1
self.B1 -= self.lr * grad_B1
# grad_params: calculates gradients of weights W and biases B of a layer
# Parameters
# A: inputs to the layer (shape: [# of data points, # of features])
# grad_Z: the derivative of the cost function at Z = A * W + B
# (`*` is the matrix multiplicaiton; shape: [# of data points, # of features])
# Output
# grad_W: gradients of W
# grad_B: gradients of B
def grad_params(self, grad_Z, A):
assert grad_Z.shape[0] == A.shape[0]
n_data = grad_Z.shape[0]
n_features_Z = grad_Z.shape[1]
n_features_A = A.shape[1]
grad_W = np.sum(np.dot(A.T, grad_Z), axis=0) / n_data
grad_B = np.sum(grad_Z, axis=0, keepdims=True) / n_data
return grad_W, grad_B
# predict: classify given data points
# Parameters
# X: inputs to the classifier
# np.ndarray (shape: [# of data points, # of features])
# Output
# Y: classificaiton results
# np.ndarray (shape: [# of data points]) s.t.
# Y[i] is the label predicted for data point X[i]
def predict(self, X):
# TODO: IMPLEMENT ME
# Hint
# self.W1, self.W2: weight parameters at the 1st and 2nd layer, resp.
# self.B1, self.B2: bias parameters at the 1st and 2nd layer, resp.
# Activation functions at hidden layers are sigmoid
# The activation function at the output layer is softmax
#
# Tips
# `np.argmax` may be useful.
# Let A be np.ndarray with shape [M, N]. Then, `np.argmax(A, axis=1)`
# returns the np.ndarray B with shape [M] s.t. B[i] is the index of
# the maximum value among A[i]
pass
if __name__ == '__main__':
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
np.random.seed(0)
iris_dataset = load_iris()
X_train, X_test, Y_train, Y_test = train_test_split(iris_dataset.data,
iris_dataset.target,
random_state=0)
# If print_loss is set to True, the loss values will be shown
print_loss = False
n_neurons1 = 100 # the # of neurons at the 1st hidden layer
n_neurons2 = 100 # the # of neurons at the 2nd hidden layer
n_epochs = 1000 # the # of iterations to run gradient descent
lr = 0.001 # learning rate
fnn = FeedforwardNeuralNetwork(n_neurons1, n_neurons2, n_epochs, lr,
print_loss=print_loss)
fnn.fit(X_train, Y_train)
X_test_predict = fnn.predict(X_test)
accuracy = np.sum(Y_test == X_test_predict) / Y_test.shape[0]
assert accuracy > 0.7
print('acc:', accuracy)
| 61743c863cc894f10d8919ccf7b97c98f13be335 | [
"Python"
] | 1 | Python | JiaxianGu/waseda-AI-lecture | 568eae15bbd0fba40562910a943c6aa84fab831e | d5ff8efd33fa3631762b137c49b7ae53a64bff3f |
refs/heads/master | <repo_name>CaitSith2/KTANE-Mods-Samfun<file_sep>/Skewed Slots/Assets/SkewedSlotsModule/Scripts/SkewedModule.cs
using UnityEngine;
using System;
using System.Collections;
using BombInfoExtensions;
using System.Collections.Generic;
public class SkewedModule : MonoBehaviour
{
public GameObject[] Slots;
public KMSelectable Submit;
public KMAudio BombAudio;
public KMBombModule BombModule;
public KMBombInfo BombInfo;
int[] Numbers = new int[3];
int[] Display = new int[3];
int[] Solution = new int[3];
bool moduleActivated = false;
bool solved = false;
bool firstSpin = true;
string ruleLog = "(Rule Log)";
int[] fibonacci = {1, 1, 2, 3, 5, 8, 13, 21, 34, 55};
static int idCounter = 1;
int moduleID;
void LogRule(string msg)
{
ruleLog += "\n" + msg;
}
public static int Count(IEnumerable source)
{
int c = 0;
var e = source.GetEnumerator();
while (e.MoveNext())
{
c++;
}
return c;
}
public static bool isPrime(int number)
{
int boundary = (int)Math.Floor(Math.Sqrt(number));
if (number <= 1) return false;
if (number == 2) return true;
for (int i = 2; i <= boundary; ++i)
{
if (number % i == 0) return false;
}
return true;
}
void DebugMsg(string msg)
{
Debug.LogFormat("[Skewed Slots #{0}] {1}", moduleID, msg);
}
int Random(int min, int max)
{
return UnityEngine.Random.Range(min, max + 1);
}
void ButtonPress(KMSelectable Selectable)
{
Selectable.AddInteractionPunch();
BombAudio.PlayGameSoundAtTransform(KMSoundOverride.SoundEffect.ButtonPress, transform);
}
private IEnumerator SpinSlots(int state)
{
// State:
// 0 - Module was just activated.
// 1 - Mistake was made and will HandleStrike()
// 2 - Module was solved and will HandlePass()
moduleActivated = false;
ruleLog = "(Rule Log)";
int[] spins = {40 + Random(-5, 5), 60 + Random(-5, 5), 80 + Random(-5, 5) };
while (spins[2] > 0)
{
for (int slotnumber = 0; slotnumber < 3; slotnumber++)
{
if (spins[slotnumber] > 0)
{
int number = (Display[slotnumber] + 1) % 10;
Numbers[slotnumber] = number;
Display[slotnumber] = number;
spins[slotnumber] = spins[slotnumber] - 1;
}
if (spins[slotnumber] == 0 && state == 2) {
Display[slotnumber] = 10;
}
}
UpdateSlots();
yield return new WaitForSeconds(.03f);
}
if (state < 2)
{
LogRule("Initial State: " + Numbers[0] + ", " + Numbers[1] + ", " + Numbers[2] + ".");
for (int slotnumber = 0; slotnumber < 3; slotnumber++)
{
Solution[slotnumber] = ApplyRules(Numbers[slotnumber], slotnumber);
}
LogRule("\nFinal State: " + Solution[0] + ", " + Solution[1] + ", " + Solution[2] + ".");
DebugMsg(ruleLog);
moduleActivated = true;
if (state == 1)
{
BombModule.HandleStrike();
}
}
else
{
solved = true;
BombModule.HandlePass();
}
}
void Start()
{
BombModule.OnActivate += ActivateModule;
Submit.OnInteract += delegate ()
{
ButtonPress(Submit);
if (moduleActivated)
{
DebugMsg("Submitted: " + Display[0] + " " + Display[1] + " " + Display[2]);
if (Display[0] == Solution[0] && Display[1] == Solution[1] && Display[2] == Solution[2])
{
StartCoroutine(SpinSlots(2));
}
else
{
StartCoroutine(SpinSlots(1));
}
}
return false;
};
foreach (GameObject slot in Slots)
{
int slotnumber = int.Parse(slot.name.Substring(4, 1));
KMSelectable up = slot.transform.Find("Up").gameObject.GetComponent<KMSelectable>() as KMSelectable;
KMSelectable down = slot.transform.Find("Down").gameObject.GetComponent<KMSelectable>() as KMSelectable;
up.OnInteract += delegate ()
{
if (moduleActivated)
{
ButtonPress(up);
Display[slotnumber] = (Display[slotnumber] + 1) % 10;
UpdateSlots();
}
return false;
};
down.OnInteract += delegate ()
{
ButtonPress(down);
if (moduleActivated)
{
Display[slotnumber] = Display[slotnumber] - 1;
if (Display[slotnumber] == -1)
{
Display[slotnumber] = 9;
}
UpdateSlots();
}
return false;
};
}
}
int ApplyRules(int digit, int slotnumber)
{
int correct = digit;
LogRule("\nCalculating slot #" + (slotnumber + 1) + ". Starting at: " + digit);
// All digits
switch (correct)
{
case 2:
correct = 5;
LogRule("2 is actually 5.");
break;
case 7:
correct = 0;
LogRule("7 is actually 0.");
break;
}
string serial = BombInfo.GetSerialNumber();
int lit = Count(BombInfo.GetOnIndicators());
int unlit = Count(BombInfo.GetOffIndicators());
correct = correct + lit - unlit;
LogRule("Added indicators (" + lit + " - " + unlit + "). New number: " + correct);
if (correct % 3 == 0)
{
LogRule("Number is a multiple of 3. Number + 4");
correct += 4;
}
else if (correct > 7)
{
LogRule("Number is greater than 7. Number * 2");
correct *= 2;
}
else if (correct < 3 && correct % 2 == 0)
{
LogRule("Number is even and less than 3. Number / 2");
correct /= 2;
}
else if (BombInfo.IsPortPresent("StereoRCA") || BombInfo.IsPortPresent("PS2"))
{
LogRule("RCA or PS/2 port present. Skip this section.");
// Skip the rest of the rules
}
else
{
LogRule("Added battery count to the original number for new number. (" + BombInfo.GetBatteryCount() + ")");
correct = digit + BombInfo.GetBatteryCount();
}
LogRule("After the the first section: " + correct);
// Specific digits
if (slotnumber == 0)
{
if (correct % 2 == 0 && correct > 5)
{
LogRule("Number is even and greater than 5. # / 2.");
correct /= 2;
}
else if (isPrime(correct))
{
LogRule("Number is prime. Added rightmost serial number.");
correct += int.Parse(serial[serial.Length - 1].ToString());
}
else if (BombInfo.IsPortPresent("Parallel"))
{
LogRule("Parallel port present. Number * -1.");
correct *= -1;
}
else if (Numbers[1] % 2 == 1)
{
LogRule("Second slot was originally odd. Leave the number unchanged.");
// Leave the digit unchanged.
}
else
{
LogRule("No other rules apply. Number - 2.");
correct -= 2;
}
}
else if (slotnumber == 1)
{
int index = Array.IndexOf(fibonacci, correct);
if (BombInfo.IsIndicatorOff("BOB"))
{
LogRule("Bob helped you out. Leave the number unchanged.");
// Leave the digit unchanged.
}
else if (correct == 0)
{
LogRule("The number is 0. Add the original digit in the first slot.");
correct += Numbers[0];
}
else if (index > -1)
{
LogRule("Number is in the fibonacci sequence. Added the next digit: " + fibonacci[index + 1]);
correct += fibonacci[index + 1];
}
else if (correct >= 7)
{
LogRule("Number greater than or equal to 7. Number + 4.");
correct += 4;
}
else
{
LogRule("No other rules apply. Number * 3.");
correct *= 3;
}
}
else if (slotnumber == 2)
{
if (BombInfo.IsPortPresent("Serial"))
{
int largest = 0;
foreach (char c in serial)
{
int value;
if (int.TryParse(c.ToString(), out value))
{
if (value > largest)
{
largest = value;
}
}
}
correct += largest;
LogRule("Serial port present. Added the largest serial number: " + largest);
}
else if (digit == Numbers[0] || digit == Numbers[1])
{
LogRule("The original digit is the same as another. Leave the number unchanged.");
// Leave the digit unchanged.
}
else if (correct >= 5)
{
int total = 0;
foreach (char c in Convert.ToString(digit, 2).ToCharArray())
{
if (c.ToString() == "1")
{
total = total + 1;
}
}
correct = total;
LogRule("Number is greater than or equal to 5. Changed the number to the total of the binary form of the original digit.");
}
else
{
LogRule("No other rules apply. Number + 1.");
correct += 1;
}
}
LogRule("After the second section: " + correct);
while (correct > 9)
{
correct = correct - 10;
}
while (correct < 0)
{
correct = correct + 10;
}
LogRule("Final digit: " + correct);
return correct;
}
void UpdateSlots()
{
int slotnumber = 0;
foreach (GameObject slot in Slots)
{
TextMesh text = slot.transform.Find("Number").gameObject.GetComponent<TextMesh>() as TextMesh;
if (Display[slotnumber] < 10)
{
text.text = Display[slotnumber].ToString();
} else {
text.text = "!";
}
slotnumber++;
}
}
void ActivateModule()
{
moduleID = idCounter++;
for (int slotnumber = 0; slotnumber < 3; slotnumber++)
{
int number = Random(0, 9);
Numbers[slotnumber] = number;
Display[slotnumber] = number;
}
StartCoroutine(SpinSlots(0));
}
// Twitch Plays handler: "submit a b c" spins each slot to the requested digit,
// then presses Submit. Yields "solve"/"strike" so TP can attribute the outcome.
public IEnumerator ProcessTwitchCommand(string command)
{
    string[] split = command.ToLowerInvariant().Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
    if (split.Length == 4 && split[0] == "submit")
    {
        int slot0, slot1, slot2;
        if (int.TryParse(split[1], out slot0) && int.TryParse(split[2], out slot1) && int.TryParse(split[3], out slot2))
        {
            List<int> submit = new List<int>() { slot0, slot1, slot2 };
            // Silently reject the whole command if any requested digit is out of range.
            foreach (int num in submit)
            {
                if (num < 0 || num > 9)
                {
                    yield break;
                }
            }
            foreach (GameObject slot in Slots)
            {
                // Assumes the slot index is the character at position 4 of the
                // GameObject name (e.g. "Slot0") — TODO confirm against the prefab.
                int slotnumber = int.Parse(slot.name.Substring(4, 1));
                KMSelectable up = slot.transform.Find("Up").gameObject.GetComponent<KMSelectable>() as KMSelectable;
                KMSelectable down = slot.transform.Find("Down").gameObject.GetComponent<KMSelectable>() as KMSelectable;
                // Step the slot one button press at a time toward the requested digit.
                int diff = Display[slotnumber] - submit[slotnumber];
                for (int i = 0; i < Math.Abs(diff); i++)
                {
                    (diff > 0 ? down : up).OnInteract();
                    yield return new WaitForSeconds(0.1f);
                }
            }
            // Report the expected outcome before pressing Submit.
            if (Display[0] == Solution[0] && Display[1] == Solution[1] && Display[2] == Solution[2])
            {
                yield return "solve";
            }
            else
            {
                yield return "strike";
            }
            Submit.OnInteract();
        }
    }
}
}
<file_sep>/PacingExtender/Assets/PacingExtender/PacingExtender.cs
using UnityEngine;
using UnityEngine.UI;
using System;
using System.Linq;
using System.Reflection;
using System.Collections;
using System.Collections.Generic;
using BetterModSettings;
// Per-event configuration persisted in the user's settings file.
// NaN for Weight/MinRating means "use the event's built-in default".
public class EventSettings
{
    public bool Enabled = true;          // When false the event is never registered.
    public float Weight = float.NaN;     // Relative selection weight; NaN = event default.
    public float MinRating = float.NaN;  // Minimum success rating required; NaN = event default.
}
// Top-level mod configuration persisted via BetterModSettings.
public class PacingSettings
{
    public int Min = 120;              // Minimum seconds between pacing events.
    public int Max = 300;              // Maximum seconds between pacing events.
    public int AbsoluteMinimum = 45;   // Hard floor used when scaling the worst-case rating.
    public bool Debug = false;         // Shows the on-screen debug UI when true.
    public Dictionary<string, EventSettings> EventSettings = new Dictionary<string, EventSettings>();
}
[RequireComponent(typeof(KMService))]
[RequireComponent(typeof(KMGameInfo))]
public class PacingExtender : MonoBehaviour
{
// Shorthand binding-flag combinations used throughout the reflection lookups.
static BindingFlags NonPublic = BindingFlags.Instance | BindingFlags.NonPublic;
static BindingFlags Public = BindingFlags.Instance | BindingFlags.Public;
// Debug-UI roots (assigned in the Unity inspector).
public GameObject UI;
public GameObject ActiveInd;
public GameObject SuccessInd;
public GameObject NextEvent;
// All pacing events (built-in idle actions plus mod-registered ones).
List<PacingEvent> Events = new List<PacingEvent>();
ModSettings BetterSettings = new ModSettings("PacingExtender", typeof(PacingSettings));
PacingSettings Settings;
// Prefixed format logging for this mod.
static void Log(object format, params object[] formatting)
{
    Debug.LogFormat("[PacingExtender] " + format, formatting);
}
List<object> GetIdleEvents(IList actions)
{
    // Collect every pacing action whose EventType is Idle_DoingWell.
    List<object> idle = new List<object>();
    foreach (object action in actions)
    {
        object eventType = action.GetType().GetProperty("EventType", Public).GetValue(action, null);
        if (eventType.ToString() == "Idle_DoingWell")
        {
            idle.Add(action);
        }
    }
    return idle;
}
float timeLeft = 0;  // Countdown (seconds, scaled by success) until the next event fires.
int minTime = 20;    // Lower bound for the next-event delay; overwritten from Settings.
int maxTime = 60;    // Upper bound for the next-event delay; overwritten from Settings.
// Resolves a type by name, falling back to a scan of every loaded assembly.
public static Type FindType(string qualifiedTypeName)
{
    Type type = Type.GetType(qualifiedTypeName);
    if (type != null)
    {
        return type;
    }
    foreach (Assembly assembly in AppDomain.CurrentDomain.GetAssemblies())
    {
        type = assembly.GetType(qualifiedTypeName);
        if (type != null)
        {
            return type;
        }
    }
    return null;
}
// Wraps either a legacy game pacing action (reflected out of PaceMaker) or a
// mod-registered coroutine event, with weight/min-rating/cooldown metadata.
// NOTE(review): this derives from MonoBehaviour yet is created with `new`, which
// Unity warns against — it seems to work because only coroutine plumbing on the
// outer class is used, but confirm this is intentional.
class PacingEvent : MonoBehaviour
{
    Action _legacyAction = null;          // Built-in game action (null for coroutine events).
    float _executionTime = 0;             // Seconds to block after firing a legacy action.
    Func<IEnumerator> _funcAction = null; // Mod-registered coroutine (null for legacy actions).
    public string _name = "";
    public float _minDiff = 0;            // Minimum success rating required to fire.
    public float _weight = 0;             // Relative selection weight.
    public float _cooldown = 0;           // Seconds before this event may fire again.
    public float _timeStamp = 0;          // Time.time after which the event is eligible again.
    // Wraps a built-in PaceMaker action, pulling its delegate and name via reflection.
    public PacingEvent(object PacingAction, Func<string, EventSettings> GetEventSettings)
    {
        Type _pacingActionType = PacingAction.GetType();
        _legacyAction = (Action) _pacingActionType.GetField("Action", Public).GetValue(PacingAction);
        _name = (string) _pacingActionType.GetProperty("Name", Public).GetValue(PacingAction, null);
        // Hard-coded durations for the known built-in actions.
        switch (_name)
        {
            case "Cut the lights":
                _executionTime = 12.181f;
                break;
            case "Turn on Alarm Clock":
                _executionTime = 5;
                break;
            default:
                Log("Unhandled PacingAction: " + _name);
                break;
        }
        // User settings override the defaults; defaults are written back so the
        // settings file gains entries for every seen event.
        EventSettings settings = GetEventSettings(_name);
        _minDiff = float.IsNaN(settings.MinRating) ? 0 : settings.MinRating;
        _weight = float.IsNaN(settings.Weight) ? 1 : settings.Weight;
        if (float.IsNaN(settings.MinRating))
        {
            settings.MinRating = _minDiff;
        }
        if (float.IsNaN(settings.Weight))
        {
            settings.Weight = _weight;
        }
    }
    // Wraps a mod-registered coroutine event with explicit defaults.
    public PacingEvent(Func<IEnumerator> Event, string name, float minDiff, float weight, float cooldown, EventSettings settings)
    {
        _funcAction = Event;
        _name = name;
        _minDiff = float.IsNaN(settings.MinRating) ? minDiff : settings.MinRating;
        _weight = float.IsNaN(settings.Weight) ? weight : settings.Weight;
        _cooldown = cooldown;
        if (float.IsNaN(settings.MinRating))
        {
            settings.MinRating = _minDiff;
        }
        if (float.IsNaN(settings.Weight))
        {
            settings.Weight = _weight;
        }
    }
    // Runs the event: legacy actions fire-and-wait, coroutine events are drained inline.
    public IEnumerator ExecuteAction()
    {
        Log("Executing PacingEvent: " + _name);
        if (_legacyAction != null)
        {
            _legacyAction();
            yield return new WaitForSeconds(_executionTime);
        }
        else
        {
            IEnumerator enumerator = _funcAction();
            while (enumerator.MoveNext())
            {
                yield return enumerator.Current;
            }
        }
    }
}
// Returns the stored settings for this event name, creating and caching a
// default entry on first use so the settings file gains it on save.
EventSettings GetEventSettings(string name)
{
    EventSettings settings;
    if (!Settings.EventSettings.TryGetValue(name, out settings))
    {
        settings = new EventSettings();
        Settings.EventSettings[name] = settings;
    }
    return settings;
}
// Public API for other mods: register a coroutine pacing event, unless the
// user has disabled it in their settings.
public void RegisterEvent(Func<IEnumerator> Event, string name, float minDiff, float weight, float cooldown)
{
    EventSettings settings = GetEventSettings(name);
    if (!settings.Enabled)
    {
        return;
    }
    Events.Add(new PacingEvent(Event, name, minDiff, weight, cooldown, settings));
}
// Main gameplay loop: waits for the game's PaceMaker, harvests its idle events,
// disables it, then drives event selection itself on a 1-second tick.
IEnumerator WaitForPaceMaker()
{
    Settings = (PacingSettings) BetterSettings.Settings;
    // Validate config file.
    if (Settings.Min > Settings.Max)
    {
        Settings.Min = Settings.Max;
    }
    if (Settings.AbsoluteMinimum > Settings.Min)
    {
        Settings.AbsoluteMinimum = Settings.Min;
    }
    yield return new WaitUntil(() => { _paceMakerObj = GameObject.Find("PaceMaker"); return _paceMakerObj != null; });
    minTime = Math.Max(Math.Min(Settings.Min, Settings.Max), 0);
    maxTime = Math.Max(Settings.Max, minTime);
    timeLeft = UnityEngine.Random.Range(minTime, maxTime);
    UI.SetActive(Settings.Debug);
    object paceMaker = _paceMakerObj.GetComponent("PaceMaker");
    IList actions = (IList) _actions.GetValue(paceMaker);
    // Wait for the game to activate the PaceMaker before touching its action list.
    yield return new WaitUntil(() => (bool) _isActive.GetValue(paceMaker));
    _populatePacingEvents.Invoke(paceMaker, null);
    bool actionsEnabled = actions.Count > 0;
    object mission = _mission.GetValue(paceMaker);
    // On non-freeplay missions, let the game drain its own idle events first,
    // then repopulate so we get a fresh set to take over.
    if ((string) _ID.GetValue(mission, null) != "freeplay" && actionsEnabled)
    {
        activeImg.color = Color.yellow;
        yield return new WaitUntil(() =>
        {
            List<object> idle = GetIdleEvents(actions);
            eventCount.text = idle.Count.ToString();
            return idle.Count == 0;
        });
        _populatePacingEvents.Invoke(paceMaker, null);
    }
    // Deactivate the built-in PaceMaker; from here on we schedule events ourselves.
    _isActive.SetValue(paceMaker, false);
    if (actionsEnabled)
    {
        eventCount.text = GetIdleEvents(actions).Count.ToString();
        activeImg.color = Color.green;
    }
    else
    {
        activeImg.color = Color.gray;
    }
    Events.AddRange(GetIdleEvents(actions).Select((object PacingAction) => new PacingEvent(PacingAction, GetEventSettings)).Where(e => GetEventSettings(e._name).Enabled));
    yield return new WaitForSeconds(1f);
    while (_paceMakerObj != null)
    {
        if (actionsEnabled)
        {
            // timeLeft drains faster when the player is doing well (success near 1+).
            float success = CalculateSuccess();
            timeLeft -= success;
            if (timeLeft <= 0)
            {
                timeLeft = UnityEngine.Random.Range(minTime, maxTime);
                PacingEvent[] validEvents = Events.Where(e => e._timeStamp <= Time.time && e._minDiff <= success).ToArray();
                if (validEvents.Length == 0)
                {
                    Log("Unable to find any events to play! Skipping an event this time.");
                }
                else
                {
                    // Pick a random event based on weights.
                    float targetWeight = UnityEngine.Random.Range(0, validEvents.Sum(e => e._weight));
                    float currentWeight = 0;
                    PacingEvent idleEvent = null;
                    foreach (PacingEvent e in validEvents)
                    {
                        currentWeight += e._weight;
                        if (currentWeight >= targetWeight)
                        {
                            idleEvent = e;
                            break;
                        }
                    }
                    // Run the chosen event to completion before resuming the tick.
                    IEnumerator enumerator = idleEvent.ExecuteAction();
                    while (enumerator.MoveNext())
                    {
                        yield return enumerator.Current;
                    }
                    idleEvent._timeStamp = Time.time + idleEvent._cooldown;
                }
            }
            eventTime.text = timeLeft.ToString("n2");
            percent.text = Math.Round((decimal) success * 100, 0, MidpointRounding.AwayFromZero) + "%";
        }
        yield return new WaitForSeconds(1f);
        if (actionsEnabled)
        {
            activeImg.color = Color.green;
        }
    }
    OnRoundEnded();
}
// Tears down end-of-round state: kill the PaceMaker object, persist settings,
// clear registered events and hide the debug UI.
void OnRoundEnded()
{
    activeImg.color = Color.red;
    GameObject paceMaker = _paceMakerObj;
    if (paceMaker != null) // Silly PaceMaker, you can't pace after a bomb!
    {
        _paceMakerObj = null;
        Destroy(paceMaker);
    }
    BetterSettings.Settings = Settings; // Any Event settings should have been added by now, so we can update the users config file.
    Events.Clear();
    UI.SetActive(false);
}
// Computes the player's current success rating as the worst rating over every
// live bomb, also factoring in an aggregate across all bombs. Returns 1 when
// no bombs are active.
float CalculateSuccess()
{
    object[] Bombs = FindObjectsOfType(_bombType);
    if (Bombs.Length > 0)
    {
        // Cap the starting "worst" so the first comparison can only lower it.
        float worstRating = Settings.Min / (float) Settings.AbsoluteMinimum;
        float bestTimeRemaining = 0;
        float maxTimeRemaining = 0;
        int totalModules = 0;
        int totalSolved = 0;
        foreach (object bomb in Bombs) totalModules += (int) _getSolvableMethod.Invoke(bomb, null);
        foreach (object bomb in Bombs)
        {
            int thisSolved = (int) _getSolvedMethod.Invoke(bomb, null);
            totalSolved += thisSolved;
            object timer = _timer.GetValue(bomb);
            // Normalize remaining time by the timer's rate modifier (strike speed-up).
            float curTime = (float) _timeRemaining.GetValue(timer) / (float) _rateModifier.GetValue(timer);
            float maxTime = (float) _totalTime.GetValue(bomb);
            maxTimeRemaining = Math.Max(maxTimeRemaining, maxTime);
            bestTimeRemaining = Mathf.Max(bestTimeRemaining, curTime);
            worstRating = Mathf.Min(worstRating, CalculateRating((int) _getSolvableMethod.Invoke(bomb, null) - thisSolved, totalModules, curTime, maxTime));
        }
        // Also rate the combined workload against the most generous clock.
        worstRating = Mathf.Min(worstRating, CalculateRating(totalModules - totalSolved, totalModules, bestTimeRemaining, maxTimeRemaining));
        return worstRating;
    }
    else return 1;
}
// Rates progress as (required pace) / (current pace), clamped to [0, 1.5].
// Values above 1 mean the player is ahead of the needed solve rate.
float CalculateRating(int remain, int total, float timeLeft, float timeTotal)
{
    float requiredPace = total / timeTotal;
    float currentPace = remain / timeLeft;
    return Mathf.Min(Mathf.Max(requiredPace / currentPace, 0), 1.5f);
}
#region Type Definitions
// Reflection handles into game internals, resolved once in Start().
// Used in WaitForPaceMaker()
Type _paceMakerType;
FieldInfo _isActive;
MethodInfo _populatePacingEvents;
FieldInfo _actions;
FieldInfo _mission;
PropertyInfo _ID;
GameObject _paceMakerObj;
// Used in CalculateSuccess()
Type _bombType;
MethodInfo _getSolvableMethod;
MethodInfo _getSolvedMethod;
FieldInfo _numStrikes;
FieldInfo _numStrikesToLose;
FieldInfo _totalTime;
FieldInfo _timer;
Type _timerType;
FieldInfo _timeRemaining;
FieldInfo _rateModifier;
// Used for UI
Image activeImg = null;
Text eventCount = null;
Text eventTime = null;
Text percent = null;
#endregion
// Resolves all reflection handles once, wires up the debug UI, and subscribes
// to game-state changes to start/stop the pacing coroutine.
void Start()
{
    #region Type Assignments
    _paceMakerType = FindType("Assets.Scripts.Pacing.PaceMaker");
    _isActive = _paceMakerType.GetField("isActive", NonPublic);
    _populatePacingEvents = _paceMakerType.GetMethod("PopulatePacingActions", NonPublic);
    _actions = _paceMakerType.GetField("actions", NonPublic);
    _mission = _paceMakerType.GetField("mission", NonPublic);
    _ID = FindType("Assets.Scripts.Missions.Mission").GetProperty("ID", Public);
    _bombType = FindType("Bomb");
    _getSolvableMethod = _bombType.GetMethod("GetSolvableComponentCount", Public);
    _getSolvedMethod = _bombType.GetMethod("GetSolvedComponentCount", Public);
    _totalTime = _bombType.GetField("TotalTime", Public);
    _timer = _bombType.GetField("timer", NonPublic);
    _timerType = FindType("TimerComponent");
    _timeRemaining = _timerType.GetField("TimeRemaining", Public);
    _rateModifier = _timerType.GetField("rateModifier", NonPublic);
    activeImg = ActiveInd.GetComponent<Image>();
    eventCount = ActiveInd.transform.Find("Text").GetComponent<Text>();
    eventTime = NextEvent.transform.Find("Text").GetComponent<Text>();
    percent = SuccessInd.transform.Find("Text").GetComponent<Text>();
    #endregion
    Coroutine _mainCoroutine = null;
    // Subscribe with += instead of assignment: plain assignment would silently
    // discard any handler another mod (or this mod's other components) already
    // registered on OnStateChange.
    GetComponent<KMGameInfo>().OnStateChange += delegate (KMGameInfo.State state)
    {
        if (state == KMGameInfo.State.Gameplay)
        {
            _mainCoroutine = StartCoroutine(WaitForPaceMaker());
        }
        else if (_mainCoroutine != null)
        {
            // Leaving gameplay: stop the loop and run end-of-round cleanup.
            StopCoroutine(_mainCoroutine);
            _mainCoroutine = null;
            OnRoundEnded();
        }
    };
}
}
<file_sep>/BetterCasePicker/Assets/BetterCasePicker/BetterCasePicker.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using UnityEngine;
public class BetterCasePicker : MonoBehaviour
{
// Searches every loaded assembly for a type by full name. Assemblies whose
// types cannot be enumerated (ReflectionTypeLoadException from dynamic or
// partially-loaded assemblies) are skipped instead of aborting the whole
// search, which the previous single LINQ query would have done.
public Type FindType(string fullName)
{
    foreach (Assembly assembly in AppDomain.CurrentDomain.GetAssemblies())
    {
        Type found = null;
        try
        {
            found = assembly.GetTypes().FirstOrDefault(t => t.FullName.Equals(fullName));
        }
        catch (ReflectionTypeLoadException)
        {
            // This assembly can't list its types; ignore it and keep searching.
        }
        if (found != null)
        {
            return found;
        }
    }
    return null;
}
// Reflection handles into game internals, resolved once in Start().
Type _gameplayStateType;
FieldInfo _missionToLoadField;     // static: ID of the mission about to load
FieldInfo _freeplaySettingsField;  // static: freeplay configuration
Type _missionManagerType;
PropertyInfo _instanceProperty;
MethodInfo _getMissionMethod;
Type _missionType;
FieldInfo _generatorSettingField;
Type _generatorSettingType;
MethodInfo _getComponentCountMethod;
FieldInfo _frontFaceOnlyField;
object bombGenerator;              // live BombGenerator instance, found each round
Type _bombGeneratorType;
FieldInfo _bombPrefabOverrideField;
FieldInfo _bombPrefabPoolField;
Type _objectPoolType;
FieldInfo _objectsField;
FieldInfo _defaultField;
// Resolves reflection handles, then on each gameplay start picks the smallest
// bomb case that can hold the mission's module count (plus the timer).
void Start()
{
    BindingFlags Public = BindingFlags.Public | BindingFlags.Instance;
    BindingFlags Static = BindingFlags.Public | BindingFlags.Static;
    _gameplayStateType = FindType("GameplayState");
    _missionToLoadField = _gameplayStateType.GetField("MissionToLoad", Static);
    _freeplaySettingsField = _gameplayStateType.GetField("FreeplaySettings", Static);
    _missionManagerType = FindType("Assets.Scripts.Missions.MissionManager");
    _instanceProperty = _missionManagerType.GetProperty("Instance", Static);
    _getMissionMethod = _missionManagerType.GetMethod("GetMission", Public);
    _missionType = FindType("Assets.Scripts.Missions.Mission");
    _generatorSettingField = _missionType.GetField("GeneratorSetting", Public);
    _generatorSettingType = FindType("Assets.Scripts.Missions.GeneratorSetting");
    _getComponentCountMethod = _generatorSettingType.GetMethod("GetComponentCount", Public);
    _frontFaceOnlyField = _generatorSettingType.GetField("FrontFaceOnly", Public);
    _bombGeneratorType = FindType("BombGenerator");
    _bombPrefabOverrideField = _bombGeneratorType.GetField("BombPrefabOverride", Public);
    _bombPrefabPoolField = _bombGeneratorType.GetField("BombPrefabPool", Public);
    _objectPoolType = FindType("ObjectPool");
    _objectsField = _objectPoolType.GetField("Objects", Public);
    _defaultField = _objectPoolType.GetField("Default", Public);
    GetComponent<KMGameInfo>().OnStateChange += delegate (KMGameInfo.State state)
    {
        if (state == KMGameInfo.State.Gameplay)
        {
            bombGenerator = FindObjectOfType(_bombGeneratorType);
            if (_bombPrefabOverrideField.GetValue(bombGenerator) == null) // Don't replace the bomb prefab if there is already one.
            {
                int componentCount = 0;
                bool frontFaceOnly = false;
                string missionID = (string) _missionToLoadField.GetValue(null);
                // Freeplay stores its module count in the freeplay settings;
                // real missions expose it through their generator setting.
                if (missionID == "freeplay")
                {
                    object freeplaySettings = _freeplaySettingsField.GetValue(null);
                    componentCount = (int) freeplaySettings.GetType().GetField("ModuleCount", Public).GetValue(freeplaySettings);
                }
                else
                {
                    object mission = _getMissionMethod.Invoke(_instanceProperty.GetValue(null, null), new object[] { missionID });
                    object generatorSetting = _generatorSettingField.GetValue(mission);
                    frontFaceOnly = (bool) _frontFaceOnlyField.GetValue(generatorSetting);
                    componentCount = (int) _getComponentCountMethod.Invoke(generatorSetting, null);
                }
                componentCount += 1; // We need one spot for the timer as well.
                object prefabPool = _bombPrefabPoolField.GetValue(bombGenerator);
                List<GameObject> gameObjects = (List<GameObject>) _objectsField.GetValue(prefabPool);
                // Map each KMBomb case prefab to its usable anchor count.
                var bombcases = gameObjects
                    .Where(gameobject => gameobject.GetComponent<KMBomb>() != null)
                    .ToDictionary(gameobject => gameobject, gameobject =>
                    {
                        if (!frontFaceOnly)
                        {
                            return gameobject.GetComponent<KMBomb>().Faces.Select(face => face.Anchors.Count).Sum();
                        }
                        else
                        {
                            return gameobject.GetComponent<KMBomb>().Faces[0].Anchors.Count;
                        }
                    });
                // The vanilla case holds 12 modules (6 per face).
                // NOTE(review): this Add throws if the Default prefab is also in
                // Objects (duplicate key), and it makes the Count == 0 check
                // below unreachable — confirm whether that check predates it.
                bombcases.Add((GameObject) _defaultField.GetValue(prefabPool), (!frontFaceOnly ? 12 : 6));
                if (bombcases.Count == 0)
                {
                    return;
                }
                // Prefer the smallest case that fits; otherwise fall back to the biggest available.
                var validBombCases = bombcases.Where(pair => pair.Value >= componentCount);
                if (validBombCases.Count() == 0)
                {
                    _bombPrefabOverrideField.SetValue(bombGenerator, PickBySize(bombcases, bombcases.Max(x => x.Value)));
                }
                else
                {
                    _bombPrefabOverrideField.SetValue(bombGenerator, PickBySize(validBombCases, validBombCases.Min(x => x.Value)));
                }
            }
        }
    };
}
// Of the cases whose anchor count equals the requested size, returns one at random.
GameObject PickBySize(IEnumerable<KeyValuePair<GameObject, int>> bombCases, int size)
{
    var candidates = bombCases.Where(pair => pair.Value == size).Select(pair => pair.Key);
    int pick = new System.Random().Next(candidates.Count());
    return candidates.ElementAt(pick);
}
}
<file_sep>/SoundpackMaker/Assets/SoundpackMaker/SoundpackMaker.cs
using UnityEngine;
using System;
using System.Collections;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Collections.Generic;
[RequireComponent(typeof(KMService))]
public class SoundpackMaker : MonoBehaviour
{
// Root folder for user soundpacks, under Unity's persistent data path.
private string SoundsDirectory
{
    get
    {
        return Path.Combine(Application.persistentDataPath, "Soundpacks");
    }
}
// Logs arbitrary values joined with ", " under the mod's prefix.
void Log(params object[] objects)
{
    string joined = string.Join(", ", objects.Select(obj => Convert.ToString(obj)).ToArray());
    Debug.LogFormat("[SoundpackMaker] " + joined);
}
// string.Format-style logging under the mod's prefix.
void Log(string text, params object[] formatting)
{
    Debug.LogFormat("[SoundpackMaker] " + text, formatting);
}
// Cached enum type for sound-effect name lookups.
Type SoundEffect = typeof(KMSoundOverride.SoundEffect);
// File extensions accepted as audio sources.
string[] audioExtensions = new string[] {
    ".wav",
    ".mp3",
    ".ogg"
};
// Builds an AudioClip from a .wav/.mp3/.ogg file on disk.
// Returns null for unsupported extensions or load failures.
AudioClip MakeAudioClip(string path)
{
    string ext = Path.GetExtension(path);
    if (audioExtensions.Contains(ext))
    {
        try
        {
            if (ext == ".mp3")
            {
                // Unity's WWW loader doesn't decode mp3 on standalone; use NAudio.
                return NAudioPlayer.FromMp3Data(new WWW("file:///" + path).bytes);
            }
            else
            {
                AudioClip clip = new WWW("file:///" + path).GetAudioClipCompressed();
                // NOTE(review): busy-waits on the main thread until decode
                // completes; a file that never reaches Loaded would hang here.
                while (clip.loadState != AudioDataLoadState.Loaded)
                {
                }
                return clip;
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): ex.Source logs the assembly name — presumably
            // ex.Message was intended; kept as-is pending confirmation.
            Log("Failed to load sound file {0} due to Exception: {1}\nStack Trace {2}", path, ex.Source, ex.StackTrace);
        }
    }
    return null;
}
// Case insensitive version of Enum.IsDefined.
// Uses OrdinalIgnoreCase instead of lowercasing both sides on every comparison:
// no per-candidate string allocations, and enum names are ASCII identifiers so
// the comparison semantics are unchanged.
bool IsDefined(Type enumType, string name)
{
    return Enum.GetNames(enumType).Any(enumName => enumName.Equals(name, StringComparison.OrdinalIgnoreCase));
}
// Builds KMSoundOverrides from a soundpack folder. A file named after a sound
// effect overrides that effect with one clip; a directory named after an
// effect overrides it with all clips inside (first becomes the primary clip,
// the rest become AdditionalVariants).
List<KMSoundOverride> LoadSoundpack(string soundpackDirectory)
{
    List<KMSoundOverride> Overrides = new List<KMSoundOverride>();
    // Single-file overrides.
    foreach (string file in Directory.GetFiles(soundpackDirectory))
    {
        string fileName = Path.GetFileNameWithoutExtension(file);
        if (IsDefined(SoundEffect, fileName))
        {
            Log("Creating AudioClip for {0}.", fileName);
            AudioClip clip = MakeAudioClip(file);
            if (clip)
            {
                clip.name = Path.GetFileName(file);
                KMSoundOverride soundOverride = new GameObject().AddComponent<KMSoundOverride>();
                soundOverride.OverrideEffect = (KMSoundOverride.SoundEffect) Enum.Parse(SoundEffect, fileName, true);
                soundOverride.AudioClip = clip;
                Overrides.Add(soundOverride);
            }
            else
            {
                Log("Failed to create an AudioClip for {0}. Skipping.", fileName);
            }
        }
        else
        {
            Log("{0} isn't a valid sound effect. Skipping.", fileName);
        }
    }
    // Directory overrides with multiple variants.
    foreach (string directory in Directory.GetDirectories(soundpackDirectory))
    {
        string dirName = new DirectoryInfo(directory).Name;
        if (IsDefined(SoundEffect, dirName))
        {
            KMSoundOverride soundOverride = new GameObject().AddComponent<KMSoundOverride>();
            soundOverride.OverrideEffect = (KMSoundOverride.SoundEffect) Enum.Parse(SoundEffect, dirName, true);
            List<AudioClip> audioClips = new List<AudioClip>();
            foreach (string file in Directory.GetFiles(directory))
            {
                Log("Creating AudioClip for {0}.", Path.Combine(dirName, Path.GetFileName(file)));
                AudioClip clip = MakeAudioClip(file);
                if (clip)
                {
                    clip.name = Path.GetFileName(file);
                    // First successfully-loaded clip becomes the primary one.
                    if (!soundOverride.AudioClip)
                    {
                        soundOverride.AudioClip = clip;
                    }
                    else
                    {
                        audioClips.Add(clip);
                    }
                }
                else
                {
                    Log("Failed to create an AudioClip for {0}. Skipping.", Path.Combine(dirName, Path.GetFileName(file)));
                }
            }
            // Only register the override if at least one clip loaded.
            if (soundOverride.AudioClip)
            {
                if (audioClips.Count > 0)
                {
                    soundOverride.AdditionalVariants = audioClips.ToArray();
                }
                Overrides.Add(soundOverride);
            }
        }
        else
        {
            Log("{0} isn't a valid sound effect. Skipping.", dirName);
        }
    }
    return Overrides;
}
// The game's Mod instance for this mod, found via ModManager reflection.
// Static so a second component instance skips re-initialization.
private static object realMod = null;
// Loads every enabled soundpack, merges overrides that target the same sound
// effect, and hands the merged overrides to the game via Mod.HandleSoundOverride.
void Start()
{
    if (realMod != null)
        return;
    ModSettings Soundpacks = new ModSettings("EnabledSoundpacks", typeof(List<string>));
    List<string> enabledSoundpacks = (List<string>) Soundpacks.Settings;
    if (!Directory.Exists(SoundsDirectory))
    {
        Log("Created the Soundpacks directory, not loading any soundpacks.");
        Directory.CreateDirectory(SoundsDirectory);
        return;
    }
    // Locate our own Mod object inside ModManager's loadedMods dictionary.
    Type ModManager = ReflectionHelper.FindType("ModManager");
    FieldInfo ModManagerInstanceField = ModManager.GetField("Instance", BindingFlags.Public | BindingFlags.Static);
    FieldInfo ModManagerLoadedModsDictField = ModManager.GetField("loadedMods", BindingFlags.NonPublic | BindingFlags.Instance);
    object ModManagerInstance = ModManagerInstanceField.GetValue(null);
    IDictionary LoadedMods = (IDictionary) ModManagerLoadedModsDictField.GetValue(ModManagerInstance);
    realMod = null;
    foreach (DictionaryEntry kvp in LoadedMods)
    {
        string key = (string) kvp.Key;
        var id = (string) kvp.Value.GetType().GetProperty("ModID", BindingFlags.Public | BindingFlags.Instance).GetValue(kvp.Value, null);
        Log("Key = {0}, ModID = {1}", key, id);
        if (id.Equals("SoundpackMaker"))
        {
            realMod = kvp.Value;
            break;
        }
    }
    if (realMod == null)
        return;
    MethodInfo HandleSoundOverride = realMod.GetType().GetMethod("HandleSoundOverride", BindingFlags.NonPublic | BindingFlags.Instance);
    Dictionary<KMSoundOverride.SoundEffect, KMSoundOverride> soundOverrides = new Dictionary<KMSoundOverride.SoundEffect, KMSoundOverride>();
    // Add the new sound effects.
    foreach (string soundpackName in enabledSoundpacks)
    {
        string soundpackDirectory = Path.Combine(SoundsDirectory, soundpackName);
        if (Directory.Exists(soundpackDirectory))
        {
            Log("Adding soundpack: {0}", soundpackName);
            foreach (KMSoundOverride soundOverride in LoadSoundpack(soundpackDirectory))
            {
                if (soundOverrides.ContainsKey(soundOverride.OverrideEffect))
                {
                    // Same effect seen from an earlier pack: fold this pack's
                    // clips into the existing override's variants.
                    KMSoundOverride sOverride = soundOverrides[soundOverride.OverrideEffect];
                    List<AudioClip> clips = new List<AudioClip>();
                    clips.Add(soundOverride.AudioClip);
                    if (soundOverride.AdditionalVariants != null) clips.AddRange(soundOverride.AdditionalVariants);
                    if (sOverride.AdditionalVariants != null) clips.AddRange(sOverride.AdditionalVariants);
                    sOverride.AdditionalVariants = clips.ToArray();
                    soundOverride.AdditionalVariants = null;
                    soundOverride.AudioClip = null;
                    Destroy(soundOverride);
                }
                else
                {
                    soundOverrides[soundOverride.OverrideEffect] = soundOverride;
                }
            }
        }
        else
        {
            Log("There is no soundpack called \"{0}\"", soundpackName);
        }
    }
    foreach (KMSoundOverride soundOverride in soundOverrides.Values)
    {
        HandleSoundOverride.Invoke(realMod, new object[] { soundOverride });
    }
}
}
<file_sep>/Synchronization/Assets/Synchronization/SynchronizationModule.cs
using System;
using System.Linq;
using System.Collections;
using System.Collections.Generic;
using Random = UnityEngine.Random;
using UnityEngine;
using System.Text.RegularExpressions;
[RequireComponent(typeof(KMBombModule))]
[RequireComponent(typeof(KMBombInfo))]
[RequireComponent(typeof(KMAudio))]
public class SynchronizationModule : MonoBehaviour
{
// Inspector-assigned module hookups.
public KMBombModule Module;
public KMBombInfo BombInfo;
public KMAudio Audio;
public KMSelectable SyncButton;
public TextMesh DisplayText;
public GameObject[] LightObjects;
// Shared host for the nested Light class's coroutines (set in Start()).
static MonoBehaviour MonoBehaviour;
// Base on-time per flash, in seconds.
const float FlashingSpeed = 0.3f;
int DisplayNumber;          // The 1-9 digit shown on the module.
bool Solved = false;
int SelectedSpeed = 0;      // Speed group currently highlighted (0 = none).
int[] SyncMethod;           // [order rule, state rule] decoded from the chart.
static int idCounter = 1;
int moduleID;
Light[] Lights;             // 3x3 grid in row-major order.
// One of the nine module lights: owns its material state, selection highlight,
// flash speed and flashing coroutine (run on the outer module's MonoBehaviour).
class Light
{
    bool _state = true;         // Lit (true) or dark (false).
    Color _color = Color.white;
    Material lightMat;
    Coroutine flashingCoroutine;
    public GameObject gameObject;
    public GameObject selection;    // Highlight shown while this speed group is selected.
    public int speed = 0;           // 1-5 flash speed; 0 = never flashes.
    // Random phase offset so lights of the same speed don't start in lockstep.
    public float randomDelay = Random.value * FlashingSpeed;
    public Light(GameObject light)
    {
        lightMat = light.GetComponent<Renderer>().material;
        gameObject = light;
        selection = light.transform.Find("Selection").gameObject;
    }
    // Pushes the current state/color into the shader.
    void UpdateMat()
    {
        lightMat.SetFloat("_Blend", _state ? 1f : 0f);
        lightMat.SetColor("_LitColor", _color);
    }
    public bool state
    {
        set
        {
            _state = value;
            UpdateMat();
        }
        get
        {
            return _state;
        }
    }
    public Color color
    {
        set
        {
            _color = value;
            UpdateMat();
        }
    }
    // On for FlashingSpeed, then off for (6 - speed) * FlashingSpeed — higher
    // speed means a shorter dark period.
    IEnumerator Flash()
    {
        yield return new WaitForSeconds(randomDelay);
        while (true)
        {
            state = true;
            yield return new WaitForSeconds(FlashingSpeed);
            state = false;
            yield return new WaitForSeconds((6 - speed) * FlashingSpeed);
        }
    }
    public void StartFlashing()
    {
        // Only speed > 0 lights flash; never start a second coroutine.
        if (speed > 0 && flashingCoroutine == null)
        {
            flashingCoroutine = MonoBehaviour.StartCoroutine(Flash());
        }
    }
    public void StopFlashing()
    {
        if (flashingCoroutine != null)
        {
            MonoBehaviour.StopCoroutine(flashingCoroutine);
            flashingCoroutine = null;
        }
    }
}
// Runs the given action on every light currently at the given flash speed.
void ApplyToSpeed(int speed, Action<Light> action)
{
    foreach (Light light in Lights.Where(l => l.speed == speed))
    {
        action(light);
    }
}
// Prefixed per-module logging.
void Log(object data)
{
    Debug.LogFormat("[Synchronization #{0}] {1}", moduleID, data);
}
// Format-string overload; formats first, then routes through the plain overload.
void Log(object data, params object[] formatting)
{
    string message = string.Format(data.ToString(), formatting);
    Log(message);
}
// Module setup: cache the coroutine host, wrap the light GameObjects, pick and
// show the display digit, play the intro, and defer rule setup to Activate.
void Start()
{
    MonoBehaviour = this;
    moduleID = idCounter++;
    Lights = LightObjects.Select(obj => new Light(obj)).ToArray();
    // Random.Range's int overload excludes the max, so this is 1-9 inclusive.
    DisplayNumber = Random.Range(1, 10);
    DisplayText.text = DisplayNumber.ToString();
    Log("Displayed a {0}", DisplayNumber);
    StartCoroutine(Startup());
    Module.OnActivate += Activate;
}
// Builds the click handler for one light. First click selects that light's
// speed group; clicking the same group deselects; clicking a different group
// attempts to sync the two groups (strike on an invalid sync).
KMSelectable.OnInteractHandler SetupInteraction(Light light)
{
    return delegate ()
    {
        // Non-flashing lights and solved modules ignore clicks.
        if (light.speed == 0 || Solved) return false;
        light.gameObject.GetComponent<KMSelectable>().AddInteractionPunch(0.5f);
        Audio.PlayGameSoundAtTransform(KMSoundOverride.SoundEffect.ButtonPress, transform);
        if (SelectedSpeed == 0)
        {
            // Nothing selected yet: highlight this speed group and freeze it.
            ApplyToSpeed(light.speed, l =>
            {
                l.selection.SetActive(true);
                l.StopFlashing();
            });
            SelectedSpeed = light.speed;
        }
        else
        {
            if (SelectedSpeed == light.speed)
            {
                // Clicked the selected group again: deselect and resume flashing.
                ApplyToSpeed(light.speed, l =>
                {
                    l.selection.SetActive(false);
                    l.StartFlashing();
                });
                SelectedSpeed = 0;
            }
            else
            {
                bool valid = ValidateSync(Lights.First(l => l.speed == SelectedSpeed), light);
                if (valid)
                {
                    Log("Successfully synced {0} and {1}.", light.speed, SelectedSpeed);
                    // Restart the target group so both groups share a phase,
                    // then absorb the selected group into the target's speed.
                    ApplyToSpeed(light.speed, l =>
                    {
                        l.StopFlashing();
                        l.StartFlashing();
                    });
                    ApplyToSpeed(SelectedSpeed, l =>
                    {
                        l.randomDelay = light.randomDelay;
                        l.speed = light.speed;
                        l.selection.SetActive(false);
                        l.StartFlashing();
                    });
                }
                else
                {
                    // Invalid sync: strike and restore the selected group.
                    Module.HandleStrike();
                    ApplyToSpeed(SelectedSpeed, l =>
                    {
                        l.selection.SetActive(false);
                        l.StartFlashing();
                    });
                }
                SelectedSpeed = 0;
            }
        }
        return false;
    };
}
// State carried between syncs so the Alt and Opp rules can compare against the
// first successful sync.
bool firstSyncDone = false;
bool altRuleFirstState = false;  // Light state of the first light on the previous sync.
int oppRuleFirstSpeed = 0;       // Speed of the second light on the previous sync.
// Checks whether syncing lightA (selected first) onto lightB obeys the order
// rule (SyncMethod[0]) and state rule (SyncMethod[1]) decoded from the chart.
bool ValidateSync(Light lightA, Light lightB)
{
    int[] orderedSpeeds = Lights.Select(l => l.speed).Where(s => s != 0).Distinct().OrderBy(s => s).ToArray();
    // Only one distinct speed left: nothing remains to sync against.
    if (orderedSpeeds.Length == 1) return false;
    /* Order:
     * Asc = 0
     * Des = 1
     * Opp = 2
     * State:
     * + = 0
     * - = 1
     * Alt = 2
     */
    switch (SyncMethod[0])
    {
        case 0:
            if (lightA.speed != orderedSpeeds[0] || lightB.speed != orderedSpeeds[1]) return false;
            break;
        case 1:
            if (lightA.speed != orderedSpeeds[orderedSpeeds.Length - 1] || lightB.speed != orderedSpeeds[orderedSpeeds.Length - 2]) return false;
            break;
        case 2:
            if (firstSyncDone && lightB.speed != oppRuleFirstSpeed) return false; // The second light you select will always have the same speed.
            if ((lightA.speed != orderedSpeeds[0] || lightB.speed != orderedSpeeds[orderedSpeeds.Length - 1]) && // Check if they have selected either slowest with fastest or fastest with slowest.
                (lightA.speed != orderedSpeeds[orderedSpeeds.Length - 1] || lightB.speed != orderedSpeeds[0])) return false;
            break;
    }
    switch (SyncMethod[1])
    {
        case 0:
            if (lightA.state == false || lightB.state == false) return false;
            break;
        case 1:
            if (lightA.state == true || lightB.state == true) return false;
            break;
        case 2:
            if (firstSyncDone && lightA.state != altRuleFirstState) return false; // Make sure they keep alternating
            if (lightA.state == lightB.state) return false;
            altRuleFirstState = lightA.state;
            break;
    }
    // Gather info for alt rule and opp rule.
    altRuleFirstState = lightA.state;
    oppRuleFirstSpeed = lightB.speed;
    firstSyncDone = true;
    return true;
}
// Human-readable names for the order/state rule codes (used in logging).
string[] orders = new[] { "Asc", "Des", "Opp" };
string[] states = new[] { "+", "-", "Alt" };
// Manual chart: chart[row][column] = { order rule, state rule }.
int[][][] chart = new int[][][]
{
    new[] {new[] {1, 1}, new[] {0, 1}, new[] {2, 2}, new[] {0, 2}, new[] {2, 2}, new[] {0, 1}, new[] {2, 1}, new[] {2, 1}, new[] {2, 0}},
    new[] {new[] {0, 0}, new[] {2, 2}, new[] {1, 2}, new[] {1, 0}, new[] {1, 2}, new[] {1, 0}, new[] {0, 1}, new[] {0, 0}, new[] {0, 2}},
    new[] {new[] {1, 1}, new[] {1, 2}, new[] {2, 1}, new[] {2, 0}, new[] {1, 0}, new[] {0, 2}, new[] {0, 0}, new[] {1, 1}, new[] {2, 0}}
};
int[] lightToCol = new int[] { 0, 1, 2, 7, 8, 3, 6, 5, 4 }; // Since the chart columns are in a different order than my light indexes
// Chart-walk direction associated with each light position (row-major 3x3).
Vector2[] lightToDir = new Vector2[] {
    new Vector2(-1, -1), new Vector2(0, -1), new Vector2(1, -1),
    new Vector2(-1, 0), new Vector2(0, 0), new Vector2(1, 0),
    new Vector2(-1, 1), new Vector2(0, 1), new Vector2(1, 1)
};
// Runs when the bomb activates: wires up the submit button and light handlers,
// assigns five random speeds to five random lights, then walks the chart to
// determine the required sync order and state rules.
void Activate()
{
    SyncButton.OnInteract += delegate ()
    {
        // Submit is only legal once every flashing light shares one speed.
        if (Lights.Where(l => l.speed != 0).Select(l => l.speed).Distinct().Count() == 1 && !Solved)
        {
            SyncButton.AddInteractionPunch(0.5f);
            Audio.PlayGameSoundAtTransform(KMSoundOverride.SoundEffect.ButtonPress, transform);
            // Solve only if the current seconds digit contains the displayed number.
            if (((int) BombInfo.GetTime() % 60).ToString().Contains(DisplayNumber.ToString()))
            {
                Module.HandlePass();
                Solved = true;
                foreach (Light light in Lights)
                {
                    light.StopFlashing();
                    light.state = true;
                }
                StartCoroutine(PlayWinAnimation());
            }
            else
            {
                Module.HandleStrike();
            }
        }
        return false;
    };
    foreach (Light l in Lights)
    {
        l.gameObject.GetComponent<KMSelectable>().OnInteract += SetupInteraction(l);
    }
    // Deal speeds 1-5 to five distinct lights; the rest stay static (speed 0).
    List<int> speeds = new List<int>() { 1, 2, 3, 4, 5 };
    List<int> lightIndexes = new List<int>() { 0, 1, 2, 3, 4, 5, 6, 7, 8 };
    for (int i = 0; i < 5; i++)
    {
        Lights[ExtractRandom(lightIndexes)].speed = ExtractRandom(speeds);
    }
    foreach (int lightIndex in lightIndexes)
    {
        Lights[lightIndex].state = true;
    }
    foreach (Light light in Lights)
    {
        light.StartFlashing();
    }
    Log("Light speeds:\n{0} {1} {2}\n{3} {4} {5}\n{6} {7} {8}", Lights.Select(l => (object) l.speed).ToArray());
    // Find which way the user needs to sync
    int slowestLight = Array.IndexOf(Lights, Lights.Where(l => l.speed != 0).Aggregate((l1, l2) => l1.speed < l2.speed ? l1 : l2));
    // Start cell: column from the slowest light's position, row from the display digit.
    Vector2 chartPos = new Vector2(
        lightToCol[slowestLight],
        Mathf.FloorToInt((DisplayNumber - 1) / 3)
    );
    Log("Started at column {0}, row {1}", chartPos.x + 1, chartPos.y + 1);
    // Walk in the slowest light's direction, scaled by the center light's speed.
    chartPos += lightToDir[slowestLight] * Lights[4].speed;
    chartPos.x = WrapInt((int) chartPos.x, 8);
    chartPos.y = WrapInt((int) chartPos.y, 2);
    Log("Ended at column {0}, row {1}", chartPos.x + 1, chartPos.y + 1);
    SyncMethod = chart[(int) chartPos.y][(int) chartPos.x];
    Log("Lights need to be synced in {0} {1} order", orders[SyncMethod[0]], states[SyncMethod[1]]);
}
// Intro animation: trace a random pattern across the grid lighting each light,
// pause, then trace it again turning them off.
IEnumerator Startup()
{
    yield return new WaitForSeconds(1);
    int[][] patterns = new int[][] {
        new[] { 2, 1, 0, 3, 4, 5, 8, 7, 6 },
        new[] { 7, 4, 3, 5, 0, 2 },
        new[] { 6, 3, 0, 4, 8, 5, 2 },
        new[] { 2, 1, 0, 3, 6, 7, 8 },
        new[] { 0, 1, 2, 5, 8, 7, 6, 3 },
        new[] { 0, 1, 2, 4, 6, 7, 8 }
    };
    int[] chosen = patterns[Random.Range(0, patterns.Length)];
    foreach (bool lit in new[] { true, false })
    {
        foreach (int light in chosen)
        {
            Lights[light].state = lit;
            yield return new WaitForSeconds(0.1f);
        }
        if (lit)
        {
            yield return new WaitForSeconds(0.5f);
        }
    }
    yield return new WaitForSeconds(1);
}
// Solve animation: fade concentric rings of lights from white to green,
// starting at the center and moving outward.
IEnumerator PlayWinAnimation()
{
    int[][] rings = {
        new[] { 4 },
        new[] { 1, 3, 5, 7 },
        new[] { 0, 2, 6, 8 },
    };
    foreach (int[] ring in rings)
    {
        for (int step = 1; step <= 3; step++)
        {
            float blend = (float) step / 3;
            foreach (int light in ring)
            {
                Lights[light].color = Color.Lerp(Color.white, Color.green, blend);
            }
            yield return new WaitForSeconds(0.05f);
        }
    }
}
/*int[][][] WinningAnimations = {
new[] {
new[] { 4 },
new[] { 1, 3, 4, 5, 7 },
new[] { 0, 1, 2, 3, 5, 6, 7, 8 },
new[] { 0, 2, 6, 8 }
},
new[] {
new[] { 0, 1, 2 },
new[] { 2, 4, 6 },
new[] { 2, 5, 8 },
new[] { 0, 4, 8 },
new[] { 6, 7, 8 },
new[] { 6, 4, 2 },
new[] { 0, 3, 6 },
new[] { 0, 4, 8 },
new[] { 0, 1, 2 },
},
new[] {
new[] { 6 },
new[] { 3, 7 },
new[] { 0, 4, 8 },
new[] { 3, 1, 5 },
new[] { 6, 4, 2 },
new[] { 3, 7, 5 },
new[] { 0, 4, 8 },
new[] { 1, 5 },
new[] { 2 },
},
};
IEnumerator PlayWinAnimation()
{
int[][] animation = WinningAnimations[Random.Range(0, WinningAnimations.Length)];
foreach (int[] frame in animation)
{
int index = 0;
foreach (Light light in Lights)
{
light.color = frame.Contains(index) ? Color.green : Color.white;
index++;
}
yield return new WaitForSeconds(0.25f);
}
foreach (Light light in Lights) light.color = Color.white;
}*/
// Removes and returns a uniformly random element of `list`.
// Mutates the list (the chosen element is taken out).
T ExtractRandom<T>(List<T> list)
{
    int index = Random.Range(0, list.Count);
    T value = list[index];
    list.RemoveAt(index);
    return value;
}
// Wraps an arbitrary integer into the inclusive range [0, b]
// (i.e. modulo b + 1, matching the original's positive-side behavior).
// BUG FIX: the original used `a += b` for negative inputs but
// `a -= b + 1` for positive ones — two different moduli — so e.g.
// WrapInt(-1, 8) returned 7 instead of 8. Both directions now use the
// same modulus.
int WrapInt(int a, int b)
{
    int m = b + 1;
    return ((a % m) + m) % m;
}
// Reports whether `obj` equals any of the supplied candidate values.
private bool EqualsAny(object obj, params object[] targets)
{
    foreach (object candidate in targets)
    {
        if (Equals(candidate, obj))
        {
            return true;
        }
    }
    return false;
}
// Parses a human description of a light position ("top left", "tm",
// "centre middle", ...) into a 1-based light number (1..9), or null
// when the text cannot be understood.
int? StringToLight(string light)
{
    // Normalize synonyms down to the single letters t/b/m and l/r/m.
    Dictionary<string, string> replacements = new Dictionary<string, string>()
    {
        { "center", "middle" },
        { "centre", "middle" },
        { "middle", "m" },
        { "top", "t" },
        { "bottom", "b" },
        { "left", "l" },
        { "right", "r" }
    };
    foreach (var replacement in replacements)
    {
        light = light.Replace(replacement.Key, replacement.Value);
    }
    // Accept column-row order too, by swapping it into row-column.
    light = new Regex("([lrm])([tbm]{1})").Replace(light, "$2$1");
    // Map each two-letter position onto its 1-based light number.
    string[] buttonPositions = new[] { "tl", "tm", "tr", "ml", "mm", "mr", "bl", "bm", "br" };
    int pos = 1;
    foreach (string name in buttonPositions)
    {
        light = light.Replace(name, pos.ToString());
        pos++;
    }
    int lightInt;
    // A valid description has collapsed to a single digit 1..9 by now.
    if (light.Length == 1 && int.TryParse(light, out lightInt))
    {
        if (lightInt == 0) return null;
        return lightInt;
    }
    return null;
}
// Twitch Plays handler. Both supported commands start with "sync":
//   "sync at|on <digit>" – press the sync button when the bomb timer's
//                          seconds value contains that digit.
//   "sync <light> on|off|+|-|t|f|true|false"
//                        – wait until the named light is in the given
//                          state, then press it (toggling it).
public IEnumerator ProcessTwitchCommand(string command)
{
    string[] split = command.ToLowerInvariant().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
    if (split[0] == "sync" && split.Length == 3)
    {
        if (EqualsAny(split[1], "at", "on") && split[2].Length == 1)
        {
            int seconds;
            // TryParse only validates that the argument is a digit; the
            // timer match below works on the string itself.
            if (int.TryParse(split[2], out seconds))
            {
                yield return "trycancel";
                yield return new WaitUntil(() => ((int) BombInfo.GetTime() % 60).ToString().Contains(split[2]));
                SyncButton.OnInteract();
                yield return new WaitForSeconds(0.1f);
            }
        }
        else if (EqualsAny(split[2], "on", "+", "true", "t", "off", "-", "false", "f"))
        {
            int? possibleLight = StringToLight(split[1]);
            if (possibleLight != null)
            {
                int lightIndex = (int) possibleLight - 1;
                bool lightState = EqualsAny(split[2], "on", "+", "true", "t");
                // Lights with speed 0 never change state; ignore them.
                if (Lights[lightIndex].speed == 0) yield break;
                yield return "trycancel";
                yield return new WaitUntil(() => Lights[lightIndex].state == lightState);
                Lights[lightIndex].gameObject.GetComponent<KMSelectable>().OnInteract();
                yield return new WaitForSeconds(0.1f);
            }
        }
    }
}
}
<file_sep>/Cheap Checkout/Assets/CheapCheckoutModule/CheapCheckoutModule.cs
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Random = UnityEngine.Random;
public class CheapCheckoutModule : MonoBehaviour
{
public GameObject[] Amounts;
public KMSelectable Submit;
public KMSelectable Clear;
public GameObject ItemText;
public GameObject PriceText;
public KMSelectable MoveLeft;
public KMSelectable MoveRight;
public KMAudio BombAudio;
public KMBombModule BombModule;
int DisplayPos = 0;
List<string> Items = new List<string>();
decimal Total = 0;
decimal Paid = 0;
decimal Display = 0;
decimal Change = 0;
string DOW = "";
bool waiting = false;
bool solved = false;
List<List<string>> Receipt = new List<List<string>>();
static int idCounter = 1;
int moduleID;
Dictionary<string, decimal> Prices = new Dictionary<string, decimal>()
{
{"Candy Canes", 3.51m},
{"Socks", 6.97m},
{"Lotion", 7.97m},
{"Cheese", 4.49m},
{"Mints", 6.39m},
{"Grape Jelly", 2.98m},
{"Honey", 8.25m},
{"Sugar", 2.08m},
{"Soda", 2.05m},
{"Tissues", 3.94m},
{"White Bread", 2.43m},
{"Canola Oil", 2.28m},
{"Mustard", 2.36m},
{"Deodorant", 3.97m},
{"White Milk", 3.62m},
{"Pasta Sauce", 2.30m},
{"Lollipops", 2.61m},
{"Cookies", 2.00m},
{"Paper Towels", 9.46m},
{"Tea", 2.35m},
{"Coffee Beans", 7.85m},
{"Mayonnaise", 3.99m},
{"Chocolate Milk", 5.68m},
{"Fruit Punch", 2.08m},
{"Potato Chips", 3.25m},
{"Shampoo", 4.98m},
{"Toothpaste", 2.50m},
{"Peanut Butter", 5.00m},
{"Gum", 1.12m},
{"Water Bottles", 9.37m},
{"Spaghetti", 2.92m},
{"Chocolate Bar", 2.10m},
{"Ketchup", 3.59m},
{"Cereal", 4.19m},
};
Dictionary<string, decimal> PricesLB = new Dictionary<string, decimal>()
{
{"Turkey", 2.98m},
{"Chicken", 1.99m},
{"Steak", 4.97m},
{"Pork", 4.14m},
{"Lettuce", 1.10m},
{"Potatoes", 0.68m},
{"Tomatoes", 1.80m},
{"Broccoli", 1.39m},
{"Oranges", 0.80m},
{"Lemons", 1.74m},
{"Bananas", 0.87m},
{"Grapefruit", 1.08m},
};
string[] Fruits = { "Bananas", "Grapefruit", "Lemons", "Oranges", "Tomatoes" };
string[] Sweets = { "Candy Canes", "Mints", "Honey", "Soda", "Lollipops", "Gum", "Chocolate Bar", "Fruit Punch", "Cookies", "Sugar", "Grape Jelly" };
// Writes a log line tagged with this module instance's id so that
// several simultaneous Cheap Checkout modules can be told apart.
void DebugMsg(string msg)
{
    Debug.Log(string.Format("[Cheap Checkout #{0}] {1}", moduleID, msg));
}
// Mathematical modulo: unlike C#'s `%`, the result is normalized by a
// second `%` so negative x still maps into the expected residue class.
int mod(int x, int m)
{
    int remainder = x % m;
    return (remainder + m) % m;
}
// Standard button feedback: a small interaction punch plus the stock
// button-press sound.
void ButtonPress(KMSelectable Selectable)
{
    Selectable.AddInteractionPunch(0.5f);
    BombAudio.PlayGameSoundAtTransform(KMSoundOverride.SoundEffect.ButtonPress, transform);
}
// Coroutine helper: invokes `func` once after `time` seconds.
// The bool return value of `func` is ignored.
IEnumerator Wait(float time, Func<bool> func)
{
    yield return new WaitForSeconds(time);
    func();
}
// Returns the TextMesh of the given button's "ButtonText" child.
TextMesh GetTextMesh(GameObject Object)
{
    return Object.transform.Find("ButtonText").gameObject.GetComponent<TextMesh>();
}
// Refreshes both displays: the price readout (entered change in yellow,
// otherwise the customer's payment in white) and the currently selected
// item name. DisplayPos is clamped into the valid item range.
void UpdateDisplay()
{
    if (!waiting)
    {
        TextMesh PriceMesh = PriceText.GetComponent<TextMesh>();
        if (Change > 0)
        {
            PriceMesh.text = "$" + Change.ToString("N2");
            PriceMesh.color = Color.yellow;
        }
        else
        {
            PriceMesh.text = "$" + Display.ToString("N2");
            PriceMesh.color = Color.white;
        }
    }
    // BUG FIX: before OnActivate has populated Items, Count - 1 is -1,
    // so the original clamped DisplayPos to -1 and Items[DisplayPos]
    // threw ArgumentOutOfRangeException. Skip the item readout instead.
    if (Items.Count == 0) return;
    DisplayPos = Math.Min(Math.Max(DisplayPos, 0), Items.Count - 1);
    ItemText.GetComponent<TextMesh>().text = Items[DisplayPos];
}
// Prices one item, applies the current day-of-week sale rule, and
// appends a receipt line: [item, base price, adjustment, final price].
//   item  – key into Prices (fixed price) or PricesLB (per-pound)
//   lbs   – weight for per-pound items; <= 0 means a fixed-price item
//   index – 1-based position of the item in the order
// Returns the final price rounded to cents (away from zero).
decimal ApplySale(string item, decimal lbs, int index)
{
    decimal price = decimal.Round(lbs > 0 ? PricesLB[item] * lbs : Prices[item], 2, MidpointRounding.AwayFromZero);
    bool fixeditem = (lbs <= 0);
    List<string> line = new List<string>();
    if (fixeditem)
    {
        line.Add(item);
    }
    else
    {
        line.Add(lbs + "lb of " + item);
    }
    line.Add("$" + price.ToString("N2").PadLeft(5));
    switch (DOW)
    {
        case "Sunday":
            // Fixed-price items whose name contains an 's' cost $2.15 extra.
            if (fixeditem && item.ToLower().IndexOf("s") > -1)
            {
                price += 2.15m;
                line.Add("+2.15");
            }
            break;
        case "Monday":
            // 15% off the 1st, 3rd and 6th items in the order.
            if (index == 1 || index == 3 || index == 6)
            {
                price *= 0.85m;
                line.Add("-15%");
            }
            break;
        case "Tuesday":
            if (fixeditem)
            {
                // Convert to string -> Remove decimal -> Convert to decimal -> Apply digital root.
                price += (decimal.Parse(price.ToString().Replace(".", "")) - 1) % 9 + 1;
                line.Add("dgt rt");
            }
            break;
        case "Wednesday":
            // Swap the highest and lowest of the last three digits
            // (ones/tenths/hundredths) wherever those digit characters
            // occur in the formatted price.
            // NOTE(review): digits in higher places are swapped too when
            // they happen to match — confirm this matches the manual.
            int a = (int)(price % 10),
                b = (int)(price * 10) % 10,
                c = (int)(price * 100) % 10;
            string highest = Math.Max(Math.Max(a, b), c).ToString();
            string lowest = Math.Min(Math.Min(a, b), c).ToString();
            var result = price.ToString("N2").Select(x => x.ToString() == highest ? lowest : (x.ToString() == lowest ? highest : x.ToString())).ToArray();
            price = decimal.Parse(string.Join("", result));
            line.Add(highest + " <-> " + lowest);
            break;
        case "Thursday":
            // Odd-positioned items (1st, 3rd, 5th) are half price.
            if (index % 2 == 1)
            {
                price *= 0.5m;
                line.Add("-50%");
            }
            break;
        case "Friday":
            // Per-pound fruits cost 25% more.
            if (!fixeditem && Array.IndexOf(Fruits, item) > -1)
            {
                price *= 1.25m;
                line.Add("+25%");
            }
            break;
        case "Saturday":
            // Fixed-price sweets are 35% off.
            if (fixeditem && Array.IndexOf(Sweets, item) > -1)
            {
                price *= 0.65m;
                line.Add("-35%");
            }
            break;
        default:
            DebugMsg("Somehow you aren't using a day of the week. Automatically solving.");
            BombModule.HandlePass();
            break;
    }
    // Keep the receipt's adjustment column aligned when no rule applied.
    if (line.Count == 2)
    {
        line.Add("");
    }
    var final = decimal.Round(price, 2, MidpointRounding.AwayFromZero);
    line.Add("$" + final.ToString("N2").PadLeft(5));
    Receipt.Add(line);
    return final;
}
// Logs the whole order as an aligned, receipt-style table followed by
// TOTAL / PAID / CHANGE-or-DUE summary lines.
void BuildReceipt()
{
    // Column widths: the widest entry per column across all lines.
    var width = new int[4];
    foreach (List<string> line in Receipt)
    {
        int index = 0;
        foreach (string var in line)
        {
            width[index] = Math.Max(var.Length, width[index]);
            index++;
        }
    }
    // Render each receipt line with three spaces between columns.
    var receipt = "";
    foreach (List<string> line in Receipt)
    {
        int index = 0;
        foreach (string var in line)
        {
            receipt += var.PadRight(width[index]);
            if (index < line.Count - 1)
            {
                receipt += "   ";
            }
            index++;
        }
        receipt += "\n";
    }
    int padding = width.Sum() + 9;
    receipt += new string('─', padding) + "\n";
    receipt += string.Format("{0}${1,5:N2}\n{2}${3,5:N2}\n{4}${5,5:N2}",
        "TOTAL".PadRight(padding - 6), Total,
        "PAID".PadRight(padding - 6), Paid,
        (Paid - Total > 0 ? "CHANGE" : "DUE").PadRight(padding - 6), Math.Abs(Paid - Total));
    DebugMsg("Receipt:\n" + receipt);
}
// Simulates the customer fetching more money: blocks input for ~3 s
// while "One Second..." animates, then reveals the updated payment.
IEnumerator waitForCustomer()
{
    waiting = true;
    for (int i = 0; i < 2; i++)
    {
        for (int n = 0; n <= 3; n++)
        {
            PriceText.GetComponent<TextMesh>().text = "One Second" + new string('.', n);
            yield return new WaitForSeconds(0.375f);
        }
    }
    // Paid was already re-rolled higher in OnActivate; show it now.
    Display = Paid;
    waiting = false;
    UpdateDisplay();
}
// Full module setup, run when the bomb activates:
//  1. pick today's sale rule,
//  2. generate 4 fixed-price and 2 per-pound items and total them,
//  3. roll the customer's payment (possibly an underpayment),
//  4. wire up all button handlers.
void OnActivate()
{
    DOW = DateTime.Now.DayOfWeek.ToString();
    DebugMsg("Sale is based on " + DOW + ".");
    // Four distinct fixed-price items.
    List<string> Possible = new List<string>(Prices.Keys);
    for (int i = 0; i < 4; i++)
    {
        var item = Possible[Random.Range(0, Possible.Count)];
        Items.Add(item);
        Possible.Remove(item);
        decimal dollars = ApplySale(item, 0, Items.Count);
        Total += dollars;
    }
    // Two distinct per-pound items at 0.5, 1.0 or 1.5 lb.
    Possible = new List<string>(PricesLB.Keys);
    for (int i = 0; i < 2; i++)
    {
        var item = Possible[Random.Range(0, Possible.Count)];
        var lb = Random.Range(1, 4) * 0.5m;
        Items.Add(lb + "lb " + item);
        Possible.Remove(item);
        decimal dollars = ApplySale(item, lb, Items.Count);
        Total += dollars;
    }
    Paid = decimal.Round(Total + (decimal)Random.Range(-(float)Total / 2, (float)Total / 2));
    if (Total > Paid)
    {
        // Underpayment: keep showing the low amount; the real (higher)
        // Paid is only revealed after the defuser alerts the customer
        // by submitting with no change entered (see Submit handler).
        Display = Paid;
        DebugMsg("Customer underpaid with $" + Paid.ToString());
        Paid = decimal.Round(Total + (decimal)Random.Range(0f, (float)Total / 2)) + 1m;
    }
    else
    {
        Display = Paid;
    }
    BuildReceipt();
    UpdateDisplay();
    // Denomination buttons: add their value to the entered change.
    foreach (GameObject button in Amounts)
    {
        GameObject Button = button;
        KMSelectable ButtonSelectable = button.GetComponent<KMSelectable>() as KMSelectable;
        ButtonSelectable.OnInteract += delegate ()
        {
            if (!waiting)
            {
                ButtonPress(ButtonSelectable);
                // Labels longer than 2 chars (e.g. "0.25") are coins.
                string text = GetTextMesh(Button).text;
                if (text.Length > 2)
                {
                    // NOTE(review): Random.Range(int, int) excludes the
                    // max, so this always plays "coin_drop1" — confirm
                    // whether a second coin sound was intended.
                    BombAudio.PlaySoundAtTransform("coin_drop" + Random.Range(1, 2), transform);
                }
                else
                {
                    BombAudio.PlaySoundAtTransform("count_bill" + Random.Range(1, 5), transform);
                }
                // "0" prefix lets ".25"-style labels parse as 0.25.
                Change += decimal.Parse("0" + text);
                UpdateDisplay();
            }
            else
            {
                BombAudio.PlayGameSoundAtTransformWithRef(KMSoundOverride.SoundEffect.CapacitorPop, transform);
            }
            return false;
        };
    }
    MoveLeft.OnInteract += delegate ()
    {
        ButtonPress(MoveLeft);
        DisplayPos--;
        UpdateDisplay();
        return false;
    };
    MoveRight.OnInteract += delegate ()
    {
        ButtonPress(MoveRight);
        DisplayPos++;
        UpdateDisplay();
        return false;
    };
    Submit.OnInteract += delegate ()
    {
        if (!waiting)
        {
            ButtonPress(Submit);
            if (Total > Display)
            {
                // Customer underpaid: submitting with no change alerts
                // them; submitting change here is a strike.
                if (Change == 0)
                {
                    StartCoroutine(waitForCustomer());
                }
                else
                {
                    DebugMsg("Change was submitted when the customer should have been alerted.");
                    BombModule.HandleStrike();
                }
            }
            else
            {
                DebugMsg("Changed entered: $" + Change.ToString("N2"));
                if (Change == Paid - Total && !solved)
                {
                    solved = true;
                    waiting = true;
                    PriceText.GetComponent<TextMesh>().color = Color.green;
                    BombAudio.PlaySoundAtTransform("module_solved", transform);
                    StartCoroutine(Wait(3f, () =>
                    {
                        DebugMsg("Module solved!");
                        BombModule.HandlePass();
                        return true;
                    }));
                }
                else
                {
                    PriceText.GetComponent<TextMesh>().color = Color.red;
                    StartCoroutine(Wait(1.5f, () =>
                    {
                        UpdateDisplay();
                        return true;
                    }));
                    BombModule.HandleStrike();
                }
            }
            Change = 0m;
        }
        else
        {
            BombAudio.PlayGameSoundAtTransformWithRef(KMSoundOverride.SoundEffect.CapacitorPop, transform);
        }
        return false;
    };
    Clear.OnInteract += delegate ()
    {
        if (!waiting)
        {
            ButtonPress(Clear);
            Change = 0m;
            UpdateDisplay();
        }
        else
        {
            BombAudio.PlayGameSoundAtTransformWithRef(KMSoundOverride.SoundEffect.CapacitorPop, transform);
        }
        return false;
    };
}
// Unity entry point: claim a unique log id and defer the real setup to
// OnActivate via the bomb module's activation callback.
void Start()
{
    moduleID = idCounter++;
    BombModule.OnActivate += OnActivate;
}
// Twitch Plays handler. Supported commands:
//   "clear"           – reset the entered change
//   "items"           – scroll through all six purchased items
//   "submit" / "slap" – press the submit button
//   "submit <amount>" – enter an exact amount (< 200, max 2 decimals)
//                       greedily from largest denominations, then submit
public IEnumerator ProcessTwitchCommand(string command)
{
    string[] split = command.ToLowerInvariant().Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
    if (split.Length == 1)
    {
        if (split[0] == "clear")
        {
            yield return null;
            Clear.OnInteract();
        }
        else if (split[0] == "items")
        {
            yield return null;
            // Walk right through all items, pausing so chat can read
            // each one, then scroll quickly back to the start.
            for (int i = 0; i < 5; i++)
            {
                yield return new WaitForSeconds(1.5f);
                MoveRight.OnInteract();
            }
            yield return new WaitForSeconds(2f);
            for (int i = 0; i < 5; i++)
            {
                MoveLeft.OnInteract();
                yield return new WaitForSeconds(0.1f);
            }
        }
        else if (split[0] == "submit" || split[0] == "slap")
        {
            yield return null;
            Submit.OnInteract();
        }
    }
    else if (split.Length == 2 && split[0] == "submit")
    {
        decimal price;
        if (decimal.TryParse(split[1], out price) && decimal.Round(price, 2) == price && price < 200)
        {
            yield return null;
            Clear.OnInteract();
            // Greedy coin/bill entry, largest denomination first
            // (Amounts is ordered smallest-first, hence Reverse()).
            foreach (GameObject button in Amounts.Reverse())
            {
                decimal amount = decimal.Parse("0" + GetTextMesh(button).text);
                while (price >= amount)
                {
                    button.GetComponent<KMSelectable>().OnInteract();
                    price -= amount;
                    yield return new WaitForSeconds(0.1f);
                }
            }
            // Announce the outcome before pressing submit.
            if (Change == Paid - Total)
            {
                yield return "solve";
            }
            else
            {
                yield return "strike";
            }
            Submit.OnInteract();
        }
    }
}
}
| 84b303e4b0e0ae5604e856ce8f32a7cc52fb2088 | [
"C#"
] | 6 | C# | CaitSith2/KTANE-Mods-Samfun | 2a8f928b9af94297aad356b4f47eb1caa45301f0 | 968f52426ebe00b0abab08d1b58618d21d742f28 |
refs/heads/main | <repo_name>justin95214/c-add-add-study<file_sep>/c++study3/main.cpp
#include<iostream>
#include "Circle.h"
using namespace std;
int main() {
Circle circleArray[5];
circleArray[0].setRadius(10);
circleArray[1].setRadius(20);
circleArray[2].setRadius(30);
circleArray[3];
circleArray[4];
for (int i = 0; i < 5; i++)
{
cout << "Circle " << i << "의 면적은 " << circleArray[i].getArea() << endl;
}
Circle *p;
p = circleArray;
(p+4)->setRadius(40);
for (int i = 0; i < 5; i++)
{
cout << "Circle " << i << "의 면적은 " << p->getArea() << endl;
p++;
}
}<file_sep>/c++study1/main.cpp
#include <iostream>
#include "Tower.h"
using namespace std;
int main(){
Tower myTower;
Tower seoulTower(100);
cout << "높이는" << myTower.getHeight() << "미터" << endl;
cout << "높이는" << seoulTower.getHeight() << "미터" << endl;
}<file_sep>/c++study1/Tower.cpp
#include "Tower.h"
#include <iostream>
// Default constructor: a tower is 1 meter tall unless told otherwise.
Tower::Tower() : tall(1)
{
}
// Constructs a tower `a` meters tall.
Tower::Tower(int a) : tall(a)
{
}
// Returns the tower's height in meters.
int Tower::getHeight()
{
	return tall;
}<file_sep>/c++study7/main.cpp
#include<iostream>
#include"Sample.h"
using namespace std;
int main() {
Sample s(10);
s.read();
s.write();
cout << endl;
cout << "~ 가장 작은 수는" << s.small() << endl;
cout << "~ 가장 큰 수는" << s.big() << endl;
}<file_sep>/README.md
# C++-study
복습
## List
1. 클래스, 생성자, 객체, Code distribution() - c++study01 02
2. 객체 포인터, 객체 배열 및 객체 & 배열의 동적 생성 - c++study 03 04 05 06 07 08
3. 함수 참조 - c++study 09 10
4. 복사 생성자
5. 함수 중복과 static 멤버
6. 프렌드와 연산자 중복
7. 상속
8.
9.
10.
<file_sep>/C++study0/internel.cpp
#include <iostream>
#include "Caluculator.h"
using namespace std;
// Entry point: all work is delegated to Calculator::run().
int main() {
	Calculator calc;
	calc.run();
} <file_sep>/c++study10/Morse.h
#pragma once
#ifndef MORSE_H
#define MORSE_H
#include <iostream>
#include <string>
using namespace std;
// Text-to-Morse converter. The lookup tables are filled once by the
// constructor; only the text -> morse direction is implemented.
class Morse
{
	string alpahbet[26];   // codes for 'a'..'z' (NOTE(review): "alpahbet" is a kept misspelling)
	string digit[10];      // codes for '0'..'9'
	string slash, question, comma, plus, equal, period;   // punctuation codes
public:
	Morse();
	void text2morse(string text, string& morse);
	//bool morse2text(string morse, string& text);
};
// Fills the Morse lookup tables: letters a-z, digits 0-9 and the
// handful of punctuation marks the converter understands.
Morse::Morse()
{
	alpahbet[0] = ".-"; alpahbet[1] = "-..."; alpahbet[2] = "-.-."; alpahbet[3] = "-..";
	alpahbet[4] = "."; alpahbet[5] = "..-."; alpahbet[6] = "--."; alpahbet[7] = "....";
	alpahbet[8] = ".."; alpahbet[9] = ".---"; alpahbet[10] = "-.-"; alpahbet[11] = ".-..";
	alpahbet[12] = "--"; alpahbet[13] = "-."; alpahbet[14] = "---"; alpahbet[15] = ".--.";
	alpahbet[16] = "--.-"; alpahbet[17] = ".-."; alpahbet[18] = "..."; alpahbet[19] = "-";
	alpahbet[20] = "..-"; alpahbet[21] = "...-"; alpahbet[22] = ".--"; alpahbet[23] = "-..-";
	alpahbet[24] = "-.--"; alpahbet[25] = "--.."; digit[0] = "-----"; digit[1] = ".----";
	digit[2] = "..---"; digit[3] = "...--"; digit[4] = "....-"; digit[5] = ".....";
	digit[6] = "-...."; digit[7] = "--..."; digit[8] = "---.."; digit[9] = "----.";
	slash = "-..-."; question = "..--.."; comma = "--..--"; period = ".-.-.-";
	plus = ".-.-"; equal = "-...-";
}
// Appends the Morse encoding of `text` to `morse`.
// Each encoded symbol is followed by a single space; an input space is
// copied through (also followed by the separator space). Characters
// with no mapping are silently skipped.
// BUG FIX: the original allocated `new string[size]` into a local that
// was never used and never deleted — a pure memory leak; it also used
// raw ASCII codes (65, 97, 32, ...) instead of character literals.
void Morse::text2morse(string text, string& morse){
	for (string::size_type i = 0; i < text.size(); i++)
	{
		char c = text.at(i);
		// Fold upper case onto lower case before the letter lookup.
		if (c >= 'A' && c <= 'Z')
		{
			c += 'a' - 'A';
		}
		if (c >= 'a' && c <= 'z')
		{
			morse.append(alpahbet[c - 'a']);
			morse.append(" ");
		}
		else if (c == ' ')
		{
			// Word boundary: keep the space itself plus the separator.
			morse.append(1, c);
			morse.append(" ");
		}
		else if (c >= '0' && c <= '9')
		{
			morse.append(digit[c - '0']);
			morse.append(" ");
		}
		else if (c == '/')
		{
			morse.append(slash);
			morse.append(" ");
		}
		else if (c == '+')
		{
			morse.append(plus);
			morse.append(" ");
		}
		else if (c == '=')
		{
			morse.append(equal);
			morse.append(" ");
		}
		else if (c == '?')
		{
			morse.append(question);
			morse.append(" ");
		}
		else if (c == '.')
		{
			morse.append(period);
			morse.append(" ");
		}
		else if (c == ',')
		{
			morse.append(comma);
			morse.append(" ");
		}
	}
}
#endif MORSE_H
<file_sep>/c++study10/main.cpp
#include <iostream>
#include <string>
#include "Morse.h"
using namespace std;
// Reads one line of text and prints its Morse-code translation.
int main() {
	Morse morse;
	string words;
	string morsewords;
	cout << "문자열을 입력하시오. " << endl;
	getline(cin, words);
	cout << "입력된 문자열 : " << words << endl;
	morse.text2morse(words,morsewords);
	cout << endl;
	cout << morsewords << endl;
}<file_sep>/c++study3/Circle.h
#pragma once
#ifndef CIRCLE_H
#define CIRCLE_H
// A circle with an integer radius.
class Circle
{
public:
	Circle();
	Circle(int r);
	int radius;            // public by design in this exercise
	double getArea();      // area computed from the current radius
	void setRadius(int r);
};
// BUG FIX: the original `#endif CIRCLE_H` carried extra tokens after
// the directive, which is non-standard (compilers warn about it).
#endif // CIRCLE_H
<file_sep>/c++study2/Random.cpp
#include <iostream>
#include "Random.h"
using namespace std;
// Nothing to initialize. rand() is used unseeded (no srand call), so
// every run of the program produces the same sequence.
Random::Random()
{
}
// Returns a pseudo-random integer in [a, b]. A raw rand() value that
// already falls inside the range is used as-is; anything outside is
// folded in with a modulo.
// GENERALIZED: the original folded with the hard-coded `random % 3 + 2`,
// which was only correct for the call nextInRange(2, 4); the expression
// below is identical for that call but works for any a <= b.
int Random::nextInRange(int a, int b)
{
	int random = rand();
	if (random > b || random < a)
	{
		return random % (b - a + 1) + a;
	}
	else
		return random;
}
// Returns a pseudo-random EVEN integer in [a, b]; `a` is assumed even.
// BUG FIXES versus the original:
//  - the modulus `(b - a) / 2` could never yield b itself (off-by-one:
//    nextEvenInRange(2, 10) only produced 2/4/6/8);
//  - a rand() value that happened to fall inside [a, b] was returned
//    unchanged even when it was odd, violating the "even" contract;
//  - the `+ 2` offset was hard-coded for a == 2.
int Random::nextEvenInRange(int a, int b)
{
	int random = rand();
	return (random % ((b - a) / 2 + 1)) * 2 + a;
}
// Returns a raw pseudo-random value in [0, RAND_MAX].
int Random::next()
{
	return rand();
}<file_sep>/C++study0/Calculator.cpp
#include <iostream>
#include "Caluculator.h"
#include "Adder.h"
using namespace std;
// Prompts for two integers and prints the result of Adder::proecess()
// (sic — the misspelled name comes from the Adder class) applied to them.
void Calculator::run()
{
	cout << "두 개의 수를 입력하시오 >>";
	int a, b;
	cin >> a >> b;
	Adder adder(a, b);
	cout << adder.proecess();
}<file_sep>/c++study1/Tower.h
#pragma once
#ifndef TOWER_H
#define TOWER_H
// A tower with a height in meters.
class Tower
{
public:
	int tall;         // height in meters (defaults to 1, see Tower.cpp)
	Tower();
	Tower(int a);
	int getHeight();  // returns tall
};
// BUG FIX: the original read `#endif 1TOWER_H` — stray tokens after
// #endif are non-standard and trigger compiler warnings.
#endif // TOWER_H
<file_sep>/c++study8/Person.h
#pragma once
#ifndef PERSON_H
#define PERSON_H
#include <iostream>
#include <string>
using namespace std;
// A person identified by a single name string.
class Person
{
	string name;
public:
	Person() { name = ""; }
	// BUG FIX: the original body was `name = name;`, which assigned the
	// parameter to itself and left the member empty.
	Person(string name) { this->name = name; }
	string getName() { return name; }
	void setName(string name) { this->name = name; }
};
// A named family owning a fixed-size, heap-allocated array of Persons.
class Family
{
	string Family_name;
	Person *p;   // owned array of `size` members
	int size;
public:
	Family(string name, int size) {
		this->size =size;
		Family_name = name;
		p = new Person[size];
	}
	// BUG FIX: the original never released the array (memory leak).
	// NOTE(review): copying a Family would now double-delete; in this
	// project the class is only used through a single owning pointer.
	~Family() { delete[] p; }
	void setName(int n, string name) { p[n].setName(name); }
	// Prints the family size followed by every member's name.
	void show() { cout << this->Family_name << "가족은 다음과같이" << size << "명입니다."<<endl;
	for (int i = 0; i < size; i++)
	{
		cout << (p+i)->getName() << " ";
	}
	cout <<"입니다."<< endl;
	}
};
#endif PERSON_H
<file_sep>/c++study5/main.cpp
#include <iostream>
#include "RGB..h"
// Demonstrates object pointers: one Color accessed through a pointer,
// then an array of three colors filled through a moving pointer and
// printed by index.
int main() {
	Color screenColor(255,0,0);
	Color *p;
	p = &screenColor;
	p->show();
	Color screenColor_array[3];
	p = screenColor_array;
	p->SetColor(255, 0, 0);
	p++;
	p->SetColor(0, 255, 0);
	p++;
	p->SetColor(0, 0, 255);
	for (int i = 0; i < 3; i++)
	{
		screenColor_array[i].show();
	}
}<file_sep>/c++study7/Sample.h
#pragma once
#ifndef SAMPLE_H
#define SAMPLE_H
#include <iostream>
using namespace std;
// Owns a heap buffer of `size` ints that are read from standard input.
class Sample
{
	int *p;     // owned buffer (values uninitialized until read())
	int size;   // number of slots in p
public:
	Sample(int n) {
		size = n; p = new int[n];
	}
	// BUG FIX: the original never freed the buffer (memory leak).
	// NOTE(review): copying a Sample would now double-delete; the class
	// is only used as a single local object in this project.
	~Sample() {
		delete[] p;
	}
	// Reads `size` integers from std::cin.
	void read() {
		for (int i = 0; i < size; i++)
		{
			cin >> p[i];
		}
	}
	// Prints the stored values separated by spaces.
	void write() {
		for (int i = 0; i < size; i++)
		{
			cout << p[i] << " ";
		}
	}
	// Returns the smallest stored value (requires size >= 1).
	int small() {
		int min = p[0];
		for (int i = 1; i < size ; i++)
		{
			if (min > p[i])
			{
				min = p[i];
			}
		}
		return min;
	}
	// Returns the largest stored value (requires size >= 1).
	int big() {
		int max = p[0];
		for (int i = 1; i < size; i++)
		{
			if (max < p[i])
			{
				max = p[i];
			}
		}
		return max;
	}
};
#endif SAMPLE_H
<file_sep>/c++study2/main.cpp
#include <iostream>
#include "Random.h"
using namespace std;
// Demonstrates the Random helper: ten raw values, ten values in [2, 4],
// and ten even values in [2, 10].
int main()
{
	Random r;
	cout << "--0에서" << RAND_MAX << "까지의 랜덤 10개--"<<endl;
	for (int i = 0; i < 10; i++)
	{
		int n = r.next();
		cout<<n<<' ';
	}
	cout << endl;
	cout << "--2에서 4까지의 랜덤 --" << endl;
	for (int i = 0; i < 10; i++)
	{
		int n = r.nextInRange(2, 4);
		cout << n << ' ';
	}
	cout << endl;
	cout << "--2에서 10까지의 랜덤 --" << endl;
	for (int i = 0; i < 10; i++)
	{
		int n = r.nextEvenInRange(2, 10);
		cout << n << ' ';
	}
	cout << endl;
}<file_sep>/c++study8/main.cpp
#include "Person.h"
#include <iostream>
#include <string>
using namespace std;
int main() {
Family *simpson = new Family("Simpson", 3);
simpson->setName(0, "Mr. simpson");
simpson->setName(1, "Mrs. simpson");
simpson->setName(2, "<NAME>");
simpson->show();
} | 9cd66137843e57b169d6d9b7a76daf0079d58d1c | [
"Markdown",
"C++"
] | 17 | C++ | justin95214/c-add-add-study | 1e28c8a28a753f888fc62457df8b32cd6d251bd6 | be28edf350a1c2c9aa1a5e94072c6cdac49052d5 |
refs/heads/master | <repo_name>zeroasterisk/react-dump-simple<file_sep>/src/stories/index.js
/* eslint react/jsx-filename-extension: 0 */
/* eslint import/no-extraneous-dependencies: 0 */
import React from 'react';
import { storiesOf } from '@kadira/storybook';
import Dump from '../index';
import geojson from './geojson.json';
// Storybook showcase for <Dump>: one story per representative value
// type (nullish, primitives, strings, dates, collections, objects).
// BUG FIX: the story label 'baisc string' was a typo for 'basic string'.
storiesOf('Dump', module)
  .add('undefined', () => <Dump value={undefined} />)
  .add('no value (undefined)', () => <Dump />)
  .add('null', () => <Dump value={null} />)
  .add('false', () => <Dump value={false} />)
  .add('0', () => <Dump value={0} />)
  .add('empty string', () => <Dump value="" />)
  .add('basic string', () => <Dump value="lorem ipsum" />)
  .add('long string', () => (<Dump value={
`Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed accumsan nec ex vitae sagittis. Proin sit amet rutrum tortor, eu blandit dui. Interdum et malesuada fames ac ante ipsum primis in faucibus. Vivamus congue dictum finibus. Fusce lacus nisi, dictum at mattis vel, porta eget purus. Proin nec justo eget tortor molestie viverra. Sed facilisis facilisis leo et finibus. Maecenas ut ullamcorper diam. Phasellus tempor, quam non rhoncus dictum, lectus ante pretium tellus, vitae luctus urna metus in nibh.
Duis auctor libero ipsum. Curabitur id efficitur nisi, eget tincidunt odio. Morbi bibendum mauris in felis congue, sed egestas nulla euismod. Aliquam semper eget metus ut luctus. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Nulla commodo augue in mollis lobortis. Proin hendrerit velit at odio dignissim, ut ultrices quam lacinia. Cras eget finibus orci. Nullam et eros nec ligula fringilla imperdiet non in lorem. Etiam hendrerit molestie vestibulum. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam malesuada neque mi, ac consequat justo ultricies non. Vivamus eget tristique urna, id dictum erat. Nullam id suscipit felis. Ut accumsan commodo cursus.
Aenean eget feugiat ipsum. Suspendisse sit amet libero pharetra, rutrum ipsum non, finibus ipsum. Nam finibus venenatis consectetur. Pellentesque vel neque nec eros condimentum egestas quis sed risus. Quisque fringilla vestibulum neque, at mattis metus dignissim eleifend. Sed rutrum, libero at ultricies vulputate, enim felis gravida sapien, nec vestibulum velit nunc sit amet justo. Nunc efficitur nisl ipsum, vitae dapibus urna maximus et. Mauris porta gravida arcu in faucibus. Aliquam at lectus ipsum. Ut tincidunt pharetra lacus in ultrices. Suspendisse vitae justo ac libero consequat blandit. Ut vitae leo ac lorem sagittis luctus. Proin sit amet quam vitae arcu dignissim dapibus.`
  } />))
  .add('date object', () => <Dump value={new Date()} />)
  .add('date object with dateFormat', () => <Dump value={new Date()} dateFormat="YYYY-MM-DD" />)
  .add('date object with useInspect', () => <Dump value={new Date()} useInspect />)
  .add('array of strings', () => <Dump value={['quick', 'brown', 'fox']} />)
  .add('simple object', () => <Dump value={{ speed: 'quick', color: 'brown', animal: 'fox' }} />)
  .add('simple object - useInspect', () => (
    <Dump useInspect value={{ speed: 'quick', color: 'brown', animal: 'fox' }} />
  ))
  .add('complex object', () => <Dump value={geojson} />)
  .add('complex object - useInspect', () => <Dump value={geojson} useInspect />)
;
<file_sep>/README.md
# React Dump Simple Component
Sometimes you just want to dump the contents of a value onto the page.
If you want some colorful fanciness, check out [react-var-dump](https://www.npmjs.com/package/react-var-dump) — a more featureful alternative.
But if you want something simple and plain, but still effective (maybe for an internal admin interface)...
See [storybook for examples](https://zeroasterisk.github.io/react-dump)
```js
import Dump from 'react-dump-simple';
const MyPage = props => (
<div>
Here is something I'm trying to debug:
<Dump value={props.userData} />
</div>
);
const MyComponentDumpsAllProps = props => (<Dump value={props} />);
```
_(this is super simple, but sometimes useful when developing)_
<file_sep>/src/NiceDate.js
import PropTypes from 'prop-types';
/**
* A Nice span + tooltip for a date
* does moment-timezone transformations into local timezon
* does time-since transformation if recent
*/
import moment from 'moment-timezone';
import React from 'react';
// Human-friendly "time since" string (e.g. "3 days ago") relative to
// `since` (defaults to now). Falsy input, or dates beyond the start of
// 2050 (treated as "no date" sentinels), yield ''.
export const nicetimesince = (data, since) => {
  if (!data) return '';
  const m = moment(data);
  if (m.isAfter(moment('2050'))) {
    return '';
  }
  return m.from(moment(since));
};
// Maps common US timezone abbreviations to IANA zone names.
// Anything missing or unrecognized falls back to America/New_York.
export const tzFull = (tz) => {
  const abbreviations = {
    EST: 'America/New_York',
    EDT: 'America/New_York',
    CST: 'America/Chicago',
    CDT: 'America/Chicago',
    MST: 'America/Boise',
    MDT: 'America/Boise',
    PST: 'America/Los_Angeles',
    PDT: 'America/Los_Angeles',
  };
  // hasOwnProperty guards against inherited keys like 'toString'.
  return Object.prototype.hasOwnProperty.call(abbreviations, tz)
    ? abbreviations[tz]
    : 'America/New_York';
};
// Clones `data` as a moment converted into the IANA zone for tzInput.
export const momentToTZ = (data, tzInput) => moment(data).clone().tz(tzFull(tzInput));
// Formats a date for display in the resolved timezone.
//  - data: anything moment() accepts; falsy → ''.
//  - format: a moment format string; omitted or 'calendar' → a compact
//    calendar form ('Today', 'MMM-DD', or 'MM/DD/YYYY').
//  - tzInput: abbreviation understood by tzFull; when omitted it is
//    derived from the value, then guessed, then defaulted.
// Dates after 2050-12-31 are treated as "no date" sentinels → ''.
export const nicedate = (data, format, tzInput) => {
  if (!data) return '';
  const m = moment.isMoment(data) ? data : moment(data);
  if (m.isAfter(moment('2050-12-31'))) {
    return '';
  }
  // calculate timezone
  const tz = tzInput
    || m.format('z')
    || moment().tz(moment.tz.guess()).format('z')
    || 'America/New_York';
  // convert dates from inputs into the expected timezone
  const mLocal = momentToTZ(m, tz);
  if (!format || format === 'calendar') {
    return mLocal.calendar(null, {
      sameDay: '[Today]',
      nextDay: 'MMM-DD',
      nextWeek: 'MMM-DD',
      lastDay: 'MMM-DD',
      lastWeek: 'MMM-DD',
      sameElse: 'MM/DD/YYYY',
    });
  }
  return mLocal.format(format);
};
// Same as nicedate, with the timezone and format arguments swapped.
export const nicedateTZ = (data, tzInput, format) => nicedate(data, format, tzInput);
// Renders the date inside a <span>, with the full ISO timestamp in the
// tooltip. format="since" switches the body to relative time.
const NiceDate = props => (
  <span
    className={props.className}
    title={nicedateTZ(props.date, props.tz, 'YYYY-MM-DDTHH:mm:ssZ')}
  >
    {props.format === 'since'
      ? nicetimesince(props.date)
      : nicedateTZ(props.date, props.tz, props.format)
    }
  </span>
);
// All props are optional; see nicedate/tzFull for the accepted values.
NiceDate.propTypes = {
  // value of date
  date: PropTypes.oneOfType([ // eslint-disable-line
    PropTypes.object,
    PropTypes.string,
  ]),
  tz: PropTypes.string, // eslint-disable-line
  format: PropTypes.string, // eslint-disable-line
  className: PropTypes.string, // eslint-disable-line
};
export default NiceDate;
| f1fa9eedb2de1388380560e2ed61eb55c699091a | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | zeroasterisk/react-dump-simple | dba3ef7d0722da8a187f545aba5794283159efce | 489a24457c388d4c7d124937b3f58eac8c5ee824 |
refs/heads/master | <file_sep>package com.sales.g10.response
/**
 * JSON model for a single image returned by the gallery API (the field
 * names match an Imgur-style image resource — NOTE(review): confirm the
 * backing service). Every property is nullable because the object is
 * deserialized straight from the response and any field may be absent.
 */
data class ImageBody(
    val account_id: Int?,
    val account_url: String?,
    val ad_config: AdConfig?,
    val ad_type: Int?,
    val ad_url: String?,
    val animated: Boolean?,
    val bandwidth: Int?,
    val comment_count: Int?,
    val datetime: Int?,
    val description: Any?,
    val downs: Int?,
    val edited: Int?,
    val favorite: Boolean?,
    val favorite_count: Int?,
    val has_sound: Boolean?,
    val height: Int?,
    val id: String?,
    val in_gallery: Boolean?,
    val in_most_viral: Boolean?,
    val is_ad: Boolean?,
    val is_album: Boolean?,
    val link: String?,
    val nsfw: Boolean?,
    val points: Int?,
    val score: Int?,
    val section: String?,
    val size: Int?,
    val tags: List<Any?>?,
    val title: String?,
    val topic: String?,
    val topic_id: Int?,
    val type: String?,
    val ups: Int?,
    val views: Int?,
    val vote: Any?,
    val width: Int?
) {
    /** Advertising flags attached to the image. */
    data class AdConfig(
        val highRiskFlags: List<Any?>?,
        val safeFlags: List<String?>?,
        val showsAds: Boolean?,
        val unsafeFlags: List<String?>?,
        val wallUnsafeFlags: List<Any?>?
    )
}<file_sep>package com.sales.g10.services.presenter
import SendComment
/**
 * MVP presenter contract for the image-gallery screen. Implemented by
 * GalleryInteractor; results come back through GalleryView callbacks.
 */
interface GalleryPresenter {
    /** Fetches the image identified by [imageId]; [authorization] is the auth header value. */
    fun getImageGallery(imageId:String,authorization:String)
    /** Fetches the comments posted on the image. */
    fun getImageComments(imageId:String,authorization:String)
    /** Posts [sendComment] as a new comment on the image. */
    fun sendComments(imageId:String,sendComment: SendComment,authorization:String)
}<file_sep>package com.sales.g10
import android.app.Application
import androidx.lifecycle.AndroidViewModel
import com.sales.g10.db.LoginDetails
import com.sales.g10.db.repository.LoginRepository
/**
 * ViewModel exposing the login persistence operations backed by
 * [LoginRepository].
 */
class LoginViewModel(application: Application) : AndroidViewModel(application) {

    private val repository = LoginRepository(application)

    /** Persists [loginDetails]; returns whatever the repository's insert reports (nullable Long). */
    fun insert(loginDetails: LoginDetails): Long? = repository.insert(loginDetails)

    /** Returns the stored password for [userName], as reported by the repository. */
    fun getPassword(userName: String): String? = repository.getPassword(userName)
}<file_sep>package com.sales.g10
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import androidx.navigation.NavController
import androidx.navigation.findNavController
import com.sales.g10.fragment.SignUpFragment
/**
 * Single-activity host: fragment navigation is driven by the Jetpack
 * Navigation component hosted in R.id.nav_host_fragment. Implements
 * SignUpFragment.OnListener so the sign-up screen can navigate back.
 */
class MainActivity : AppCompatActivity(),SignUpFragment.OnListener {
    private lateinit var navController: NavController
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        navController = findNavController(R.id.nav_host_fragment)
    }
    /** Callback from SignUpFragment: treat it as a system back press. */
    override fun onBack() {
        onBackPressed()
    }
}
<file_sep>package com.sales.g10.fragment
import ResponseGArrayBody
import ResponseGBody
import SendComment
import android.content.Context
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.view.inputmethod.InputMethodManager
import android.widget.Toast
import androidx.core.content.ContextCompat
import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.DividerItemDecoration
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken
import com.sales.g10.R
import com.sales.g10.adapter.CommentsAdapter
import com.sales.g10.response.Comments
import com.sales.g10.response.ErrorBodyG
import com.sales.g10.response.ImageBody
import com.sales.g10.services.interactor.GalleryInteractor
import com.sales.g10.services.presenter.GalleryPresenter
import com.sales.g10.services.view.GalleryView
import com.sales.g10.utils.Constant
import com.squareup.picasso.Picasso
import kotlinx.android.synthetic.main.fragment_image.*
import kotlinx.android.synthetic.main.progress_bar.*
/**
* A simple [Fragment] subclass.
* Use the [ImageFragment.newInstance] factory method to
* create an instance of this fragment.
*/
class ImageFragment : Fragment(), GalleryView {
private lateinit var galleryPresenter: GalleryPresenter
private lateinit var commentsAdapter: CommentsAdapter
private var commentList: ArrayList<Comments>? = null
private var imageId: String? = null
/** Creates the presenter before any view exists; this fragment is its view. */
override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    galleryPresenter = GalleryInteractor(this)
}
/** Inflates R.layout.fragment_image as this fragment's view. */
override fun onCreateView(
    inflater: LayoutInflater, container: ViewGroup?,
    savedInstanceState: Bundle?
): View? {
    // Inflate the layout for this fragment
    return inflater.inflate(R.layout.fragment_image, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
btnSearch.setOnClickListener {
if (!edImageSearch.text.isNullOrEmpty()) {
imageId = edImageSearch.text.toString().trim()
galleryPresenter.getImageGallery(
imageId!!,
Constant.AUTHORIZATION
)
} else {
imageId =null
Toast.makeText(activity, "Enter image id", Toast.LENGTH_SHORT).show()
}
hideKeyboard(it)
}
btnComment.setOnClickListener {
if(!edComment.text.isNullOrEmpty()) {
galleryPresenter.sendComments(
imageId!!,SendComment(edComment.text.toString().trim()),
Constant.BEARER
)
} else {
Toast.makeText(activity, "Enter comments", Toast.LENGTH_SHORT).show()
}
hideKeyboard(it)
}
commentList = ArrayList()
val layoutManager = LinearLayoutManager(activity)
layoutManager.orientation = RecyclerView.VERTICAL
val verticalDecoration = DividerItemDecoration(context, DividerItemDecoration.VERTICAL)
val verticalDivider = ContextCompat.getDrawable(activity!!, R.drawable.vertical_divider)
verticalDecoration.setDrawable(verticalDivider!!)
recyclerViewImageComments.addItemDecoration(verticalDecoration)
recyclerViewImageComments.isNestedScrollingEnabled = false
recyclerViewImageComments.layoutManager = layoutManager
commentsAdapter = CommentsAdapter(commentList!!)
recyclerViewImageComments.adapter = commentsAdapter
}
override fun showProgress() {
if (progress_bar != null) {
progress_bar.visibility = View.VISIBLE
}
}
override fun hideProgress() {
if (progress_bar != null) {
progress_bar.visibility = View.GONE
}
}
override fun onFailure(message: String) {
if (activity != null) {
group.visibility = View.GONE
imageId = null
imgView.setImageResource(R.drawable.no_image)
Constant.alertDialog(activity!!, "", msg = message)
}
}
override fun onResponseSendComment(responseGBody: ResponseGBody) {
try {
if (responseGBody.success!!)
{
edComment.setText("")
getComments()
}
else
{
val type = object : TypeToken<ErrorBodyG>() {}.type
val l = Gson().toJson(responseGBody.data)
val errorBody = Gson().fromJson<ErrorBodyG>(l, type)
Constant.alertDialog(activity!!, "", errorBody.error!!)
}
}
catch (ex:Exception)
{
ex.printStackTrace()
}
}
override fun onResponseGetComment(responseGArrayBody: ResponseGArrayBody) {
try {
commentList!!.clear()
val type = object : TypeToken<ArrayList<Comments>>() {}.type
val l = Gson().toJson(responseGArrayBody.data)
val messageList = Gson().fromJson<ArrayList<Comments>>(l, type)
if (messageList != null && messageList.isNotEmpty()) {
commentList!!.addAll(messageList)
}
commentsAdapter.notifyDataSetChanged()
} catch (ex: Exception) {
ex.printStackTrace()
}
}
override fun onResponseGetImage(responseGBody: ResponseGBody) {
try {
val type = object : TypeToken<ImageBody>() {}.type
val l = Gson().toJson(responseGBody.data)
val imageBody = Gson().fromJson<ImageBody>(l, type)
Picasso.get()
.load(imageBody.link)
.placeholder(R.drawable.loading_image)
.error(R.drawable.no_image)
.into(imgView)
getComments()
group.visibility = View.VISIBLE
} catch (ex: Exception) {
ex.printStackTrace()
}
}
private fun getComments()
{
galleryPresenter.getImageComments(
imageId!!,
Constant.AUTHORIZATION
)
}
private fun hideKeyboard(view: View)
{
val imm =activity!!.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.hideSoftInputFromWindow(view.windowToken, 0)
}
}
<file_sep>data class ResponseGBody(
val data: Any? = null,
val status: Int? = null,
val success: Boolean? = null
)<file_sep>package com.sales.g10.response
data class Comments(
val album_cover: Any?,
val author: String?,
val author_id: Int?,
val children: List<Any>?,
val comment: String?,
val datetime: Int?,
val deleted: Boolean?,
val downs: Int?,
val has_admin_badge: Boolean?,
val id: Int?,
val image_id: String?,
val on_album: Boolean?,
val parent_id: Int?,
val platform: String?,
val points: Int?,
val ups: Int?,
val vote: Any?
)<file_sep>package com.sales.g10.fragment
import android.content.Context
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.Toast
import androidx.fragment.app.Fragment
import com.sales.g10.R
import com.sales.g10.db.GtenDatabase
import com.sales.g10.db.LoginDetails
import kotlinx.android.synthetic.main.fragment_sign_up.*
import org.jetbrains.anko.doAsync
import org.jetbrains.anko.uiThread
// Sign-up screen: collects a username/password pair and persists it locally.
class SignUpFragment : Fragment() {
    //private lateinit var loginViewModel: LoginViewModel
    // Host activity callback; used to navigate back after a successful save.
    private lateinit var onFragmentListener: OnListener
    // private lateinit var db:GtenDatabase

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // loginViewModel = ViewModelProviders.of(this).get(LoginViewModel::class.java)
        //db=GtenDatabase.getDatabase(context!!)
    }

    override fun onCreateView(
        inflater: LayoutInflater, container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View? {
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_sign_up, container, false)
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        btnSave.setOnClickListener {
            val userName = etUsername.text.toString().trim()
            val password = etPassword.text.toString().trim()
            if (userName.isNotEmpty() && password.isNotEmpty()) {
                val loginDetails = LoginDetails()
                loginDetails.gUserName = userName
                // BUG FIX: this line previously read `loginDetails.gPassword= <PASSWORD>`,
                // a scrubbed-secret placeholder that does not compile; store the value
                // the user actually typed.
                loginDetails.gPassword = password
                saveUserToDb(loginDetails)
            }
        }
    }

    // Persist the credentials off the main thread, confirm, then navigate back.
    private fun saveUserToDb(loginDetails: LoginDetails) {
        doAsync {
            //db.loginDetailsDao().insertLoginDetail(loginDetails)
            uiThread {
                Toast.makeText(context, "Saved", Toast.LENGTH_SHORT).show()
                onFragmentListener.onBack()
            }
        }
    }

    override fun onAttach(context: Context) {
        super.onAttach(context)
        // The hosting activity must implement OnListener.
        onFragmentListener = activity as OnListener
    }

    // Contract for the host activity: handle "navigate back" requests.
    interface OnListener {
        fun onBack()
    }
}
<file_sep>data class SendComment(
val comment: String? = null
)<file_sep>package com.sales.g10.db.repository
import android.app.Application
import com.sales.g10.db.GtenDatabase
import com.sales.g10.db.LoginDetails
import com.sales.g10.db.LoginDetailsDao
// Thin wrapper around LoginDetailsDao for the login feature.
// NOTE(review): these calls hit the database synchronously — confirm they are
// only invoked off the main thread.
class LoginRepository(application: Application) {
private var loginDetailsDao: LoginDetailsDao
init {
val database: GtenDatabase = GtenDatabase.getDatabase(
application.applicationContext
)
loginDetailsDao = database.loginDetailsDao()
}
// Insert (or replace) a credential row; returns the row id, or null.
fun insert(loginDetails: LoginDetails): Long? {
return loginDetailsDao.insertLoginDetail(loginDetails)
}
// Look up the stored password for a username; null when not found.
fun getPassword(gUsername:String): String? {
return loginDetailsDao.getUserNamePassword(gUsername)
}
}<file_sep>package com.sales.g10.db
import android.content.Context
import androidx.room.Database
import androidx.room.Room
import androidx.room.RoomDatabase
import androidx.room.TypeConverters
// Room database holding locally saved login details. Obtain the singleton
// via getDatabase().
@Database(
entities = arrayOf(LoginDetails::class),
version = 1,
exportSchema = false
)
@TypeConverters(DateConverter::class)
public abstract class GtenDatabase : RoomDatabase() {
abstract fun loginDetailsDao(): LoginDetailsDao
companion object {
// Singleton instance; @Volatile so writes are visible across threads.
@Volatile
private var INSTANCE: GtenDatabase? = null
// Fast path reads the volatile; slow path builds the database under a lock.
// NOTE(review): INSTANCE is not re-checked inside synchronized, so two
// racing threads could each build a database — not strict double-checked
// locking. Confirm this is acceptable.
fun getDatabase(context: Context): GtenDatabase {
val tempInstance = INSTANCE
if (tempInstance != null) {
return tempInstance
}
synchronized(this) {
val instance = Room.databaseBuilder(
context.applicationContext,
GtenDatabase::class.java,
"gten_database"
).setJournalMode(JournalMode.TRUNCATE)
//.addMigrations()
.build()
INSTANCE = instance
return instance
}
}
}
}<file_sep>package com.sales.g10.services
import ResponseGArrayBody
import ResponseGBody
import SendComment
import com.sales.g10.utils.Constant
import okhttp3.MultipartBody
import okhttp3.ResponseBody
import org.jetbrains.annotations.NotNull
import retrofit2.Call
import retrofit2.http.*
// Retrofit definition of the remote endpoints used by the gallery feature.
interface ApiInterface {
// Fetch metadata for a single image by id.
@GET(Constant.SubUrl.GET_IMAGE)
fun getImage(@Path("imageId") imageId:String, @Header("Authorization") auth:String): Call<ResponseGBody>
// Fetch the "best" comments for an image.
@GET(Constant.SubUrl.GET_COMMENTS)
fun getComments(@Path("imageId") imageId:String,@Header("Authorization") auth:String): Call<ResponseGArrayBody>
// NOTE(review): despite its name, this posts a comment (SEND_COMMENT path;
// called from GalleryInteractor.sendComments), not an image upload. Renaming
// would break callers, so only flagging it here.
@Multipart
@POST(Constant.SubUrl.SEND_COMMENT)
fun uploadImages(
@Path("imageId") imageId:String,
@Part file: MultipartBody.Part,
@Header("Authorization") auth:String
): Call<ResponseGBody>
}
}<file_sep>package com.sales.g10.utils
import android.content.Context
import androidx.appcompat.app.AlertDialog
// App-wide constants plus a small alert-dialog helper.
object Constant {
const val NO_INTERNET = "No Internet, Please check your network connection."
// Toggled elsewhere to reflect connectivity; checked before each network call.
var IS_NETWORK_AVAILABLE: Boolean = true
const val SERVER_NOT_RESPONDING = "Server not responding"
// Substring matched against Throwable.cause to detect connection failures.
const val JAVA_NET_EXCEPTION = "java.net.ConnectException"
// NOTE(security): API credentials are hard-coded in source and shipped with
// the app; they should be moved out of version control and rotated.
const val AUTHORIZATION = "Client-ID dda39a8297e4684"
const val BEARER="Bearer 695c25981b46ab8c8945dc43ae03d07182cc834d"
// Relative endpoint paths; {imageId} is substituted by Retrofit's @Path.
object SubUrl {
const val GET_IMAGE = "image/{imageId}"
const val GET_COMMENTS = "{imageId}/comments/best"
const val SEND_COMMENT = "{imageId}/comment"
}
/**
* This method is for showing Alert OR Error Dialog message of API Response
*
* @param context context used to build the dialog (must be non-null in practice)
* @param title currently unused
* @param msg message text shown in the dialog body
*/
fun alertDialog(context: Context?, title: String, msg: String) {
try {
val dialogBuilder = AlertDialog.Builder(context!!)
//val inflater = context.layoutInflater
dialogBuilder.setCancelable(false)
//val dialogView = inflater.inflate(R.layout.custom_alert_dialog, null)
//dialogBuilder.setView(dialogView)
//dialogBuilder.setTitle("Custom dialog")
dialogBuilder.setMessage(msg)
dialogBuilder.setPositiveButton("Ok", { dialog, whichButton ->
dialog.dismiss()
})
val b = dialogBuilder.create()
b.show()
// Any failure (e.g. null context) is logged and swallowed on purpose:
// a missing dialog should not crash the app.
} catch (e: Exception) {
e.printStackTrace()
}
}
}<file_sep>data class ResponseGArrayBody(
val data: ArrayList<Any>? = null,
val status: Int? = null,
val success: Boolean? = null
)<file_sep>package com.sales.g10.adapter
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
import com.sales.g10.R
import com.sales.g10.response.Comments
import kotlinx.android.synthetic.main.recyclerview_message_list.view.*
// RecyclerView adapter rendering a flat list of Comments; each row shows only
// the comment text (layout recyclerview_message_list, view txtMessage).
class CommentsAdapter(
private val arrayList: ArrayList<Comments>
) :
RecyclerView.Adapter<CommentsAdapter.ViewHolder>() {
override fun onCreateViewHolder(viewGroup: ViewGroup, viewType: Int): ViewHolder {
val view = LayoutInflater.from(viewGroup.context).inflate(
R.layout.recyclerview_message_list, viewGroup,
false
)
return ViewHolder(view)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) {
holder.bindItems(arrayList[position], position)
}
override fun getItemCount(): Int {
return arrayList.size
}
inner class ViewHolder(itemView: View) : RecyclerView.ViewHolder(itemView) {
// Bind one comment to the row; position is currently unused.
fun bindItems(result: Comments, position: Int) {
itemView.txtMessage.text = result.comment
}
}
}<file_sep>package com.sales.g10.response
data class ErrorBodyG(
val error: String? = null,
val method: String? = null,
val request: String? = null
)<file_sep>package com.sales.g10.services.view
import ResponseGArrayBody
import ResponseGBody
// Callbacks the GalleryInteractor uses to report progress and results back to
// the UI layer (implemented by ImageFragment).
interface GalleryView {
fun showProgress()
fun hideProgress()
// Invoked with a user-displayable message on any error path.
fun onFailure(message: String)
fun onResponseGetImage(responseGBody: ResponseGBody)
fun onResponseGetComment(responseGArrayBody: ResponseGArrayBody)
fun onResponseSendComment(responseGBody: ResponseGBody)
}<file_sep>package com.sales.g10.db
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
// Data-access object for the LoginDetails table.
@Dao
interface LoginDetailsDao {
// Insert or overwrite a credential row; returns the new row id.
@Insert(onConflict = OnConflictStrategy.REPLACE)
fun insertLoginDetail(
loginDetails: LoginDetails
): Long?
// Look up the stored password for a username, or null when absent.
// NOTE(security): the query returns the password column as stored; see the
// entity — it is written unhashed by SignUpFragment.
@Query("SELECT password FROM LoginDetails where username =:gUserName")
fun getUserNamePassword(gUserName:String): String?
}<file_sep>package com.sales.g10.services.interactor
import ResponseGArrayBody
import ResponseGBody
import SendComment
import com.sales.g10.AppApplication
import com.sales.g10.services.presenter.GalleryPresenter
import com.sales.g10.services.view.GalleryView
import com.sales.g10.utils.Constant
import okhttp3.MediaType
import okhttp3.MultipartBody
import okhttp3.RequestBody
import okhttp3.ResponseBody
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
// Presenter implementation: drives the remote gallery calls and reports back
// through the GalleryView callbacks.
class GalleryInteractor(private val galleryView: GalleryView) : GalleryPresenter {
// Fetch image metadata by id; shows the progress UI for the duration.
override fun getImageGallery(imageId: String, authorization: String) {
if (Constant.IS_NETWORK_AVAILABLE) {
galleryView.showProgress()
AppApplication.getApiClient().getRestInterface().getImage(imageId,authorization)
.enqueue(object : Callback<ResponseGBody> {
override fun onResponse(call: Call<ResponseGBody>, responseSsc: Response<ResponseGBody>) {
galleryView.hideProgress()
if (responseSsc.isSuccessful) {
galleryView.onResponseGetImage(responseSsc.body()!!)
}
else
{
galleryView.onFailure("No Image found")
}
}
override fun onFailure(call: Call<ResponseGBody>, t: Throwable) {
galleryView.hideProgress()
// Map low-level connect failures to a friendlier message.
if (t.cause.toString().contains(Constant.JAVA_NET_EXCEPTION)) {
galleryView.onFailure(Constant.SERVER_NOT_RESPONDING)
} else {
galleryView.onFailure(t.message!!)
}
}
})
} else {
galleryView.onFailure(Constant.NO_INTERNET)
}
}
// Fetch the comment list for an image; shows the progress UI for the duration.
// NOTE(review): unlike getImageGallery, a non-successful HTTP response is
// silently ignored here (no onFailure call) — confirm this is intentional.
override fun getImageComments(imageId: String, authorization: String) {
if (Constant.IS_NETWORK_AVAILABLE) {
galleryView.showProgress()
AppApplication.getApiClient().getRestInterface().getComments(imageId,authorization)
.enqueue(object : Callback<ResponseGArrayBody> {
override fun onResponse(call: Call<ResponseGArrayBody>, responseSsc: Response<ResponseGArrayBody>) {
galleryView.hideProgress()
if (responseSsc.isSuccessful) {
galleryView.onResponseGetComment(responseSsc.body()!!)
}
}
override fun onFailure(call: Call<ResponseGArrayBody>, t: Throwable) {
galleryView.hideProgress()
// Map low-level connect failures to a friendlier message.
if (t.cause.toString().contains(Constant.JAVA_NET_EXCEPTION)) {
galleryView.onFailure(Constant.SERVER_NOT_RESPONDING)
} else {
galleryView.onFailure(t.message!!)
}
}
})
} else {
galleryView.onFailure(Constant.NO_INTERNET)
}
}
/**
 * Posts a comment to the image's comment endpoint as multipart form data.
 *
 * BUG FIXES relative to the original:
 *  - hideProgress() is now called in both callbacks; previously the progress
 *    spinner shown by showProgress() was never dismissed after posting.
 *  - response.body() is null-checked before use (Retrofit returns a null body
 *    for HTTP error responses), avoiding a NullPointerException in the callback.
 */
override fun sendComments(imageId: String, sendComment: SendComment, authorization: String) {
    if (Constant.IS_NETWORK_AVAILABLE) {
        galleryView.showProgress()
        //val reqFile = RequestBody.create(MediaType.parse(mimeType), file)
        //val body = MultipartBody.Part.createFormData("prescriptionFiles", file.name, reqFile)
        //val type = mimeType.split("/")
        //val t = type[1]
        val body =
            MultipartBody.Part.createFormData("comment", sendComment.comment!!)
        AppApplication.getApiClient().getRestInterface().uploadImages(imageId, body, authorization)
            .enqueue(object : Callback<ResponseGBody> {
                override fun onFailure(call: Call<ResponseGBody>, t: Throwable) {
                    galleryView.hideProgress()
                    galleryView.onFailure(t.message!!)
                }
                override fun onResponse(
                    call: Call<ResponseGBody>,
                    response: Response<ResponseGBody>
                ) {
                    galleryView.hideProgress()
                    val responseBody = response.body()
                    if (responseBody != null) {
                        galleryView.onResponseSendComment(responseBody)
                    } else {
                        galleryView.onFailure(Constant.SERVER_NOT_RESPONDING)
                    }
                }
            })
    } else {
        galleryView.onFailure(Constant.NO_INTERNET)
    }
}
}<file_sep>rootProject.name='G10'
include ':app'
<file_sep>package com.sales.g10.db
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.PrimaryKey
// Room entity: one saved credential pair.
// NOTE(security): the password column is written as typed by the user (see
// SignUpFragment) — i.e. not hashed; it should be hashed before storage.
@Entity
class LoginDetails {
// Auto-generated primary key.
@PrimaryKey(autoGenerate = true)
var id: Long? = null
@ColumnInfo(name = "username")
var gUserName: String? = null
@ColumnInfo(name = "password")
var gPassword: String? = null
}
"Kotlin",
"Gradle"
] | 21 | Kotlin | jnpallav/G10-work | d1bf9c73680d2f49a60f75d9325e2eecd10da47f | a4b3da6e390fdbdee75f61405dc467c10ae0b47d |
refs/heads/main | <file_sep>import http, { request } from 'http'
import { Server } from 'socket.io'
// Minimal HTTP + Socket.IO server. The plain HTTP handler only answers
// health-check style requests; real-time traffic uses the '/room' namespace.
export default class SocketServer {
// #io is a private field holding the Socket.IO server instance.
#io
constructor({ port }) {
this.port = port
}
// Build the HTTP server, attach Socket.IO, and start listening.
async start() {
// CORS headers are set manually so browser clients on other origins can
// reach this plain HTTP endpoint.
const server = http.createServer((request, response) => {
response.writeHead(200, {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'OPTIONS,POST,GET'
})
response.end('Heyyy there')
})
// Attach Socket.IO to the same HTTP server, open to any origin.
this.#io = new Server(server, {
cors: {
origin: '*',
credentials: false
}
})
// '/room' namespace: greet each connection with its socket id and log any
// joinRoom payloads (front-end smoke test).
const room = this.#io.of('/room')
room.on('connection', socket => {
socket.emit('userConnection', 'socket id se conectou' + socket.id)
socket.on('joinRoom', (dados) => {
console.log('dados recebidos', dados)
})
})
// Resolve with the listening server, or reject on bind errors.
return new Promise((resolve, reject) => {
server.on('error', reject)
server.listen(this.port, () => resolve(server))
})
}
}<file_sep>import { constants } from "../../_shared/constants.js"
import SocketBuilder from "../../_shared/SocketBuilder.js"
// Build the room socket: configure the connection, register lifecycle
// callbacks, then create the underlying socket.
// BUG FIXES relative to the original:
//  - `socket` was declared twice with `const` (a SyntaxError at parse time).
//  - the builder chain was invoked on the SocketBuilder class instead of the
//    configured instance; the two declarations are now one fluent chain.
//  - the callbacks passed `user` as a second argument *outside* the arrow
//    function, where it was not yet defined; they now log the callback's own
//    argument.
const socket = new SocketBuilder({
  socketUrl: constants.socketUrl,
  namespace: constants.socketNamespaces.room
})
  .setOnUserConnected((user) => console.log('user connected', user))
  .setOnUserDisconnected((user) => console.log('user disconnected', user))
  .build()

const room = {
  id: Date.now(),
  topic: 'Js Experts tess'
}

const user = {
  img: 'https://cdn4.iconfinder.com/data/icons/avatars-xmas-giveaway/128/batman_hero_avatar_comics-512.png',
  userName: 'Batman'
}

socket.emit(constants.events.JOIN_ROOM, {user, room})
"JavaScript"
] | 2 | JavaScript | anabneri/clubhouse-clone-js-expert-week | ed2f04af6393c920089f235552f187feeb1c2b2e | f83303c9e6d09d222a75c8d815db95a105f7100d |
refs/heads/main | <file_sep>#!/usr/bin/env bash
# Upstream repository for xow (Xbox One wireless dongle driver).
XOW_REPO=https://github.com/medusalix/xow.git
# Detect the host distribution so install_prereqs can pick the right
# package manager.
if grep -qs "ubuntu" /etc/os-release; then
os="ubuntu"
elif [[ -e /etc/os_release ]]; then
os="pop"
elif [[ -e /etc/os-release ]]; then
os="solus"
elif [[ -e /etc/debian_version ]]; then
os="debian"
else [[ -e /etc/fedora-release ]];
# NOTE(review): the [[ ]] test after `else` runs but its result is ignored,
# so any distro not matched above is treated as Fedora. Confirm intent.
os="fedora"
fi
# Install build tools and libusb headers using the detected distro's
# package manager.
install_prereqs () {
if [[ $os == "ubuntu" || $os == "pop" || $os == "debian" ]]; then
sudo apt update && sudo apt -y install build-essential curl cabextract libusb-1.0-0-dev
elif [[ $os == "solus" ]]; then
sudo eopkg install -y -c system.devel curl cabextract libusb-compat-devel
else [[ $os == "fedora" ]];
# NOTE(review): as in the detection block, this test's result is ignored;
# this branch is effectively a plain `else`.
sudo dnf install -y make automake gcc gcc-c++ kernel-devel curl cabextract libusb-devel
fi
}
# Fetch the xow sources into ./xow.
clone_xow () {
git clone $XOW_REPO
}
# Build xow in release mode. Note: changes the working directory to ./xow,
# which install_xow relies on.
build_xow () {
cd xow
make BUILD=RELEASE
}
# Install the driver (from the xow build directory) and enable/start its
# systemd service.
install_xow () {
sudo make install
sudo systemctl enable xow
sudo systemctl start xow
}
# Offer the user a reboot once installation is done.
# BUG FIX: the original printed "Please provide a valid response" on bad input
# but then fell off the end of the function (exiting the script) instead of
# asking again; the prompt now loops until a valid yes/no answer is given.
optional_reboot () {
    while true; do
        read -p "Install complete, would you like to reboot now? (yes|no)" reboot
        if [[ $reboot == yes ]]; then
            shutdown -r now
        elif [[ $reboot == no ]]; then
            exit 0
        else
            echo "Please provide a valid response"
        fi
    done
}
# Main sequence: prerequisites -> clone -> build -> install -> optional reboot.
install_prereqs
clone_xow
build_xow
install_xow
optional_reboot
<file_sep># xow-install-script
The purpose of the script is to automate the installation of the Xbox One wireless dongle driver ([xow](https://github.com/medusalix/xow)).
The installation instructions on the xow page are fairly straightforward, but I've personally run into issues with ensuring that the
prerequisites are installed, so that will likely be this script's main usefulness.
Please see the xow page for full details about the wireless dongle driver and the important notes, the main thing being:
***By using xow, you accept Microsoft's license terms for their driver package.*** | 361d69107fd80e7c9276ba4cc78c9d806ae87ab1 | [
"Markdown",
"Shell"
] | 2 | Shell | cWashington91/xow-install-script | cd938dd951c368e7b95a33ab50e714f3ca80e989 | 5599477b6ec8629968fb6a555e64fa3cabee996d |
refs/heads/master | <repo_name>saieedgeorge0/finalschedule<file_sep>/README.txt
This is a little thing I whipped up that uses the Twilio API to allow University of Chicago students to text their classes to a number and have it text them back the location, date, and time of that final.
<file_sep>/requirements.txt
gunicorn==19.4.5
virtualenv==15.0.1
Flask>=0.8
twilio>=3.3.6<file_sep>/run.py
from flask import Flask, request, redirect
import twilio.twiml

app = Flask(__name__)

# Known callers, keyed by E.164 phone number -> display name.
# Try adding your own number to this list!
callers = {
"+14124179805": "George",
"+19542269211": "Philip",
}
finals = {
"AANL 10103 1": ["LEC Elementary Hittite (3) Goedegebuure 8:00 AM 10:00 AM W 6/8/2016 OR 315"],
"AKKD 10103 1": ["LEC Elementary Akkadian (3) Reculeau 10:30 AM 12:30 PM M 6/6/2016 OR 210"],
"ANTH 21420 1": ["SEM Ethnographic Methods Jenkins 1:30 PM 3:30 PM R 6/9/2016 HM 104"],
"ARAB 10103 1": ["LEC Elementary Arabic (3) abu-Eledam 10:30 AM 12:30 PM F 6/10/2016 HM 145"],
"ARAB 10103 2": ["LEC Elementary Arabic (3) abu-Eledam 10:30 AM 12:30 PM M 6/6/2016 HM 145"],
"ARAB 10103 3": ["LEC Elementary Arabic (3) Choudar 1:30 PM 3:30 PM F 6/10/2016 C 201A-B"],
"ARAB 10251 1": ["LEC Colloquial Egyptian Arabic (1) Abdel-Mobdy 1:30 PM 3:30 PM R 6/9/2016 C 210"],
"ARAB 20103 2": ["LEC Intermediate Arabic (3) Heikkinen 1:30 PM 3:30 PM F 6/10/2016 C 104"],
"ARAB 20103 3": ["LEC Intermediate Arabic (3) Abdel-Mobdy 4:00 PM 6:00 PM R 6/9/2016 WB 230"],
"ARAB 29001 1": ["LEC Arabic Through Film Forster 1:30 PM 3:30 PM T 6/7/2016 C 403"],
"ARAB 30203 1": ["LEC High Intermediate Modern Standard Arabic (3) Forster 10:30 AM 12:30 PM W 6/8/2016 HM 104"],
"ARAB 30303 1": ["LEC High Intermediate Classical Arabic (3) Heikkinen 10:30 AM 12:30 PM W 6/8/2016 C 104"],
"ARAB 30352 1": ["LEC Arabic Through Maghribi Literature Choudar 4:00 PM 6:00 PM W 6/8/2016 C 430"],
"ARAB 40102 1": ["LEC Advanced Arabic Syntax (2) Qutbuddin 10:30 AM 12:30 PM T 6/7/2016 P 218"],
"ARAB 40392 1": ["SEM Readings: The Sira Literature Donner 1:30 PM 3:30 PM F 6/10/2016 OR 210"],
"ARAM 10403 1": ["LEC Elementary Syriac (3) Creason 10:30 AM 12:30 PM R 6/9/2016 OR 208"],
"ARTH 15707 1": ["CRS American Art since the Great War English 1:30 PM 3:30 PM T 6/7/2016 CWAC 157"],
"ARTV 10100 1": ["CRS Visual Language: On Images Beck 1:30 PM 3:30 PM F 6/10/2016 LC 601"],
"ARTV 10100 2": ["CRS Visual Language: On Images Mauser 6:00 PM 8:00 PM T 6/7/2016 LC 401"],
"ARTV 10100 3": ["CRS Visual Language: On Images Adams 10:30 AM 12:30 PM T 6/7/2016 LC 401"],
"ARTV 10100 4": ["CRS Visual Language: On Images Lloyd 1:30 PM 3:30 PM T 6/7/2016 LC 601"],
"ARTV 10100 5": ["CRS Visual Language: On Images Williamson 4:00 PM 6:00 PM T 6/7/2016 LC 601"],
"ARTV 10200 1": ["CRS Visual Language: On Objects Jackson 10:30 AM 12:30 PM M 6/6/2016 LC 110"],
"ARTV 10200 2": ["CRS Visual Language: On Objects Rouse 10:30 AM 12:30 PM M 6/6/2016 Not Available"],
"ARTV 21501 1": ["CRS Introduction to Printmaking Desjardins 1:30 PM 3:30 PM T 6/7/2016 LC 109"],
"ARTV 22310 1": ["CRS Art of Engagement Ginsburg 10:30 AM 12:30 PM T 6/7/2016 LC 108"],
"ARTV 22500 1": ["CRS Digital Imaging Salavon 10:30 AM 12:30 PM M 6/6/2016 LC 028"],
"ARTV 22502 1": ["CRS Data and Algorithm in Art Salavon 1:30 PM 3:30 PM F 6/10/2016 LC 028"],
"ARTV 23804 1": ["CRS Experimental Animation Wolniak 10:30 AM 12:30 PM T 6/7/2016 LC 014"],
"ARTV 23805 1": ["CRS Minimalist Experiment in Film and Video Rodowick 1:30 PM 3:30 PM T 6/7/2016 LC 014"],
"ARTV 24121 1": ["CRS Adopted Strategies Jackson 1:30 PM 3:30 PM F 6/10/2016 LC 110"],
"ARTV 24201 1": ["CRS Collage Wolniak 1:30 PM 3:30 PM T 6/7/2016 LC 401"],
"ARTV 24401 1": ["CRS Photography (1) Letinsky 1:30 PM 3:30 PM F 6/10/2016 ED-B 045-D"],
"ARTV 24402 1": ["CRS Photography (2) Letinsky 1:30 PM 3:30 PM F 6/10/2016 Not Available"],
"ARTV 27210 1": ["CRS Intermediate/Advanced Painting Desjardins 8:00 AM 10:00 AM T 6/7/2016 LC 203"],
"ARTV 29600 1": ["CRS Junior Seminar Ginsburg 1:30 PM 3:30 PM T 6/7/2016 LC 802"],
"ARTV 39901 1": ["CRS 21st Century Art Jackson 8:00 AM 10:00 AM W 6/8/2016 LC 802"],
"ARTV 44319 1": ["CRS Writing Images/Picturing Words Stockholder 1:30 PM 3:30 PM F 6/10/2016" "LC 109"],
"ASLG 10300 1": ["CRS American Sign Language (3) Ronchen 10:30 AM 12:30 PM W 6/8/2016 C 115"],
"ASLG 10300 2": ["CRS American Sign Language (3) Ronchen 4:00 PM 6:00 PM R 6/9/2016 C 115"],
"ASLG 10600 1": ["CRS Intermediate ASL (3) Ronchen 10:30 AM 12:30 PM R 6/9/2016 C 115"],
"BANG 10300 1": ["CRS First-Year Bangla (3) Bhaduri 8:00 AM 10:00 AM W 6/8/2016 C 210"],
"BANG 20300 1": ["CRS Second-Year Bangla (3) Bhaduri 10:30 AM 12:30 PM F 6/10/2016 C 228"],
"BASQ 12200 1": ["CRS Elementary Basque (3) Palenzuela-Rodrigo 10:30 AM 12:30 PM R 6/9/2016 C 430"],
"BCSN 10303 1": ["CRS First-Year Bosnian/Croatian/Serbian (3) Petkovic 1:30 PM 3:30 PM F 6/10/2016 C 205"],
"BCSN 20303 1": ["CRS Second-Year Bosnian/Croatian/Serbian (3) Petkovic 10:30 AM 12:30 PM M 6/6/2016 F 408"],
"BCSN 21300 1": ["CRS (Re)Branding the Balkan City: Comtemp Belgrade/Sarajevo/Zagreb Petkovic 10:30 AM 12:30 PM R 6/9/2016 F 408"],
"BIOS 11128 1": ["LEC Introduction to Human Genetics Christianson 10:30 AM 12:30 PM W 6/8/2016 BSLC 205"],
"BIOS 11132 1": ["LEC Genes, Evolution, and Society Lahn 1:30 PM 3:30 PM R 6/9/2016 BSLC 205"],
"BIOS 11133 1": ["LEC Human Variation, Race, and Genomics Lindo 4:00 PM 6:00 PM W 6/8/2016 BSLC 205"],
"BIOS 11140 1": ["LEC Biotechnology for the 21st Century Bhasin 10:30 AM 12:30 PM T 6/7/2016 BSLC 218"],
"BIOS 12115 1": ["LEC Responses of Cardiopulmonary System to Stress Gupta 8:00 AM 10:00 AM T 6/7/2016 BSLC 205"],
"BIOS 12117 1": ["LEC The 3.5 Billion Year History of the Human Body Shubin 1:30 PM 3:30 PM R 6/9/2016 BSLC 008"],
"BIOS 12120 1": ["LEC Pheromones: The Chemical Signals Around You Ruvinsky 10:30 AM 12:30 PM T 6/7/2016 BSLC 001"],
"BIOS 13111 1": ["LEC Natural History of North American Deserts Larsen 1:30 PM 3:30 PM F 6/10/2016 BSLC 109"],
"BIOS 13112 0": ["LEC Natural History of North American Deserts; Field School Larsen 1:30 PM 3:30 PM F 6/10/2016 Not Available"],
"BIOS 14114 0": ["LEC Drugs Galore: What They Are and What They Do To You Zaragoza 10:30 AM 12:30 PM R 6/9/2016 BSLC 218"],
"BIOS 14115 1": ["LEC From Social Neuroscience to Medical Neuroscience and Back Cacioppo 10:30 AM 12:30 PM R 6/9/2016 BSLC 008"],
"BIOS 15115 1": ["LEC Cancer Biology: How Good Cells Go Bad Villereal 10:30 AM 12:30 PM T 6/7/2016 BSLC 008"],
"BIOS 15123 1": ["LEC The Microbiome in Human and Environmental Health Gilbert 10:30 AM 12:30 PM T 6/7/2016 BSLC 205"],
"BIOS 20150 0": ["LEC How Can We Understand the Biosphere? Allesina 10:30 AM 12:30 PM T 6/7/2016 BSLC 109"],
"BIOS 20151 0": ["LEC Introduction to Quantitative Modeling in Biology Basic Kondrashov 8:00 AM 10:00 AM T 6/7/2016 BSLC 109"],
"BIOS 20152 0": ["LEC Introduction to Quantitative Modeling in Biology Advanced Kondrashov 1:30 PM 3:30 PM T 6/7/2016 BSLC 205"],
"BIOS 20171 0": ["LEC Human Genetics and Developmental Biology Christianson 10:30 AM 12:30 PM F 6/10/2016 BSLC 205"],
"BIOS 20172 0": ["LEC Mathematical Modeling for Pre-Med Students I. Jafari-Haddadian 10:30 AM 12:30 PM W 6/8/2016 BSLC 109"],
"BIOS 20188 AA": ["LEC Fundamentals of Physiology Mcgehee 10:30 AM 12:30 PM F 6/10/2016 BSLC 109"],
"BIOS 20189 BB": ["LEC Fundamentals of Developmental Biology Ho 10:30 AM 12:30 PM M 6/6/2016 BSLC 109"],
"BIOS 20200 0": ["LEC Introduction To Biochemistry Makinen 4:00 PM 6:00 PM R 6/9/2016 BSLC 109"],
"BIOS 21207 1": ["LEC Cell Biology Lamppa 10:30 AM 12:30 PM M 6/6/2016 BSLC 240"],
"BIOS 21249 1": ["LEC Organization, Expression, and Transmission of Genome Information. Shapiro 10:30 AM 12:30 PM R 6/9/2016 BSLC 240"],
"BIOS 21317 1": ["LEC Topics in Biological Chemistry Rice 10:30 AM 12:30 PM W 6/8/2016 BSLC 218"],
"BIOS 21328 1": ["LEC Biophysics of Biomolecules Sosnick 4:00 PM 6:00 PM T 6/7/2016 KCBD 3200"],
"BIOS 21349 0": ["LEC Protein Structure and Functions in Medicine Tang 8:00 AM 10:00 AM T 6/7/2016 BSLC 313"],
"BIOS 21356 1": ["LEC Vertebrate Development Prince 10:30 AM 12:30 PM T 6/7/2016 BSLC 202"],
"BIOS 21407 1": ["LEC Image Processing In Biology Josephs 1:30 PM 3:30 PM M 6/6/2016 CLSC 119"],
"BIOS 21417 1": ["LEC Systems Biology: Molecular Regulatory Logic of Networks Aprison 10:30 AM 12:30 PM F 6/10/2016 BSLC 305"],
"BIOS 22236 1": ["LEC Reproductive Biology of Primates Martin 10:30 AM 12:30 PM W 6/8/2016 BSLC 305"],
"BIOS 22250 1": ["LEC Chordates: Evolution and Comparative Anatomy Coates 1:30 PM 3:30 PM T 6/7/2016 BSLC 305"],
"BIOS 22260 1": ["LEC Vertebrate Structure and Function Sereno 10:30 AM 12:30 PM T 6/7/2016 ACC F150"],
"BIOS 23100 1": ["LEC Dinosaur Science Sereno 8:00 AM 10:00 AM T 6/7/2016 ACC F150"],
"BIOS 23232 0": ["LEC Ecology & Evolution in the Southwest Larsen 1:30 PM 3:30 PM F 6/10/2016 Not Available"],
"BIOS 23233 0": ["LEC Ecology & Evolution in the Southwest:Field School Larsen 1:30 PM 3:30 PM F 6/10/2016 Not Available"],
"BIOS 23254 1": ["LEC Mammalian Ecology Larsen 10:30 AM 12:30 PM T 6/7/2016 BSLC 313"],
"BIOS 23299 1": ["LEC Plant Development/Molecular Genetics Greenberg 8:00 AM 10:00 AM T 6/7/2016 BSLC 305"],
"BIOS 23409 1": ["LEC The Ecology and Evolution of Infectious Diseases Dwyer 8:00 AM 10:00 AM T 6/7/2016 BSLC 240"],
"BIOS 23410 1": ["LEC Complex Interactions: Coevolution, Parasites, Mutualists, and Cheaters Lumbsch 4:00 PM 6:00 PM M 6/6/2016 BSLC 324"],
"BIOS 24205 1": ["LEC Systems Neuroscience Hale 1:30 PM 3:30 PM T 6/7/2016 BSLC 008"],
"BIOS 24218 1": ["LEC Molecular Neurobiology Sisodia 10:30 AM 12:30 PM R 6/9/2016 BSLC 313"],
"BIOS 24232 1": ["LEC Computational Approaches to Cogintive Neuroscience Hatsopoulos 1:30 PM 3:30 PM R 6/9/2016 BSLC 240"],
"BIOS 24408 1": ["LEC Modeling and Signal Analysis for Neuroscientists Van-Drongelen 1:30 PM 3:30 PM F 6/10/2016 BSLC 401"],
"BIOS 25109 1": ["LEC Tpcs: Reproductive Bio/Cancer Greene 10:30 AM 12:30 PM T 6/7/2016 BSLC 240"],
"BIOS 25126 1": ["LEC Animal Models of Human Disease Niekrasz 4:00 PM 6:00 PM W 6/8/2016 BSLC 001"],
"BIOS 25228 1": ["LEC Endocrinology III: Human Disease Musch 4:00 PM 6:00 PM R 6/9/2016 BSLC 001"],
"BIOS 25287 1": ["LEC Introduction to Virology Manicassamy 1:30 PM 3:30 PM F 6/10/2016 BSLC 001"],
"BIOS 25308 1": ["LEC Heterogeneity in Human Cancer: Etiology and Treatment Macleod 1:30 PM 3:30 PM R 6/9/2016 BSLC 202"],
"BIOS 28407 1": ["LEC Genomics and Systems Biology Gilad 1:30 PM 3:30 PM T 6/7/2016 BSLC 218"],
"BIOS 29326 1": ["LEC Intro: Medical Physics Armato-III 1:30 PM 3:30 PM T 6/7/2016 BSLC 240"],
"CABI 32000 1": ["LEC Translational Approaches in Cancer Biology Macleod 1:30 PM 3:30 PM T 6/7/2016 BSLC 202"],
"CAPP 30123 1": ["LEC Computer Science with Applications-3 Wachs 10:30 AM 12:30 PM F 6/10/2016 RY 276"],
"CAPP 30235 1": ["LEC Databases for Public Policy Elmore 8:00 AM 10:00 AM T 6/7/2016 RY 277"],
"CAPP 30254 1": ["LEC Machine Learning for Public Policy Ghani 10:30 AM 12:30 PM T 6/7/2016 RY 276"],
"CATA 11100 1": ["LEC Accelerated Catalan I Girons-Masot 10:30 AM 12:30 PM M 6/6/2016 C 210"],
"CATA 21600 1": ["LEC Catalan Culture and Society: Art, Music, and Cinema Girons-Masot 10:30 AM 12:30 PM W 6/8/2016 C 210"],
"CCTS 40006 1": ["CRS Pharmacogenomics: Discovery and Implementation Huang 10:30 AM 12:30 PM M 6/6/2016 BSLC 305"],
"CHDV 20890 1": ["SEM Mental Health: International and Social Perspectives Sandhya 1:30 PM 3:30 PM F 6/10/2016 RO 329"],
"CHDV 20890 2": ["SEM Mental Health: International and Social Perspectives Sandhya 4:00 PM 6:00 PM M 6/6/2016 RO 432"],
"CHDV 21901 1": ["CRS Language, Culture, and Thought Lucy 1:30 PM 3:30 PM T 6/7/2016 HM 130"],
"CHEM 11300 1": ["LEC Comprehensive General Chemistry-III Lee 10:30 AM 12:30 PM M 6/6/2016 K 107"],
"CHEM 11300 2": ["LEC Comprehensive General Chemistry-III Roux 10:30 AM 12:30 PM M 6/6/2016 K 120"],
"CHEM 12300 0": ["LEC Honors General Chemistry-3 Voth 8:00 AM 10:00 AM W 6/8/2016 K 120"],
"CHEM 20200 1": ["LEC Inorganic Chemistry-2 Jordan 10:30 AM 12:30 PM M 6/6/2016 K 102"],
"CHEM 22200 0": ["LEC Organic Chemistry-3 Snyder 10:30 AM 12:30 PM T 6/7/2016 K 107"],
"CHEM 23200 0": ["LEC Honors Organic Chemistry-3 Rawal 10:30 AM 12:30 PM T 6/7/2016 K 120"],
"CHEM 26300 1": ["LEC Chem Kinetic/Dynamics Butler 10:30 AM 12:30 PM F 6/10/2016 K 102"],
"CHEM 26800 1": ["LEC Computational Chemistry and Biology Dinner 8:00 AM 10:00 AM T 6/7/2016 K 120"],
"CHEM 30900 1": ["LEC Bioinorganic Chemistry He 8:00 AM 10:00 AM T 6/7/2016 K 102"],
"CHEM 36500 1": ["LEC Chemical Dynamics Sibener 10:30 AM 12:30 PM T 6/7/2016 K 102"],
"CHEM 36700 1": ["LEC Experimental Physical Chemistry Special Topics Scherer 10:30 AM 12:30 PM T 6/7/2016 K 101"],
"CHEM 38700 1": ["LEC Biophysical Chemistry Tokmakoff 8:00 AM 10:00 AM T 6/7/2016 K 101"],
"CHIN 10300 1": ["CRS Elementary Modern Chinese-3 Cai 8:00 AM 10:00 AM W 6/8/2016 C 319"],
"CHIN 10300 2": ["CRS Elementary Modern Chinese-3 Cai 8:00 AM 10:00 AM W 6/8/2016 C 319"],
"CHIN 10300 3": ["CRS Elementary Modern Chinese-3 Xiang 8:00 AM 10:00 AM W 6/8/2016 C 319"],
"CHIN 10300 4": ["CRS Elementary Modern Chinese-3 Xiang 10:30 AM 12:30 PM M 6/6/2016 C 202"],
"CHIN 10300 5": ["CRS Elementary Modern Chinese-3 Kuo 10:30 AM 12:30 PM M 6/6/2016 C 202"],
"CHIN 11300 1": ["CRS First -Yr. Chinese for Bilinqual Speakers-3 Yang 10:00 AM 12:00PM M 6/6/2016 C 304"],
"CHIN 20300 1": ["CRS Intermediate Modern Chinese-3 Li 8:00 AM 10:00 AM R 6/9/2016 STU 104"],
"CHIN 20300 2": ["CRS Intermediate Modern Chinese-3 Li 8:00 AM 10:00 AM R 6/9/2016 STU 104"],
"CHIN 21300 1": ["CRS Accelerated Chinese for Bilingual Speakers-3 Xu 8:00 AM 10:00 AM M 6/6/2016 C 430"],
"CHIN 30300 1": ["CRS Advanced Modern Chinese-3 Yang 10:00 AM 12:00PM W 6/8/2016 C 304"],
"CHIN 30300 2": ["CRS Advanced Modern Chinese-3 Xu 8:00 AM 10:00 AM R 6/8/2016 C 213"],
"CHIN 41300 1": ["CRS Fourth-Year Modern Chinese-3 Kuo 8:00 AM 10:00 AM R 6/9/2016 C 103"],
"CHIN 51300 1": ["CRS Fifth-Year Modern Chinese-3 Wang 8:00 AM 10:00 AM R 6/9/2016 C 104"],
"CLAS 34515 1": ["CRS Money and the Ancient Greek World Bresson 1:30 PM 3:30 PM F 6/10/2016 C 409"],
"CLAS 35415 1": ["CRS Text into Data: Digital Philology Dik 1:30 PM 3:30 PM T 6/7/2016 CL 021"],
"CLAS 45716 1": ["SEM Sem: Ghosts, Demons & Supernatural Danger in the Anc. World Lincoln 1:30 PM 3:30 PM F 6/10/2016 CL 021"],
"CLCV 25808 1": ["CRS <NAME> 10:30 AM 12:30 PM R 6/9/2016 HM 140"],
"CLCV 28315 1": ["SEM Ephron Seminar Gouvea 10:30 AM 12:30 PM R 6/9/2016 HM 150"],
"CLCV 29000 1": ["CRS Myth Course Shandruk 1:30 PM 3:30 PM T 6/7/2016 HM 150"],
"CMSC 11000 1": ["LEC Multimed Prog: Interdisc Art-1 Sterner 1:30 PM 3:30 PM R 6/9/2016 RY 277"],
"CMSC 12300 1": ["LEC Computer Science with Applications-3 Wachs 10:30 AM 12:30 PM F 6/10/2016 RY 277"],
"CMSC 15200 1": ["LEC Intro To Computer Science-2 Franklin 1:30 PM 3:30 PM F 6/10/2016 STU 101"],
"CMSC 15400 1": ["LEC Intro To Computer Systems Hoffmann 10:30 AM 12:30 PM M 6/6/2016 RY 251"],
"CMSC 15400 2": ["LEC Intro To Computer Systems Gunawi 10:30 AM 12:30 PM W 6/8/2016 RY 251"],
"CMSC 15400 3": ["LEC Intro To Computer Systems Wachs 1:30 PM 3:30 PM F 6/10/2016 RY 251"],
"CMSC 22001 1": ["LEC Software Construction Lu 1:30 PM 3:30 PM R 6/9/2016 RY 276"],
"CMSC 22010 1": ["LEC Digital Fabrication Stevens 1:30 PM 3:30 PM F 6/10/2016 SCL 240"],
"CMSC 22100 1": ["LEC Programming Languages Shaw 10:30 AM 12:30 PM R 6/9/2016 RY 251"],
"CMSC 23310 1": ["LEC Advanced Distributed Systems Sotomayor-Basilio 4:00 PM 6:00 PM W 6/8/2016 C 112"],
"CMSC 23310 2": ["LEC Advanced Distributed Systems Sotomayor-Basilio 4:00 PM 6:00 PM M 6/6/2016 C 112"],
"CMSC 23900 1": ["LEC Data Visualization Kindlmann 10:30 AM 12:30 PM T 6/7/2016 RY 251"],
"CMSC 25020 1": ["LEC Computational Linguistics Goldsmith 10:30 AM 12:30 PM M 6/6/2016 K 101"],
"CMSC 27200 1": ["LEC Theory of Algorithms Simon 10:30 AM 12:30 PM F 6/10/2016 RY 251"],
"CMSC 27230 1": ["LEC Honors Theory of Algorithms Drucker 10:30 AM 12:30 PM W 6/8/2016 RY 276"],
"CMSC 27410 1": ["LEC Honors Combinatorics Babai 10:30 AM 12:30 PM R 6/9/2016 RY 276"],
"CMSC 27500 1": ["LEC Graph Theory Mulmuley 8:00 AM 10:00 AM T 6/7/2016 RY 251"],
"CMSC 27610 1": ["LEC Digital Biology Scott 8:00 AM 10:00 AM T 6/7/2016 RY 276"],
"CMSC 28100 1": ["LEC Intro Complexity Theory Mulmuley 1:30 PM 3:30 PM T 6/7/2016"],
"CMSC 32001 1": ["LEC Topics: Programming Langs. Chugh 1:30 PM 3:30 PM R 6/9/2016 P 022"],
"CMSC 33001 1": ["LEC Topics in Systems Chong 1:30 PM 3:30 PM T 6/7/2016 RY 277"],
"CMSC 33251 1": ["LEC Topics in Computer Security Feldman 1:30 PM 3:30 PM F 6/10/2016 RY 277"],
"CMSC 34900 1": ["LEC Topics In Scientific Computing Scott 10:30 AM 12:30 PM R 6/9/2016 RY 277"],
"CMSC 35050 1": ["LEC Computational Linguistics Goldsmith 10:30 AM 12:30 PM M 6/6/2016 K 101"],
"CMSC 37120 1": ["LEC Topics in Discrete Mathematics Razborov 10:30 AM 12:30 PM T 6/7/2016 RY 277"],
"CMSC 37200 1": ["LEC Combinatorics Babai 10:30 AM 12:30 PM R 6/9/2016 Not Available"],
"CMSC 38100 1": ["CRS Computability Theory-2 Hirschfeldt 1:30 PM 3:30 PM T 6/7/2016 Not Available"],
"CRWR 12013 1": ["SEM Special Topics in Fiction: Genre Rules and Rebels DeWoskin 10:30 AM 12:30 PM T 6/7/2016 M 102"],
"CRWR 22115 1": ["SEM Advanced Fiction Workshop: Characters in Conflict DeWoskin 1:30 PM 3:30 PM F 6/10/2016 M 102"],
"CRWR 27103 1": ["CRS Advanced Screenwriting Petrakis 1:30 PM 3:30 PM R 6/9/2016 LC 802"],
"EALC 19900 1": ["CRS Early Modern Japanese History Toyosawa 4:00 PM 6:00 PM W 6/8/2016 C 303"],
"ECON 19800 1": ["LEC Introduction To Microeconomics Sanderson 1:30 PM 3:30 PM M 6/6/2016 SS 122"],
"ECON 19800 2": ["CRS Introduction To Microeconomics List 1:30 PM 3:30 PM R 6/9/2016 SS 122"],
"ECON 20000 1": ["LEC Elements of Economic Analysis-1 Tsiang 6:30 PM 8:30 PM M 6/6/2016 SHFE 146"],
"ECON 20000 2": ["LEC Elements of Economic Analysis-1 Tsiang 6:30 PM 8:30 PM M 6/6/2016 SHFE 146"],
"ECON 20010 1": ["LEC Elements of Economics Analysis 1: <NAME> 1:30 PM 3:30 PM T 6/7/2016 STU 102"],
"ECON 20200 1": ["LEC Elements of Economic Analysis-3 Tartari 8:00 AM 10:00 AM W 6/8/2016 STU 104"],
"ECON 20200 2": ["LEC Elements of Economic Analysis-3 Tartari 10:30 AM 12:30 PM M 6/6/2016 STU 104"],
"ECON 20200 3": ["LEC Elements of Economic Analysis-3 Lopes-de-Melo 6:30 PM 8:30 PM M 6/6/2016 Not Available"],
"ECON 20200 4": ["LEC Elements of Economic Analysis-3 Lopes-de-Melo 6:30 PM 8:30 PM M 6/6/2016 Not Available"],
"ECON 20200 5": ["LEC Elements of Economic Analysis-3 Lopes-de-Melo 6:30 PM 8:30 PM M 6/6/2016 Not Available"],
"ECON 20210 1": ["LEC Elements of Economics Analysis 3-HONORS van-Vliet 1:30 PM 3:30 PM T 6/7/2016 RO 015"],
"ECON 20300 1": ["CRS Elements of Economic Analysis-4 Wang 10:30 AM 12:30 PM M 6/6/2016 SHFE 146"],
"ECON 20300 2": ["LEC Elements of Economic Analysis-4 Hughes 10:30 AM 12:30 PM R 6/9/2016 SHFE 146"],
"ECON 20310 1": ["LEC Elements of Economics Analysis 4:H<NAME> 1:30 PM 3:30 PM T 6/7/2016 SHFE 203"],
"ECON 20700 1": ["LEC Game Theory and Economic Applications Myerson 10:30 AM 12:30 PM M 6/6/2016 SHFE 021"],
"ECON 20740 1": ["LEC Analysis of Collective Decision-Making van-Weelden 8:00 AM 10:00 AM W 6/8/2016 SHFE 146"],
"ECON 20900 1": ["LEC Intro To Econometrics: <NAME> 4:00 PM 6:00 PM W 6/8/2016 SHFE 203"],
"ECON 21000 1": ["LEC Econometrics A Hickman 4:00 PM 6:00 PM W 6/8/2016 SHFE 146"],
"ECON 21000 2": ["LEC Econometrics A Hickman 4:00 PM 6:00 PM M 6/6/2016 SHFE 203"],
"ECON 21000 3": ["LEC Econometrics A Dagnino-Jimenez 10:30 AM 12:30 PM R 6/9/2016 SHFE 203"],
"ECON 21000 4": ["LEC Econometrics A Bittmann 8:00 AM 10:00 AM T 6/7/2016 RO 011"],
"ECON 21150 1": ["LEC Topics in Applied Econometrics Tartari 1:30 PM 3:30 PM F 6/10/2016 SHFE 203"],
"ECON 21200 1": ["LEC Time Series Econometrics Marrone 10:30 AM 12:30 PM R 6/9/2016 STU 104"],
"ECON 21410 1": ["LEC Computational Methods in Economics Browne 8:00 AM 10:00 AM T 6/7/2016 SHFE 203"],
"ECON 23000 1": ["LEC Money and Banking Yoshida 8:00 AM 10:00 AM T 6/7/2016 SHFE 021"],
"ECON 25000 1": ["LEC Introduction To Finance Choi 4:00 PM 6:00 PM W 6/8/2016 RO 011"],
"ECON 25100 1": ["LEC Financial Economics B: Speculative Markets Alvarez 10:30 AM 12:30 PM T 6/7/2016 STU 105"],
"ECON 26600 1": ["LEC Urban Economics Tolley 10:30 AM 12:30 PM W 6/8/2016 SHFE 203"],
"ECON 30300 1": ["LEC Price Theory-3 Reny 1:30 PM 3:30 PM T 6/7/2016 SHFE 146"],
"ECON 30701 1": ["LEC Evolutionary Game Theory Szentes 8:00 AM 10:00 AM T 6/7/2016 SHFE 103"],
"ECON 31200 1": ["LEC Empirical Analysis-3 Bonhomme 10:30 AM 12:30 PM T 6/7/2016 STU 101"],
"ECON 31710 1": ["LEC Identification in Nonlinear Econometric Models Torgovitsky 1:30 PM 3:30 PM F 6/10/2016 P 222"],
"ECON 33200 1": ["LEC Theory of Income-3 Mulligan 8:00 AM 10:00 AM W 6/8/2016 SHFE 203"],
"ECON 34901 1": ["LEC Social Interactions and Inequality Durlauf 10:30 AM 12:30 PM M 6/6/2016 SHFE 103"],
"ECON 35003 1": ["LEC Human Capital, Markets, and the Family Heckman 4:00 PM 6:00 PM M 6/6/2016 SHFE 141"],
"ECON 35301 1": ["LEC International Trade & Growth Lucas Jr 10:30 AM 12:30 PM T 6/7/2016 SHFE 103"],
"ECON 40104 1": ["LEC Advanced Industrial Organization IV Hickman 6:00 PM 8:00 PM T 6/7/2016 SHFE 103"],
"ECON 50300 1": ["SEM Becker Applied Economics Workshop List 1:30 PM 3:30 PM R 6/9/2016 SHFE 146"],
"EGPT 10103 1": ["LEC Middle Egyptian Texts-1 Singer 10:30 AM 12:30 PM F 6/10/2016 C 102"],
"EGPT 20110 1": ["LEC Introduction to Old Egyptian Hainline 10:30 AM 12:30 PM M 6/6/2016 OR 208"],
"EGPT 20210 1": ["LEC Introduction to Late Egyptian Johnson 10:30 AM 12:30 PM F 6/10/2016 OR 208"],
"ENGL 20222 1": ["CRS Introduction to British Romantic Literature Hansen 10:30 AM 12:30 PM R 6/9/2016 SHFE 103"],
"ENST 24102 1": ["CRS Environmental Politics Lodato 4:00 PM 6:00 PM T 6/7/2016 HM 130"],
"ENST 27120 1": ["SEM Historical Ecology of the Calumet Region Lycett 10:30 AM 12:30 PM F 6/10/2016 WB 102"],
"ENST 27220 1": ["SEM Environmental Management and Planning in the Calumet Region Shaikh 10:30 AM 12:30 PM T 6/7/2016 SHFE 242"],
"ENST 27320 1": ["SEM Topics in the Ecology of the Calumet Region Anastasio 10:30 AM 12:30 PM M 6/6/2016 CL 313"],
"FINM 32400 1": ["LEC Computing for Finance-3 Liyanaarachchi 6:00 PM 8:00 PM T 6/7/2016 MS 112"],
"FINM 33150 1": ["LEC Regression Analysis and Quantitative Trading Strategies Boonstra 6:00 PM 8:00 PM W 6/8/2016 MS 112"],
"FREN 10100 1": ["LEC Beginning Elementary French-1 Grangier 8:00 AM 10:00 AM R 6/9/2016 C 106"],
"FREN 10200 2": ["LEC Beginning Elementary French-2 Liu 8:00 AM 10:00 AM R 6/9/2016 C 110"],
"FREN 10300 1": ["LEC Beginning Elementary French-3 Legrand 8:00 AM 10:00 AM R 6/9/2016 K 120"],
"FREN 10300 2": ["LEC Beginning Elementary French-3 Di-Vito 8:00 AM 10:00 AM R 6/9/2016 K 120"],
"FREN 10300 3": ["LEC Beginning Elementary French-3 Liu 8:00 AM 10:00 AM R 6/9/2016 K 120"],
"FREN 10300 4": ["LEC Beginning Elementary French-3 Faton 8:00 AM 10:00 AM R 6/9/2016 K 120"],
"FREN 10300 5": ["LEC Beginning Elementary French-3 Delgado-Norris 8:00 AM 10:00 AM R 6/9/2016 K 120"],
"FREN 20100 1": ["LEC Language History Culture-1 Legrand 8:00 AM 10:00 AM R 6/9/2016 C 402"],
"FREN 20200 1": ["LEC Language History Culture-2 Petrush 8:00 AM 10:00 AM R 6/9/2016 C 107"],
"FREN 20200 2": ["LEC Language History Culture-2 Petrush 8:00 AM 10:00 AM R 6/9/2016 C 107"],
"FREN 20300 1": ["LEC Language History Culture-3 Faton 8:00 AM 10:00 AM R 6/9/2016 STU 102"],
"FREN 20300 2": ["LEC Language History Culture-3 Bordeaux 8:00 AM 10:00 AM R 6/9/2016 STU 102"],
"FREN 20300 3": ["LEC Language History Culture-3 Bordeaux 8:00 AM 10:00 AM R 6/9/2016 STU 102"],
"FREN 20500 1": ["LEC Ecrire En Francais Berg 8:00 AM 10:00 AM R 6/9/2016 C 302"],
"FREN 20500 2": ["LEC Ecrire En Francais Gao 8:00 AM 10:00 AM R 6/9/2016 C 302"],
"FREN 20601 1": ["LEC Expression orale et phonetique Berg 10:30 AM 12:30 PM W 6/8/2016 C 302"],
"FREN 21810 1": ["CRS Introduction a la litterature francaise du XVIIIe siecle Morrissey 10:30 AM 12:30 PM M 6/6/2016 C 103"],
"GEOG 28600 1": ["SEM Advanced GIS Analysis Schuble 4:00 PM 6:00 PM W 6/8/2016 BSLC 018"],
"GEOS 13300 1": ["LEC The Atmosphere Abbot 1:30 PM 3:30 PM F 6/10/2016"],
"GEOS 21100 1": ["LEC Introduction to Petrology Dauphas 10:30 AM 12:30 PM M 6/6/2016 HGS 313"],
"GEOS 21200 1": ["LEC Physics Of The Earth Heinz 1:30 PM 3:30 PM R 6/9/2016 HGS 184"],
"GEOS 21205 1": ["LEC Intro: Seismology, Earthquakes, Near Surface Earth Seismicity MacAyeal 1:30 PM 3:30 PM R 6/9/2016 WB 310C"],
"GEOS 23400 0": ["LEC Global Warming for Science Majors Archer 10:30 AM 12:30 PM F 6/10/2016 Not Available"],
"GEOS 24250 1": ["LEC Geophysical Fluid Dynamics: Understanding the Motions of the Atmosphere and Oceans Nakamura 1:30 PM 3:30 PM F 6/10/2016 HGS 180"],
"GEOS 24705 1": ["LEC Energy: Science, Technology and Human Usage Moyer 1:30 PM 3:30 PM R 6/9/2016 SS 401"],
"GLST 24102 1": ["SEM Entertainment Industrial: Presents, Pasts, and Futures of Fun Kohl 1:30 PM 3:30 PM R 6/9/2016 C 319"],
"GLST 24103 1": ["SEM Paradise and Parks: Art, Science, Politics O'Connell 1:30 PM 3:30 PM T 6/7/2016 HM 102"],
"GREK 10300 1": ["CRS Introduction To Attic Greek-3 Darden 10:30 AM 12:30 PM W 6/8/2016 HM 150"],
"GREK 11300 1": ["CRS Accel Intro To Attic Greek-3 Wash 10:30 AM 12:30 PM W 6/8/2016 HM 135"],
"GREK 20300 1": ["CRS Intermediate Greek-3 Faraone 10:30 AM 12:30 PM W 6/8/2016 CL 021"],
"GREK 21800 1": ["CRS Greek Epic Faraone 10:30 AM 12:30 PM F 6/10/2016 CL 021"],
"GREK 32800 1": ["CRS Survey Of Greek Lit-2: Prose Dik 10:30 AM 12:30 PM T 6/7/2016 CL 021"],
"GRMN 10300 1": ["SEM Elementary German For Beginners-3 Haydt 8:00 AM 10:00 AM R 6/9/2016 SS 122"],
"GRMN 10300 2": ["SEM Elementary German For Beginners-3 Friedland 8:00 AM 10:00 AM R 6/9/2016 SS 122"],
"GRMN 10300 3": ["SEM Elementary German For Beginners-3 Flannery 8:00 AM 10:00 AM R 6/9/2016 SS 122"],
"GRMN 10300 4": ["SEM Elementary German For Beginners-3 Hooper 8:00 AM 10:00 AM R 6/9/2016 SS 122"],
"GRMN 20300 1": ["SEM Kurzprosa 20. Jahrh<NAME> 8:00 AM 10:00 AM R 6/9/2016 C 102"],
"GRMN 20300 2": ["SEM Kurzprosa 20. <NAME> 8:00 AM 10:00 AM R 6/9/2016 C 102"],
"GRMN 33300 1": ["SEM German For Research Purposes <NAME> 10:30 AM 12:30 PM R 6/9/2016 C 319"],
"HEBR 10103 1": ["LEC Elementary Classical Hebrew-3 10:30 AM 12:30 PM F 6/10/2016 WB 130"],
"HEBR 10503 1": ["CRS Introductory Modern Hebrew-3 Almog 10:30 AM 12:30 PM R 6/9/2016 C 201A-B"],
"HEBR 10503 1": ["CRS Introductory Modern Hebrew-3 Almog 10:30 AM 12:30 PM W 6/8/2016 C 201A-B"],
"HEBR 20003 1": ["LEC Punic Inscriptions Pardee 10:30 AM 12:30 PM F 6/10/2016 OR 210"],
"HEBR 20106 1": ["LEC Intermed Classical Hebrew-3 Pardee 8:00 AM 10:00 AM W 6/8/2016 OR 208"],
"HEBR 30503 1": ["CRS Advanced Modern Hebrew-3 Loewy-Shacham 1:30 PM 3:30 PM T 6/7/2016 C 201C"],
"HIND 10300 1": ["CRS First-Year Hindi-3 Grunebaum 1:30 PM 3:30 PM F 6/10/2016 C 210"],
"HIND 20300 1": ["CRS Second-Year Hindi-3 Grunebaum 1:30 PM 3:30 PM M 6/6/2016 C 213"],
"HIND 30300 1": ["CRS Third-Year Hindi-3 Williams 10:30 AM 12:30 PM T 6/7/2016 HM 135"],
"HIND 40300 1": ["CRS Fourth-Year Hindi-3 Williams 10:30 AM 12:30 PM T 6/7/2016 HM 135"],
"HIND 47902 1": ["CRS Readings: Advanced Hindi -3 Williams 10:30 AM 12:30 PM T 6/7/2016 HM 135"],
"HIST 13002 8": ["CRS History of European Civilization-2 Phillips 10:30 AM 12:30 PM M 6/6/2016 C 107"],
"HIST 13002 10": ["CRS History of European Civilization-2 Craig 10:30 AM 12:30 PM T 6/7/2016 C 107"],
"HIST 13003 7": ["CRS History of European Civilization-3 Leuchter 1:30 PM 3:30 PM T 6/7/2016 C 107"],
"HIST 13300 1": ["CRS Western Civilization-3 Weintraub 10:30 AM 12:30 PM F 6/10/2016 C 107"],
"HIST 13700 2": ["CRS America in World Civilization-3 Flores 8:00 AM 10:00 AM T 6/7/2016 RO 301"],
"HIST 13700 3": ["CRS America in World Civilization-3 Parker 1:30 PM 3:30 PM F 6/10/2016 SHFE 141"],
"HIST 13700 4": ["CRS America in World Civilization-3 Sparrow 1:30 PM 3:30 PM T 6/7/2016 WB 102"],
"HIST 15300 0": ["LEC Intro to East Asian Civilization-3 Hwang 10:30 AM 12:30 PM M 6/6/2016 CLSC 101"],
"HIST 16900 1": ["CRS Anc Mediterr World-3 Kaegi 10:30 AM 12:30 PM T 6/7/2016 C 203"],
"HIST 22505 1": ["CRS Modern Britain 1688-1901 Abritton-Jonsson 1:30 PM 3:30 PM R 6/9/2016 SS 302"],
"HIST 23706 1": ["LEC The Soviet Union Gilburd 1:30 PM 3:30 PM F 6/10/2016 HM 140"],
"HIST 24608 1": ["CRS Frontiers and Expansion in Modern China Pomeranz 10:30 AM 12:30 PM T 6/7/2016 HM 140"],
"HIST 25309 1": ["CRS History of Perception Rossi 1:30 PM 3:30 PM R 6/9/2016 WB 106"],
"HIST 25415 1": ["CRS History of Information Johns 10:30 AM 12:30 PM M 6/6/2016 SS 401"],
"HIST 29514 1": ["CRS Rise of the Modern Corporation Levy 1:30 PM 3:30 PM F 6/10/2016 C 319"],
"HIST 29632 1": ["SEM Hist Colloq: The CIA and American Democracy Cumings 1:30 PM 3:30 PM T 6/7/2016 RO 432"],
"HCHR 32106 1": ["CRS Introduction ot the Study of Iconography Krause 1:30 PM 3:30 PM T 6/7/2016 CWAC 153"],
"HMRT 20100 0": ["LEC Human Rights-1 Laurence 1:30 PM 3:30 PM F 6/10/2016 MS 112"],
"ITAL 10300 1": ["LEC Elementary Italian-3 Masciello 8:00 AM 10:00 AM R 6/9/2016 HGS 101"],
"ITAL 10300 2": ["LEC Elementary Italian-3 Guslandi 8:00 AM 10:00 AM R 6/9/2016 HGS 101"],
"ITAL 10300 3": ["LEC Elementary Italian-3 Porretto 8:00 AM 10:00 AM R 6/9/2016 HGS 101"],
"ITAL 10300 4": ["LEC Elementary Italian-3 Moslemani 8:00 AM 10:00 AM R 6/9/2016 HGS 101"],
"ITAL 12200 1": ["LEC Italian for Speakers of Romance Languages Porretto 8:00 AM 10:00 AM R 6/9/2016 C 112"],
"ITAL 20300 1": ["LEC Language History Culture-3 Vegna 8:00 AM 10:00 AM R 6/9/2016 C 409"],
"ITAL 20300 2": ["LEC Language History Culture-3 Vegna 8:00 AM 10:00 AM R 6/9/2016 C 409"],
"JAPN 10300 1": ["CRS Elementary Modern Japanese-3 Miyachi 8:00 AM 10:00 AM T 6/7/2016 C 402"],
"JAPN 10300 2": ["CRS Elementary Modern Japanese-3 Katagiri 8:00 AM 10:00 AM T 6/7/2016 C 203"],
"JAPN 10300 3": ["CRS Elementary Modern Japanese-3 Lory 8:00 AM 10:00 AM M 6/6/2016 C 403"],
"JAPN 20300 1": ["CRS Intermediate Modern Japanese-3 Katagiri 8:00 AM 10:00 AM M 6/6/2016 C 203"],
"JAPN 21300 1": ["CRS Intrmdte Japn Thru Japnmtn-2 Miyachi 8:00 AM 10:00 AM M 6/6/2016 C 402"],
"JAPN 30300 1": ["CRS Advanced Modern Japanese-3 Lory 10:30 AM 12:30 PM M 6/6/2016 C 403"],
"JWSC 20121 1": ["SEM The Bible and Archaeology Schloen 1:30 PM 3:30 PM R 6/9/2016 C 110"],
"KORE 10300 1": ["LEC Intro To Korean Language-3 Kim 10:30 AM 12:30 PM F 6/10/2016 C 219"],
"KORE 10300 2": ["LEC Intro To Korean Language-3 Kim 10:30 AM 12:30 PM M 6/6/2016 C 219"],
"KORE 20300 1": ["LEC Intermediate Korean-3 Kang 10:30 AM 12:30 PM F 6/10/2016 C 115"],
"KORE 30300 1": ["CRS Advanced Korean-3 Kim 10:30 AM 12:30 PM R 6/9/2016 C 201C"],
"KORE 42300 1": ["CRS Changing Identity of Contemporary Korea thru Film & Literature Kim 10:30 AM 12:30 PM R 6/9/2016 C 213"],
"LACS 16300 1": ["LEC Intro to Latin American Civ-3 Fischer 1:30 PM 3:30 PM F 6/10/2016 STU 102"],
"LATN 10300 1": ["CRS Introduction To Latin-3 Radding 10:30 AM 12:30 PM F 6/10/2016 CL 405"],
"LATN 10300 2": ["CRS Introduction To Latin-3 Thangada 10:30 AM 12:30 PM M 6/6/2016 HM 150"],
"LATN 11300 1": ["CRS Accel Intro To Latin-3 Weeda 10:30 AM 12:30 PM M 6/6/2016 WB 103"],
"LATN 20300 1": ["CRS Intermediate Latin-3 Allen 10:30 AM 12:30 PM M 6/6/2016 WB 130"],
"LATN 21900 1": ["CRS Roman Comedy White 10:30 AM 12:30 PM M 6/6/2016 CL 021"],
"LATN 24615 1": ["SEM Augustine: Early Philosophical Works Ando 1:30 PM 3:30 PM F 6/10/2016 CL 405"],
"LING 20001 0": ["LEC Intro to Linguistics Flinn 10:30 AM 12:30 PM M 6/6/2016 HM 130"],
"LING 20202 1": ["CRS Advanced Syntax Pietraszko 1:30 PM 3:30 PM F 6/10/2016 Y 106"],
"LING 20301 0": ["CRS Intro to Semantics & Pragmatics Francez 10:30 AM 12:30 PM T 6/7/2016 RO 011"],
"LING 21300 0": ["CRS Historical Linguistics Gorbachov 10:30 AM 12:30 PM T 6/7/2016 CLSC 101"],
"LING 27910 1": ["CRS Sign Language Linguistics Fenlon 8:00 AM 10:00 AM T 6/7/2016 WB 408"],
"LING 31000 1": ["CRS Morphology Arregui 10:30 AM 12:30 PM R 6/9/2016 P 319"],
"LING 33920 1": ["CRS The Language of Deception and Humor Riggle 1:30 PM 3:30 PM F 6/10/2016 P 016"],
"MAPS 36900 1": ["SEM Anthropology of Disability Fred 1:30 PM 3:30 PM R 6/9/2016 SHFE 141"],
"MARA 10300 1": ["CRS First Year Marathi-3 Engblom 1:30 PM 3:30 PM F 6/10/2016 C 224"],
"MARA 20300 1": ["CRS Second-Year Marathi-3 Engblom 1:30 PM 3:30 PM T 6/7/2016 C 224"],
"MATH 13200 58": ["LEC Elem Functions And Calculus-2 Chonoles 4:00 PM 6:00 PM R 6/9/2016 SS 107"],
"MATH 13300 10": ["LEC Elem Functions And Calculus-3 Moore 8:00 AM 10:00 AM W 6/8/2016 E 202"],
"MATH 13300 20": ["LEC Elem Functions And Calculus-3 Banerjee 10:30 AM 12:30 PM F 6/10/2016 E 202"],
"MATH 13300 22": ["LEC Elem Functions And Calculus-3 Chowdhury 10:30 AM 12:30 PM F 6/10/2016 E 203"],
"MATH 13300 40": ["LEC Elem Functions And Calculus-3 Pham 10:30 AM 12:30 PM W 6/8/2016 SS 105"],
"MATH 13300 42": ["LEC Elem Functions And Calculus-3 Cheng 10:30 AM 12:30 PM W 6/8/2016 SS 107"],
"MATH 13300 44": ["LEC Elem Functions And Calculus-3 Howe 10:30 AM 12:30 PM W 6/8/2016 SS 108"],
"MATH 13300 48": ["LEC Elem Functions And Calculus-3 Tran 10:30 AM 12:30 PM W 6/8/2016 P 016"],
"MATH 13300 50": ["LEC Elem Functions And Calculus-3 Zhou 4:00 PM 6:00 PM R 6/9/2016 P 022"],
"MATH 15300 11": ["LEC Calculus-3 Nagpal 8:00 AM 10:00 AM W 6/8/2016 RY 358"],
"MATH 15300 20": ["LEC Calculus-3 di Fiore 8:00 AM 10:00 AM T 6/7/2016 HGS 184"],
"MATH 15300 21": ["LEC Calculus-3 Nagpal 10:30 AM 12:30 PM F 6/10/2016 E 207"],
"MATH 15300 22": ["LEC Calculus-3 Ding 8:00 AM 10:00 AM T 6/7/2016 E 312"],
"MATH 15300 30": ["LEC Calculus-3 Rubin 10:30 AM 12:30 PM T 6/7/2016 SS 107"],
"MATH 15300 31": ["LEC Calculus-3 Campos-Salas 10:30 AM 12:30 PM M 6/6/2016 SS 105"],
"MATH 15300 32": ["LEC Calculus-3 Chen 10:30 AM 12:30 PM T 6/7/2016 SS 105"],
"MATH 15300 41": ["LEC Calculus-3 Chen 10:30 AM 12:30 PM W 6/8/2016 KPTC 101"],
"MATH 15300 45": ["LEC Calculus-3 Chen 10:30 AM 12:30 PM W 6/8/2016 RO 011"],
"MATH 15300 50": ["LEC Calculus-3 Leal 10:30 AM 12:30 PM R 6/9/2016 HGS 184"],
"MATH 15300 51": ["LEC Calculus-3 Casto 4:00 PM 6:00 PM R 6/9/2016 P 016"],
"MATH 15900 41": ["LEC Intro to Proof in Analysis & Lin. Alg. Shotton 10:30 AM 12:30 PM W 6/8/2016 RO 015"],
"MATH 15900 45": ["LEC Intro to Proof in Analysis & Lin. Alg. Bate 10:30 AM 12:30 PM W 6/8/2016 E 312"],
"MATH 15900 55": ["LEC Intro to Proof in Analysis & Lin. Alg. Bate 4:00 PM 6:00 PM R 6/9/2016 E 312"],
"MATH 15900 57": ["LEC Intro to Proof in Analysis & Lin. Alg. Shotton 4:00 PM 6:00 PM R 6/9/2016 SS 108"],
"MATH 16300 20": ["LEC Honors Calculus-3 Beaudry 8:00 AM 10:00 AM T 6/7/2016 RY 358"],
"MATH 16300 21": ["LEC Honors Calculus-3 Zimmermann 10:30 AM 12:30 PM F 6/10/2016 RY 358"],
"MATH 16300 30": ["LEC Honors Calculus-3 Creek 10:30 AM 12:30 PM T 6/7/2016 RY 358"],
"MATH 16300 31": ["LEC Honors Calculus-3 Stehnova 10:30 AM 12:30 PM M 6/6/2016 RY 358"],
"MATH 16300 32": ["LEC Honors Calculus-3 Levin 10:30 AM 12:30 PM T 6/7/2016 E 207"],
"MATH 16300 33": ["LEC Honors Calculus-3 Hickman 10:30 AM 12:30 PM M 6/6/2016 SS 107"],
"MATH 16300 41": ["LEC Honors Calculus-3 Brown 10:30 AM 12:30 PM W 6/8/2016 RY 358"],
"MATH 16300 50": ["LEC Honors Calculus-3 Grigoriev 10:30 AM 12:30 PM R 6/9/2016 RY 358"],
"MATH 16300 51": ["LEC Honors Calculus-3 Hurtado-Salazar 4:00 PM 6:00 PM R 6/9/2016 RY 358"],
"MATH 19520 41": ["LEC Math Methods for Soc. Sci Chi 10:30 AM 12:30 PM W 6/8/2016 P 022"],
"MATH 19520 49": ["LEC Math Methods for Soc. Sci Wu 10:30 AM 12:30 PM W 6/8/2016 HGS 184"],
"MATH 19520 55": ["LEC Math Methods for Soc. Sci Ho 4:00 PM 6:00 PM R 6/9/2016 HGS 184"],
"MATH 19520 59": ["LEC Math Methods for Soc. Sci Manning 4:00 PM 6:00 PM R 6/9/2016 K 102"],
"MATH 19620 30": ["LEC Linear Algebra Filip 10:30 AM 12:30 PM T 6/7/2016 E 202"],
"MATH 19620 32": ["LEC Linear Algebra Gadish 10:30 AM 12:30 PM T 6/7/2016 P 016"],
"MATH 19620 50": ["LEC Linear Algebra Chai 10:30 AM 12:30 PM R 6/9/2016 E 202"],
"MATH 19620 52": ["LEC Linear Algebra Frankel 10:30 AM 12:30 PM R 6/9/2016 E 207"],
"MATH 19620 54": ["LEC Linear Algebra Apisa 10:30 AM 12:30 PM R 6/9/2016 E 308"],
"MATH 20100 53": ["LEC Math Methods For Phy Sci-2 Jia 4:00 PM 6:00 PM R 6/9/2016 E 207"],
"MATH 20100 55": ["LEC Math Methods For Phy Sci-2 Xue 4:00 PM 6:00 PM R 6/9/2016 E 202"],
"MATH 20300 47": ["LEC Analysis In Rn-1 Lindsey 10:30 AM 12:30 PM W 6/8/2016 E 203"],
"MATH 20300 49": ["LEC Analysis In Rn-1 Xue 10:30 AM 12:30 PM W 6/8/2016 E 202"],
"MATH 20400 45": ["LEC Analysis In Rn-2 Snelson 10:30 AM 12:30 PM W 6/8/2016 E 207"],
"MATH 20400 55": ["LEC Analysis In Rn-2 Haberman 4:00 PM 6:00 PM R 6/9/2016 RO 011"],
"MATH 20500 31": ["LEC Analysis In Rn-3 Jing 10:30 AM 12:30 PM M 6/6/2016 E 308"],
"MATH 20500 33": ["LEC Analysis In Rn-3 Snelson 10:30 AM 12:30 PM M 6/6/2016 E 207"],
"MATH 20500 35": ["LEC Analysis In Rn-3 Ziesler 10:30 AM 12:30 PM M 6/6/2016 SS 108"],
"MATH 20500 41": ["LEC Analysis In Rn-3 Jing 10:30 AM 12:30 PM W 6/8/2016 E 308"],
"MATH 20500 51": ["LEC Analysis In Rn-3 Voda 4:00 PM 6:00 PM R 6/9/2016 E 308"],
"MATH 20900 31": ["LEC Honors Analysis In Rn-3 Csornyei 10:30 AM 12:30 PM M 6/6/2016 E 206"],
"MATH 21100 61": ["LEC Basic Numerical Analysis Demanet 1:30 PM 3:30 PM F 6/10/2016 E 203"],
"MATH 23500 20": ["LEC Markov Chains, Martingales, and Brownian Motion Lawler 8:00 AM 10:00 AM T 6/7/2016 E 206"],
"MATH 24100 50": ["LEC Topics In Geometry Chambers 10:30 AM 12:30 PM R 6/9/2016 E 206"],
"MATH 24200 51": ["LEC Algebraic Number Theory Corlette 4:00 PM 6:00 PM R 6/9/2016 E 203"],
"MATH 25500 51": ["LEC Basic Algebra-2 Le 4:00 PM 6:00 PM R 6/9/2016 E 206"],
"MATH 25600 11": ["LEC Basic Algebra-3 Hickman 8:00 AM 10:00 AM W 6/8/2016 E 308"],
"MATH 25600 31": ["LEC Basic Algebra-3 Le 10:30 AM 12:30 PM M 6/6/2016 E 312"],
"MATH 25600 33": ["LEC Basic Algebra-3 Le Hung 10:30 AM 12:30 PM M 6/6/2016 P 016"],
"MATH 25900 31": ["LEC Basic Algebra-3 (honors) Corlette 10:30 AM 12:30 PM M 6/6/2016 E 202"],
"MATH 25900 33": ["LEC Basic Algebra-3 (honors) Emerton 10:30 AM 12:30 PM M 6/6/2016 E 203"],
"MATH 26300 50": ["LEC Elem Algebraic Topology Zakharevich 10:30 AM 12:30 PM R 6/9/2016 E 203"],
"MATH 27000 32": ["LEC Basic Complex Variables Smart 10:30 AM 12:30 PM T 6/7/2016 E 206"],
"MATH 27400 20": ["LEC Diff Manifolds And Integration Dottener 8:00 AM 10:00 AM T 6/7/2016 E 203"],
"MATH 27500 30": ["LEC Basic Thry Partial Diff Equ Feldman 10:30 AM 12:30 PM T 6/7/2016 E 203"],
"MENG 20100 1": ["SEM Turning Science & Innovation into Impactful Technologies Guha 4:00 PM 6:00 PM M 6/6/2016 BSLC 305"],
"MENG 24300 1": ["LEC The Engineering and Biology of Tissue Repair Hubbell 1:30 PM 3:30 PM F 6/10/2016 BSLC 240"],
"MENG 26010 1": ["LEC Engineering Principles of Conservation Swartz 10:30 AM 12:30 PM R 6/9/2016 BSLC 305"],
"MENG 26020 1": ["LEC Engineering Electrodynamics Cleland 8:00 AM 10:00 AM T 6/7/2016 P 022"],
"MENG 29600 1": ["LEC Practice of Research Awschalom 1:30 PM 3:30 PM R 6/9/2016 RO 011"],
"MENG 33400 1": ["CRS Applied Probability For Engineers Ghosh 10:30 AM 12:30 PM T 6/7/2016 C 112"],
"MENG 34200 1": ["LEC Selec Tpcs Molec Engineering: Molecular/Materials Modelling II Galli 10:30 AM 12:30 PM T 6/7/2016 C 119"],
"MICR 33000 1": ["CRS Bacteria/Bacteriophage Genetics and Cell Biology Crosson 1:30 PM 3:30 PM T 6/7/2016 Not Available"],
"MOGK 20300 1": ["CRS Intermediate Modern Greek-3 Koutsiviti 10:30 AM 12:30 PM W 6/8/2016 RO 432"],
"MSBI 31100 1": ["CRS Introduction to Clinical Research Informatics McClintock 6:00 PM 8:00 PM T 6/7/2016 GC ARR"],
"MSBI 31200 1": ["CRS Leadership and Management for Informaticians Baltasi 6:00 PM 8:00 PM T 6/7/2016 Not Available"],
"MUSI 10100 1": ["LEC Intro: Western Art Music Brodsky 8:00 AM 10:00 AM T 6/7/2016 LC 802"],
"MUSI 10100 2": ["LEC Intro: Western Art Music Gordon 10:30 AM 12:30 PM R 6/9/2016 LC 901"],
"MUSI 10100 3": ["LEC Intro: Western Art Music Hopkins 4:00 PM 6:00 PM R 6/9/2016 GO-H 402"],
"MUSI 10200 1": ["LEC Introduction To World Music Dempsey 1:30 PM 3:30 PM F 6/10/2016"],
"MUSI 10200 3": ["CRS Introduction To World Music Gough 1:30 PM 3:30 PM T 6/7/2016 LC 901"],
"MUSI 10200 4": ["LEC Introduction To World Music Nimjee 4:00 PM 6:00 PM W 6/8/2016 GO-H 402"],
"MUSI 10300 1": ["LEC Intro: Music Materials/Design Cheung 10:30 AM 12:30 PM T 6/7/2016 GO-H 402"],
"MUSI 10300 2": ["LEC Intro: Music Materials/Design Pukinskis 1:30 PM 3:30 PM R 6/9/2016 GO-H 402"],
"MUSI 10400 1": ["LEC Intro: Music Analysis/Criticism Loeffler 8:00 AM 10:00 AM T 6/7/2016 GO-H 402"],
"MUSI 12200 1": ["LEC Music In Western Civ-2 Kendrick 10:30 AM 12:30 PM M 6/6/2016 C 307"],
"MUSI 15300 1": ["LEC Harmony and Voice Leading-3 Murphy 10:30 AM 12:30 PM M 6/6/2016 GO-H 402"],
"MUSI 15300 2": ["LEC Harmony and Voice Leading-3 Murphy 10:30 AM 12:30 PM W 6/8/2016 GO-H 402"],
"MUSI 23716 1": ["SEM Music of the Latin American Outlaws Sanchez-Rojo 1:30 PM 3:30 PM T 6/7/2016 GO-H 402"],
"MUSI 24316 1": ["SEM Music and Melancholy Brodsky 10:30 AM 12:30 PM T 6/7/2016 LC 802"],
"MUSI 25600 1": ["CRS Jazz Theory And Improvisation Bowden 4:00 PM 6:00 PM W 6/8/2016 Not Available"],
"MUSI 28116 1": ["CRS Piano Repertoire of the Twentieth Century Briggs 1:30 PM 3:30 PM F 6/10/2016 LC 703"],
"MUSI 30716 1": ["CRS Opera as Idea and Performance Nussbaum 1:30 PM 3:30 PM R 6/9/2016 LBQ V"],
"MUSI 34100 1": ["SEM Seminar: Composition Thomas 4:00 PM 6:00 PM T 6/7/2016 LC 901"],
"NEAA 20003 1": ["LEC Art & Archaeology Near East -3: Levant Schloen 1:30 PM 3:30 PM T 6/7/2016 C 110"],
"NEAA 20040 1": ["SEM Monuments and Monumentality in the Past and Present Osborne 8:00 AM 10:00 AM T 6/7/2016 C 106"],
"NEAA 30080 1": ["SEM Migrations and Population Movements of the Ancient Near East Osborne 1:30 PM 3:30 PM F 6/10/2016 OR 208"],
"NEHC 10101 1": ["LEC Intro To The Middle East Donner 10:30 AM 12:30 PM M 6/6/2016 RO 015"],
"NEHC 20003 1": ["CRS History & Society of Ancient Near East-3 Goedegebuure 10:30 AM 12:30 PM W 6/8/2016 HM 130"],
"NEHC 20013 1": ["LEC Ancient Empires-3 Siegel 1:30 PM 3:30 PM R 6/9/2016 HGS 101"],
"NEHC 20503 1": ["LEC Islamic History & Society-3:The Modern Middle East Shissler 10:30 AM 12:30 PM T 6/7/2016 STU 104"],
"NEHC 20603 1": ["LEC Islamic Thought and Literature-3 El-Shamsy 10:30 AM 12:30 PM M 6/6/2016 HM 140"],
"NEHC 20658 1": ["LEC Narrating Conflict in Modern Arabic Literature Hayek 1:30 PM 3:30 PM R 6/9/2016 WB 103"],
"NEHC 20766 1": ["CRS Shamans & Oral Poets of Central Asia Arik 4:00 PM 6:00 PM R 6/9/2016 C 107"],
"NEHC 30325 1": ["SEM The Medieval Muslim Curriculum El-Shamsy 4:00 PM 6:00 PM W 6/8/2016 SS 106"],
"NEHC 30722 1": ["SEM Iranian Political Culture II Payne 1:30 PM 3:30 PM R 6/9/2016 WB 130"],
"NEHC 30833 1": ["SEM Late Ottoman History-2 Shissler 10:30 AM 12:30 PM R 6/9/2016 C 228"],
"NEHC 30937 1": ["SEM Nationalism & Colonialism in the Middle East Bashkin 10:30 AM 12:30 PM M 6/6/2016 P 218"],
"NELG 20901 1": ["SEM Adv Sem: Comparative Semitic Linguistics Hasselbach 1:30 PM 3:30 PM T 6/7/2016 OR 208"],
"PBHS 35100 1": ["CRS Health Services Research Methods Sanghavi 1:30 PM 3:30 PM F 6/10/2016 BSLC 202"],
"PBPL 24751 1": ["Dis The Business of Nonprofits and the Evolving Social Sector Velasquez 4:00 PM 6:00 PM W 6/8/2016 C 110"],
"PBPL 25120 1": ["Dis Child Development and Public Policy Kalil 1:30 PM 3:30 PM M 6/6/2016 PBPL 289B"],
"PBPL 25370 1": ["Dis Social Justice and Social Policy Marinescu 4:00 PM 6:00 PM W 6/8/2016 PBPL 289A"],
"PBPL 26400 1": ["Dis Quantitative Methods in Public Policy Fowler 1:30 PM 3:30 PM R 6/9/2016 CLSC 101"],
"PBPL 28805 1": ["Dis Behavioral Economics and Policy Leitzel 10:30 AM 12:30 PM R 6/9/2016 C 104"],
"PERS 10103 1": ["LEC Elementary Persian-3 Ghahremani 10:30 AM 12:30 PM W 6/8/2016 C 102"],
"PERS 20103 1": ["LEC Intermediate Persian-3 Ghahremani 1:30 PM 3:30 PM F 6/10/2016 CL 313"],
"PHIL 23205 0": ["CRS Intro to Phenomenology Moati 4:00 PM 6:00 PM W 6/8/2016 STU 102"],
"PHIL 27000 0": ["CRS Hist Phil-3: Kant And 19th C Conant 10:30 AM 12:30 PM R 6/9/2016 SS 122"],
"PHIL 29200 1": ["CRS Junior Tutorial Amit 1:30 PM 3:30 PM F 6/10/2016 WB 106"],
"PHIL 29300 1": ["CRS Senior Tutorial Amit 1:30 PM 3:30 PM F 6/10/2016 WB 103"],
"PHSC 11000 0": ["LEC Sci/Earth: Envir Hist/Earth Webster 10:30 AM 12:30 PM W 6/8/2016 HGS 101"],
"PHSC 13400 0": ["LEC Global Warming Archer 10:30 AM 12:30 PM F 6/10/2016 K 107"],
"PHYS 12300 0": ["LEC General Physics-3 Grandi 10:30 AM 12:30 PM R 6/9/2016 KPTC 106"],
"PHYS 13300 AA": ["LEC Waves, Optics, & Heat Collar 10:30 AM 12:30 PM F 6/10/2016 KPTC 106"],
"PHYS 13300 BB": ["LEC Waves, Optics, & Heat Wakely 8:00 AM 10:00 AM T 6/7/2016 KPTC 106"],
"PHYS 14300 AA": ["LEC Honors Waves, Optics, & Heat Oreglia 10:30 AM 12:30 PM F 6/10/2016 KPTC 120"],
"PHYS 14300 BB": ["LEC Honors Waves, Optics, & Heat Schmitz 10:30 AM 12:30 PM T 6/7/2016 KPTC 120"],
"PHYS 21103 1": ["LEC Experimental Physics-3 Simon 4:00 PM 6:00 PM M 6/6/2016 KPTC 120"],
"PHYS 22000 1": ["LEC Introductory Math Methods of Physics Reid 10:30 AM 12:30 PM M 6/6/2016 KPTC 120"],
"PHYS 22600 1": ["LEC Electronics Chin 10:30 AM 12:30 PM T 6/7/2016 KPTC 105"],
"PHYS 22700 1": ["LEC Intermed Electr/Magnet-2 Levin 10:30 AM 12:30 PM F 6/10/2016 HGS 101"],
"PHYS 23400 1": ["LEC Quantum Mechanics-1 Levin 10:30 AM 12:30 PM W 6/8/2016 KPTC 106"],
"PHYS 23700 1": ["LEC Nuclei & Elementary Particles Kim 10:30 AM 12:30 PM M 6/6/2016 KPTC 103"],
"PHYS 32300 1": ["LEC Advanced Electrodynamics-2 Savard 10:30 AM 12:30 PM W 6/8/2016 KPTC 105"],
"PHYS 35200 1": ["LEC Statistical Mechanics Son 10:30 AM 12:30 PM T 6/7/2016 KPTC 103"],
"PHYS 36300 1": ["LEC Particle Physics Wang 10:30 AM 12:30 PM M 6/6/2016 KPTC 105"],
"PHYS 36700 1": ["LEC Soft Condensed Matter Phys Nagel 8:00 AM 10:00 AM T 6/7/2016 KPTC 105"],
"PHYS 37100 1": ["LEC Introduction To Cosmology Wald 8:00 AM 10:00 AM W 6/8/2016 KPTC 105"],
"PHYS 44500 1": ["LEC Quantum Field Theory-3 Carena 10:30 AM 12:30 PM F 6/10/2016 KPTC 101"],
"PHYS 48400 1": ["LEC String Theory-2 Harvey 8:00 AM 10:00 AM T 6/7/2016 KPTC 103"],
"PLSC 22913 1": ["LEC The Practice of Social Science Research Conley 10:30 AM 12:30 PM R 6/9/2016 STU 101"],
"PLSC 25303 1": ["SEM Evaluating the Candidates in the 2016 Presidential Election Conley 8:00 AM 10:00 AM T 6/7/2016 SS 108"],
"PLSC 25402 1": ["LEC Fragmented Politics and Global Markets Gulotty 8:00 AM 10:00 AM T 6/7/2016 HM 130"],
"PLSC 28701 1": ["LEC Introduction to Political Theory Wilson 4:00 PM 6:00 PM W 6/8/2016 K 107"],
"PLSC 28900 1": ["LEC Strategy Pape-Jr 1:30 PM 3:30 PM F 6/10/2016 K 107"],
"POLI 10303 1": ["CRS First Year Polish-3 Kosmala 10:30 AM 12:30 PM M 6/6/2016 C 201C"],
"POLI 20303 1": ["CRS Second-Year Polish-3 Houle 10:30 AM 12:30 PM M 6/6/2016 C 224"],
"PPHA 50600 1": ["CRS Urban Revitalization Project: <NAME> 8:00 AM 10:00 AM W 6/8/2016 PBPL 224"],
"PSYC 20400 0": ["LEC Cognitive Psychology Berman 10:30 AM 12:30 PM T 6/7/2016 SS 122"],
"PSYC 20700 1": ["LEC Sensation and Perception Le Doux 8:00 AM 10:00 AM T 6/7/2016 CLSC 101"],
"PSYC 22500 1": ["SEM Cognitive Development O'Doherty 10:30 AM 12:30 PM M 6/6/2016 STU 101"],
"PSYC 37900 1": ["LEC Experimental Design-2 Shevell 10:30 AM 12:30 PM T 6/7/2016 SS 401"],
"REES 23137 1": ["CRS Narratives Suspense in European/Russian Lit/Film Peters 10:30 AM 12:30 PM W 6/8/2016 SS 106"],
"REES 24401 1": ["CRS Vampires, Villains, & Magic: The Supernatural in Eastern Euro Franklin 10:30 AM 12:30 PM M 6/6/2016 HM 102"],
"REES 25700 1": ["CRS Russian Lit from Modernism to Postmodernism King 1:30 PM 3:30 PM R 6/9/2016 F 408"],
"RLST 10100 1": ["SEM Intro To Religious Studies Rosengarten 4:00 PM 6:00 PM W 6/8/2016 SS 107"],
"RUSS 10303 1": ["CRS First-Year Russian-3 Houle 8:00 AM 10:00 AM R 6/9/2016 P 016"],
"RUSS 10303 2": ["CRS First-Year Russian-3 Koehl 8:00 AM 10:00 AM R 6/9/2016 P 016"],
"RUSS 10303 3": ["CRS First-Year Russian-3 Postema 8:00 AM 10:00 AM R 6/9/2016 P 016"],
"RUSS 20303 1": ["CRS Second-Year Russian-3 Mandusic 4:00 PM 6:00 PM R 6/9/2016 C 205"],
"RUSS 20902 1": ["CRS Third-Year Russ: Culture-3 Pichugin 10:30 AM 12:30 PM W 6/8/2016 C 201C"],
"RUSS 21502 1": ["CRS Adv Russian Through Media-3 Pichugin 4:00 PM 6:00 PM R 6/9/2016 C 218"],
"RUSS 29912 1": ["CRS Special Topics in Advanced Russian Pichugin 4:00 PM 6:00 PM W 6/8/2016 F 408"],
"SALC 20200 1": ["CRS Intro To South Asian Civ-2 Majumdar 1:30 PM 3:30 PM F 6/10/2016 C 303"],
"SALC 47302 1": ["CRS Transmission of Islamic Knowledge in South Asia since 1800 Robinson 1:30 PM 3:30 PM R 6/9/2016 F 209"],
"SALC 49300 1": ["CRS South Asian Aesthetics: Rasa to Rap, Kamasutra to Kant Williams 1:30 PM 3:30 PM T 6/7/2016 C 115"],
"SOCI 20106 1": ["CRS Political Sociology Clark 4:00 PM 6:00 PM M 6/6/2016 KPTC 106"],
"SOCI 20112 1": ["CRS Appl Hierarchical Linear Model Raudenbush 10:30 AM 12:30 PM F 6/10/2016 SS 401"],
"SOCI 20140 1": ["CRS Qualitative Field Methods McRoberts 10:30 AM 12:30 PM T 6/7/2016 SS 404"],
"SOCI 20191 1": ["CRS Social Change in the United States Stolzenberg 10:30 AM 12:30 PM R 6/9/2016 HM 130"],
"SOCI 20192 1": ["CRS The Effects of Schooling Stolzenberg 10:30 AM 12:30 PM T 6/7/2016 HM 130"],
"SOCI 20204 1": ["CRS Sociology of Civil Society Lee 1:30 PM 3:30 PM T 6/7/2016 SS 404"],
"SOCI 20233 1": ["CRS Race in Contemporary American Society Hicks-Bartlett 1:30 PM 3:30 PM F 6/10/2016 ED 151"],
"SOCI 20236 1": ["CRS Political Modernization Garrido 1:30 PM 3:30 PM F 6/10/2016 SHFE 103"],
"SOCI 20244 1": ["CRS Political Theology II Glaeser 4:00 PM 6:00 PM M 6/6/2016 SS 404"],
"SOCI 28069 1": ["CRS Computing and Society Castelle 10:30 AM 12:30 PM R 6/9/2016 SS 404"],
"SOSC 15300 2": ["DIS Classics Soc/Polit Thought-3 Ferreira 8:00 AM 10:00 AM M 6/6/2016 SHFE 141"],
"SOSC 15300 4": ["DIS Classics Soc/Polit Thought-3 Lyons 8:00 AM 10:00 AM M 6/6/2016 C 102"],
"SOSC 15300 6": ["DIS Classics Soc/Polit Thought-3 Little 8:00 AM 10:00 AM M 6/6/2016 HM 104"],
"SOSC 15300 11": ["DIS Classics Soc/Polit Thought-3 Zaffini 8:00 AM 10:00 AM W 6/8/2016 HM 148"],
"SOSC 15300 12": ["DIS Classics Soc/Polit Thought-3 Arlen 8:00 AM 10:00 AM M 6/6/2016 C 107"],
"SOSC 15300 14": ["DIS Classics Soc/Polit Thought-3 Valiquette-Moreau 8:00 AM 10:00 AM M 6/6/2016 HM 145"],
"SOSC 15300 15": ["DIS Classics Soc/Polit Thought-3 Galloway 8:00 AM 10:00 AM M 6/6/2016 WB 103"],
"SOSC 15300 16": ["DIS Classics Soc/Polit Thought-3 Arlen 8:00 AM 10:00 AM M 6/6/2016 C 107"],
"SPAN 10200 1": ["LEC Beginning Elementary Spanish-2 Cajkova 8:00 AM 10:00 AM R 6/9/2016 C 116"],
"SPAN 10300 1": ["LEC Beginning Elementary Spanish-3 Lear 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 10300 2": ["LEC Beginning Elementary Spanish-3 Moraga-Guerra 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 10300 3": ["LEC Beginning Elementary Spanish-3 Lear 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 10300 4": ["LEC Beginning Elementary Spanish-3 Rojas 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 10300 5": ["LEC Beginning Elementary Spanish-3 Powers 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 10300 6": ["LEC Beginning Elementary Spanish-3 Mateos-Fernandez 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 10300 7": ["LEC Beginning Elementary Spanish-3 Rojas 8:00 AM 10:00 AM R 6/9/2016 K 107"],
"SPAN 20100 1": ["LEC Language History Culture-1 Indacoechea 8:00 AM 10:00 AM R 6/9/2016 C 115"],
"SPAN 20100 2": ["LEC Language History Culture-1 Gutierrez-Bascon 8:00 AM 10:00 AM R 6/9/2016 C 115"],
"SPAN 20200 1": ["LEC Language History Culture-2 Van-Den-Hout 8:00 AM 10:00 AM R 6/9/2016 C 301"],
"SPAN 20200 2": ["LEC Language History Culture-2 Tain-Gutierrez 8:00 AM 10:00 AM R 6/9/2016 C 301"],
"SPAN 20300 1": ["LEC Language History Culture-3 McCarron 8:00 AM 10:00 AM R 6/9/2016 CLSC 101"],
"SPAN 20300 2": ["LEC Language History Culture-3 Hong 8:00 AM 10:00 AM R 6/9/2016 CLSC 101"],
"SPAN 20300 3": ["LEC Language History Culture-3 Acevedo-Moreno 8:00 AM 10:00 AM R 6/9/2016 CLSC 101"],
"SPAN 20300 4": ["LEC Language History Culture-3 Sedlar 8:00 AM 10:00 AM R 6/9/2016 CLSC 101"],
"SPAN 20400 1": ["LEC Composicion y conversacion avanzada I Mateos-Fernandez 10:30 AM 12:30 PM W 6/8/2016 C 203"],
"SPAN 20500 1": ["LEC Composicion y conversacion avanzada II Indacoechea 1:30 PM 3:30 PM F 6/10/2016 C 110"],
"SPAN 20602 1": ["LEC Discurso Academico para Hablantes Nativos Van-Den-Hout 10:30 AM 12:30 PM M 6/6/2016 C 104"],
"SPAN 21100 1": ["LEC Las Regiones Del Espanol Lozada-Cerna 10:30 AM 12:30 PM M 6/6/2016 C 116"],
"STAT 20000 1": ["CRS Elementary Statistics Burbank 10:30 AM 12:30 PM W 6/8/2016 E 133"],
"STAT 22000 1": ["CRS Stat Meth And Applications Huang 10:30 AM 12:30 PM M 6/6/2016 SS 122"],
"STAT 22000 2": ["CRS Stat Meth And Applications Huang 1:30 PM 3:30 PM M 6/6/2016 E 133"],
"STAT 22200 1": ["CRS Linear Models And Exper Design Huang 10:30 AM 12:30 PM M 6/6/2016 SS 122"],
"STAT 22400 1": ["CRS Applied Regression Analysis Burbank 4:00 PM 6:00 PM R 6/9/2016 SS 122"],
"STAT 23400 1": ["CRS Statistical Models/Method-1 Dey 8:00 AM 10:00 AM T 6/7/2016 RO 015"],
"STAT 23400 2": ["CRS Statistical Models/Method-1 Collins 10:30 AM 12:30 PM T 6/7/2016 HGS 101"],
"STAT 23400 3": ["CRS Statistical Models/Method-1 Jahangoshahi 10:30 AM 12:30 PM R 6/9/2016 RO 015"],
"STAT 24500 1": ["CRS Statistical Theory/Method-2 Chatterjee 8:00 AM 10:00 AM T 6/7/2016 E 133"],
"STAT 24610 1": ["CRS Pattern Recognition Ke 10:30 AM 12:30 PM T 6/7/2016 E 133"],
"STAT 25100 1": ["CRS Intro To Math Probability Weare 1:30 PM 3:30 PM T 6/7/2016 HGS 101"],
"STAT 25150 1": ["CRS Intro to Math Probability - A Fefferman 10:30 AM 12:30 PM M 6/6/2016 E 133"],
"STAT 26700 1": ["CRS History of Statistics Stigler 10:30 AM 12:30 PM F 6/10/2016 E 133"],
"STAT 30210 1": ["CRS Bayesian Analysis and Principles of Statistics Stephens 1:30 PM 3:30 PM M 6/6/2016 GHJ 226"],
"STAT 31100 1": ["CRS Mathematical Computation III: Numerical Methods for PDE's Demanet 4:00 PM 6:00 PM W 6/8/2016 HM 145"],
"STAT 34700 1": ["CRS Generalized Linear Models Amit 1:30 PM 3:30 PM T 6/7/2016 E 133"],
"STAT 35400 1": ["CRS Gene Regulation Reinitz 1:30 PM 3:30 PM T 6/7/2016 GHJ 226"],
"STAT 37710 1": ["CRS Machine Learning Kondor 1:30 PM 3:30 PM R 6/9/2016 STU 101"],
"STAT 37790 1": ["CRS Topics in Statistical Machine Learning Lafferty 4:00 PM 6:00 PM R 6/9/2016 GHJ 226"],
"STAT 38300 1": ["CRS Measure-Theoretic Probability-III Ding 10:30 AM 12:30 PM M 6/6/2016 GHJ 226"],
"STAT 48100 1": ["CRS High-Dimensional Statistics II Barber 10:30 AM 12:30 PM T 6/7/2016 GHJ 226"],
"SWAH 25400 1": ["CRS Swahili-3 Mpiranya 10:30 AM 12:30 PM T 6/7/2016 C 218"],
"TAML 10300 1": ["CRS First-Year Tamil-3 Annamalai 1:30 PM 3:30 PM T 6/7/2016 C 218"],
"TAML 20300 1": ["CRS Second-Year Tamil-3 Annamalai 1:30 PM 3:30 PM R 6/9/2016 C 208"],
"TBTN 10300 1": ["CRS First-Year Tibetan-3 Ngodup 10:30 AM 12:30 PM M 6/6/2016 C 208"],
"TBTN 20300 1": ["CRS Second-Year Tibetan-3 Staff 4:00 PM 6:00 PM R 6/9/2016 C 224"],
"TURK 10103 1": ["LEC Elementary Turkish-3 Arik 10:30 AM 12:30 PM F 6/10/2016 C 201A-B"],
"TURK 10106 1": ["LEC Introduction to Old Turkic 2 Arik 1:30 PM 3:30 PM T 6/7/2016 C 207"],
"TURK 30503 1": ["LEC Ottoman Turkish-3 Anetshofer-Karateke 8:00 AM 10:00 AM T 6/7/2016 C 210"]
}
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
    """Respond to an incoming Twilio SMS with the caller's final-exam details.

    The SMS body ('Body' parameter) is expected to be a key of the
    module-level ``finals`` dict, e.g. "MATH 15300 21". Each value holds a
    single whitespace-separated string whose LAST 9 tokens are fixed:
    professor, begin time (2 tokens), end time (2 tokens), weekday letter,
    date, and room (2 tokens). Everything between the first token (the
    lecture type, e.g. "LEC") and those last 9 is the class name.

    Returns the TwiML response (as a string) for Twilio to relay back.
    """
    message_body = request.values.get('Body', None)
    if message_body in finals:
        tokens = finals[message_body][0].split()
        # Use negative indices instead of repeated len(tokens)-k arithmetic;
        # join() also drops the stray trailing space the old loop produced.
        class_name = " ".join(tokens[1:-9])
        professor = tokens[-9]
        final_begin = tokens[-8] + " " + tokens[-7]
        final_end = tokens[-6] + " " + tokens[-5]
        weekday = tokens[-4]
        exam_date = tokens[-3]
        room = tokens[-2] + " " + tokens[-1]
        message = ("Class Name: " + class_name
                   + "\nProfessor: " + professor
                   + "\nDate: " + weekday + ", " + exam_date
                   + "\nLocation: " + room
                   + "\nTime: " + final_begin + " - " + final_end)
    else:
        # Typo fixed in user-facing text: "seperate" -> "separate".
        message = ("Class not found!\nMake sure you have the correct format of "
                   "[Dept. Code] [Class Code] [Section Number].\nNote: You can find "
                   "what section of a class is yours by going to "
                   "http://classes.uchicago.edu.\nLastly, if you are sure you are "
                   "sending the correct class, your class is not listed on the "
                   "school's final exam schedule (you may have a separate department "
                   "exam schedule).")
    resp = twilio.twiml.Response()
    resp.message(message)
    return str(resp)
if __name__ == "__main__":
    # Dev-server entry point. NOTE(review): debug=True enables the Werkzeug
    # debugger/reloader and must not be exposed publicly -- confirm this is
    # only run locally (use a WSGI server for deployment).
    app.run(debug=True)
| 46f41f457533d099048e8af0bcfca96200d93d34 | [
"Python",
"Text"
] | 3 | Text | saieedgeorge0/finalschedule | e855ead3427ca3a597c44dd795a2185292565356 | 3d2893d0d2864165d3e381facf1053acba2d2237 |
refs/heads/master | <repo_name>pscheil/PTXQC<file_sep>/inst/reportTemplate/PTXQC_report_template.Rmd
---
title: "ProTeomiX (PTX) Quality Control (QC) Report"
output:
html_document:
mathjax: null
number_sections: yes
toc: yes
pdf_document:
toc: yes
---
<!-- the TOC (table of content) will go here - use 'toc: no' above to disable the TOC -->
<style type="text/css">
p {
padding: 5px 0;
}
.helpSymbol {
border: 1px solid #AAAAAA;
padding: 3px;
text-align: center;
width: 100px;
}
.helpText {
clear: left;
}
</style>
```{r setup, include=FALSE}
## global knitr chunk options for the whole report: hide source code,
## suppress warnings/errors/messages in the rendered output, wide figures
knitr::opts_chunk$set(echo=FALSE, warning=FALSE, error=FALSE, message=FALSE, fig.width=10)
```
<a name="Overview"></a>
# Overview
**Quick guide**
- navigate using the 'Table of Content' above or just scroll down
- collapse sections by just clicking their name
- click on the "↓ Show Help" symbol to see a description for each metric and its scoring
- edit the config `r paste0("file:///", gsub("\\\\", "/", rprt_fns$yaml_file))` to
- set the report format (HTML, plainPDF, or both)
- disable metrics
- customize the order of metrics
- set individual target thresholds
- edit `r paste0("file:///", gsub("\\\\", "/", rprt_fns$filename_sorting))` to customize
- the order of Raw files in plots and heatmap
- abbreviations of Raw file names
- edit the Html template `r paste0("file:///", gsub("\\\\", "/", html_template))` to
- alter the look and feel of this report (e.g. colors, font and image sizes, help texts, etc.)
- report bugs and file requests via the [GitHub issue tracker](https://github.com/cbielow/PTXQC/issues)
- full PTXQC manual available [here](https://github.com/cbielow/PTXQC)
## HeatMap
```{r MQparams, echo=FALSE, results="asis"}
hm[["plot"]]
```
## Name Mapping
```{r nameMapping, echo=FALSE, results="asis"}
## print the Raw-file name-mapping table (full vs. abbreviated names), but
## only if the mapping object exists and contains no NA placeholder entries
if(!is.null(pl_nameMapping) && (!any(is.na(pl_nameMapping)))) {
  cat(pl_nameMapping$htmlTable)
}
```
## Metrics
```{r metrics, echo=FALSE, results="asis"}
#for (qcm in lst_qcMetrics_ord[1:3])
for (qcm in lst_qcMetrics)
{
if (length(qcm$plots) == 0) next; # skip empty metrics
#debug:
#cat(c("processing ", qcm$qcName))
v_titles = qcm$getTitles(subtitle_sep = "\n<b>")
plots = qcm$getPlots(withTitle = FALSE)
if (length(v_titles) != length(plots)) {
## this is weird...
stop("Number of titles is unequal to number of plots ...")
}
last_title = ""
for (i in 1:length(v_titles))
{
if (last_title != v_titles[i]) { ## print only new titles
cat(paste0("\n\n### ", gsub("\n", "\n<br>", v_titles[i]), "</b>\n\n"))
}
if (i == 1) { ## print helpText only once
cat(paste0('\n
<div>
<div class="helpSymbol">↓ Show Help</div>
<div class="helpText">', qcm$helpText, "</div>
</div>"))
}
if (!is.na(qcm$htmlTable))
{ ## if there is an HTML table, prefer it over the plots
cat(qcm$htmlTable)
break;
}
print(plots[[i]])
cat("\n<br>\n")
last_title = v_titles[i]
}
cat('\n<p style="text-align: right">[back to top](#Overview)</p>\n\n')
}
```
<script type="text/javascript">
jQuery(document).ready(function()
{
  // initially hide the help text (it is revealed on demand below)
  jQuery(".helpText").hide();
  // toggle help text by clicking on the symbol
  jQuery(".helpSymbol").click(function(event)
  {
    var target = $( event.target );
    // "Show Help ↓" is the first item in a <div> which encloses their section
    // so toggling its sibling (the actual text) will just leave the "Show Help ↓" visible
    if ( target.is( "div" ) )
    {
      target.siblings().slideToggle(200);
      // keep the click from also triggering the .section collapse handler below
      event.stopPropagation()
    }
  });
  // enable shortening the report: clicking a heading collapses its section
  jQuery(".section").click(function(event)
  {
    var target = $( event.target );
    // headings are the first item in a <div> which encloses their section
    // so toggling their siblings will just leave the heading visible
    if ( target.is( "h1, h2, h3, h4" ) )
    {
      target.siblings().slideToggle(200);
      event.stopPropagation()
    }
  });
});
</script>
<file_sep>/R/mzQC.R
#'
#' Tokenize an OBO controlled-vocabulary file.
#'
#' Reads the file with \code{scan} and returns its whitespace-separated
#' tokens (the previous \code{@return} wrongly promised a list). No ontology
#' structure is built here; the previously computed (and never used)
#' \code{ontologyIndex::get_ontology} result was dead code and has been
#' removed -- callers needing term relationships should parse the tokens
#' or use ontologyIndex directly.
#'
#' @param cv_obo_file Path to an ".obo" file, e.g. "xxx.obo"
#' @return Character vector of whitespace-separated tokens from the file
#'
#' @export
#'
parseOBO = function(cv_obo_file){
  ## scan() splits the file content on whitespace; quiet avoids the
  ## "Read n items" console chatter
  obo = scan(file = cv_obo_file, what = "character", quiet = TRUE)
  return(obo)
}
<file_sep>/R/fcn_mqpar.R
#' Retrieve a parameter value from a mqpar.xml file
#'
#' If the file has the param, then return it as string.
#' If the file is missing, a message is shown and NULL is returned.
#' If the param (i.e. XML tag) is unknown or cannot be extracted, the program will quit (since this is a hard error).
#' When multiple occurrences of the param are found (usually due to parameter groups), we test if the values are all identical.
#' If so, the value is returned. If the values are different, a warning is emitted and NULL is returned.
#'
#' E.g. calling getMQPARValue("mqpar.xml", "firstSearchTol")
#' will look up the line
#'    <firstSearchTol>20</firstSearchTol>
#' and return "20" (string!).
#'
#'
#' @param mqpar_filename Filename (incl. absolute or relative path) to the mqpar.xml file
#' @param param_name XML tag name, e.g. 'firstSearchTol' from which to read the value
#'
#' @return The stored value as string(!), or NULL (file missing, or ambiguous values across parameter groups)
#'
#' @export
#'
getMQPARValue = function(mqpar_filename, param_name)
{
  #param_name = "firstSearchTol"
  #mqpar_filename = txt_files$mqpar
  ## TODO: at some point we might use a real XML parser, but for now, this would be overkill and
  ## also add another dependency library
  ## BUGFIX: was 'file.exists(pattern=mqpar_filename)' -- file.exists() has no
  ## 'pattern' argument (the stray name was silently swallowed by '...')
  if (!file.exists(mqpar_filename)) {
    ## BUGFIX: 'immediate.' is an argument of warning(), not message(); passing
    ## it here made message() paste a literal "TRUE" onto the emitted text
    message("Info: The file '", mqpar_filename, "' was not found. MaxQuant parameters could not be extracted. Will fall back to default value, which however is only an approximation.",
            " Please either: a) copy the mqpar.xml which was used for this MQ analysis into your TXT folder or,",
            " b) make sure that you configure all YAML parameters whose name starts with 'MQpar_' correctly.")
    return (NULL)
  }
  lines = readLines(con = mqpar_filename, warn = FALSE)
  ## all lines mentioning the tag -- usually '<tag>value</tag>', possibly
  ## once per parameter group
  idx = grep(param_name, lines)
  ## strip the surrounding tags; if the regex does not match, gsub() returns
  ## the full line unchanged (detected below)
  results = gsub(paste0("[ ]*<", param_name, ">(.*)</", param_name, ">[ ]*"), "\\1", lines[idx])
  ## if regex did not work, the whole line will be returned, including the tag
  if (length(grep(param_name, results)) > 0)
  {
    stop("getMQPARValue(): The parameter '", param_name, "' was found but could not be extracted from the line(s)\n  ", paste(lines[idx], collapse="\n  "), "\n  Please contact the package support.", call. = FALSE)
  }
  if (length(unique(results)) > 1) {
    ## ambiguous: parameter groups carry differing values; caller falls back to YAML
    warning("getMQPARValue(): The parameter '", param_name, "' was found more than once in the file '", mqpar_filename, "' with different values (probably due to usage of parameter groups).",
            " PTXQC currently cannot deal with that -- the YAML param is going to be used. Sorry.", immediate. = TRUE);
    return (NULL);
  } else if (length(results) == 0) {
    stop("getMQPARValue(): The parameter '", param_name, "' was not found in the file '", mqpar_filename, "'. Please contact the package support.", call. = FALSE);
  }
  ## all tests passed, return the unique result
  return (results[1])
}
## Some general notes on the design of the MzQC reference classes:
##
## + we provide initialize() functions for all RefClasses to enable unnamed construction (shorter syntax)
##
# Import jsonlite's internal 'asJSON' generic so that class-specific S4 methods
# can be registered on it,
# e.g. setMethod('asJSON', 'mzQC', function(x, ...) x$toJSON())
# which allows to use
# jsonlite::toJSON(mzQC$new(content))
# NOTE(review): ':::' accesses a non-exported symbol of jsonlite; this may break
# if jsonlite changes its internals -- confirm against the installed version.
asJSON <- jsonlite:::asJSON
#'
#' Tell if a string is undefined (NA or NULL); If yes, and its required by the mzQC standard, we can raise an error
#'
#' You can pass multiple strings, which are all checked. If any of them is undefined, the function returns TRUE
#'
#' @param s A string to be checked for NA/NULL
#' @param ... More strings to be checked
#' @param verbose If TRUE and 's' is NULL/NA, will print the name of the variable which was passed in
#'
#' @examples
#'   isUndefined(NA)       ## TRUE
#'   isUndefined(NULL)     ## TRUE
#'   isUndefined(NA, NULL) ## TRUE
#'   isUndefined("")       ## FALSE
#'   isUndefined("", NA)   ## TRUE
#'   isUndefined(1)        ## FALSE
#'   myVar = NA
#'   isUndefined(myVar)    ## TRUE, with warning "variable 'myVar' is NA/NULL!"
#'
#' @export
#'
isUndefined = function(s, ..., verbose = TRUE)
{
  # recursion anchor: no more arguments left to check
  if (missing(s)) return(FALSE)
  ## fixed: test is.null() FIRST -- is.na(NULL) returns logical(0), which is not
  ## a valid scalar operand for '||' on modern R; any() additionally keeps the
  ## check scalar should a vector be passed in
  r = (is.null(s) || any(is.na(s)))
  name_of_var = deparse(substitute(s))
  # omit the '.self' part of the variable's name
  name_of_var = gsub("^.self\\$", "", name_of_var)
  if (verbose && r) warning(paste0("Variable '", name_of_var, "' is NA/NULL!"), immediate. = TRUE, call. = FALSE)
  ## check remaining args from ... by using '+' (force evaluation)
  return(r + isUndefined(..., verbose = verbose) > 0)
}
#'
#' Checks validity (= completeness) of mzQC objects - or lists (JSON arrays) thereof
#'
#' Note: Returns TRUE for empty lists!
#'
#' Multiple arguments can be given; each is checked individually and ALL of them
#' must be valid for TRUE to be returned.
#' Lists are supported because JSON arrays are represented as R lists: a list can
#' be passed as a single argument (no do.call() needed) and the indices of any
#' invalid elements are reported.
#' The ellipsis keeps call sites short, i.e.
#'    if (!isValidMzQC(a) || !isValidMzQC(b)) doStuff()
#' is harder to read than
#'    if (!isValidMzQC(a,b)) doStuff()
#'
#' @param x An mzQC refclass (or list of them), each will be subjected to `isValidMzQC()`
#' @param ... Ellipsis, for recursive argument splitting
#'
#' @examples
#'   isValidMzQC(MzQCcvParameter$new("QC:4000059"))                               # FALSE
#'   isValidMzQC(list(MzQCcvParameter$new("QC:4000059")))                         # FALSE
#'   isValidMzQC(list(MzQCcvParameter$new("QC:4000059", "Number of MS1 spectra"))) # TRUE
#'   isValidMzQC(list(MzQCcvParameter$new("QC:4000059", "Number of MS1 spectra")),
#'               MzQCcvParameter$new())                                           # FALSE
#'
#' @export
#'
isValidMzQC = function(x, ...)
{
  ## recursion anchor: an empty argument list counts as valid
  if (missing(x)) return(TRUE)
  if ("list" %in% class(x)) {
    ## validate every element of the (JSON array) list individually
    valid_flags = vapply(x, isValidMzQC, logical(1))
    invalid_pos = which(!valid_flags)
    if (length(invalid_pos) > 0) {
      warning(paste0("In list of '", class(x[[1]]), "', the element(s) #[", paste(invalid_pos, collapse = ","), "] is/are invalid."), immediate. = TRUE, call. = FALSE)
    }
    return(all(valid_flags) & isValidMzQC(...))
  }
  ## single object: delegate to the object's own isValid() method
  this_ok = x$isValid()
  if (!this_ok)
  {
    warning(paste0("A field in object of type ", class(x), " is invalid."), immediate. = TRUE, call. = FALSE)
  }
  return(this_ok & isValidMzQC(...))
}
#'
#' Allow conversion of plain named lists to mzQC objects
#'
#' The plain-R representation of your mzQC objects must be wrapped in an outer list,
#' if your mzQC object representation is already a list
#' because upon detecting lists, this function will call 'class$fromData(element)' for every element.
#'
#' @param mzqc_class Prototype of the class to convert 'data' into
#' @param data A datastructure of R lists/arrays as obtained by 'jsonlite::fromJSON()'
#' @return A single populated object of class 'mzqc_class' (or a list of them if 'data' is a list);
#'         an empty list if 'data' is NULL/NA
#'
#' @examples
#'  data = MzQCcvParameter$new("acc", "myName", "desc")
#'  data_recovered = fromDatatoMzQC(MzQCcvParameter, list(jsonlite::fromJSON(jsonlite::toJSON(data))))
#'  data_recovered
#'
#' @export
#'
fromDatatoMzQC = function(mzqc_class, data)
{
  if ("list" %in% class(data))
  {
    ## JSON array: convert every element individually
    return(sapply(data, function(x) {
      obj = mzqc_class$new()
      obj$fromData(x)
      obj
    }))
  }
  ## fixed: check is.null() first -- is.na(NULL) yields logical(0), which is not
  ## a valid scalar operand for '||'; all() keeps the condition scalar for vectors
  if (is.null(data) || all(is.na(data))) return(list())
  obj = mzqc_class$new()
  obj$fromData(data)
  ## fixed: return the populated object itself; fromData() is called for its
  ## side effect and its return value is NOT the object (this now mirrors the
  ## list branch above and makes assignments to typed refclass fields work)
  return(obj)
}
#'
#' converts a NULL to NA_character_; or returns the argument unchanged otherwise
#'
#' Useful when a list lookup comes back NULL (missing element) but the target
#' refClass field is typed 'character' and therefore needs NA_character_
#' (assigning NULL there would raise an error).
#'
#' @param char_or_NULL A string or NULL
#'
#' @examples
#'   NULL_to_charNA(NA)   ## NA
#'   NULL_to_charNA(NULL) ## NA_character_
#'   NULL_to_charNA("hi") ## "hi"
#'
#' @export
#'
NULL_to_charNA = function(char_or_NULL) {
  result = char_or_NULL
  ## substitute a typed NA for an absent (NULL) value
  if (is.null(result)) result = NA_character_
  return(result)
}
#'
#' converts a NULL to NA; or returns the argument unchanged otherwise
#'
#' Useful when a list lookup comes back NULL (missing element) but the target
#' refClass field should hold NA instead (assigning NULL there would raise an error).
#'
#' @param var_or_NULL A variable of any kind or NULL
#'
#' @examples
#'   NULL_to_NA(NA)   ## NA
#'   NULL_to_NA(NULL) ## NA
#'   NULL_to_NA("hi") ## "hi"
#'
#' @export
#'
NULL_to_NA = function(var_or_NULL) {
  result = var_or_NULL
  ## substitute NA for an absent (NULL) value
  if (is.null(result)) result = NA
  return(result)
}
#'
#' An mzQC-formatted date+time, as required by the mzQC spec doc
#'
#' The format is "%Y-%m-%d %H:%M:%S".
#'
#' @field datetime A correctly formatted date time (use as read-only)
#'
#' @exportClass MzQCDateTime
#' @export MzQCDateTime
#'
#' @examples
#'   dt1 = MzQCDateTime$new("1900-01-01")
#'   dt2 = MzQCDateTime$new(Sys.time())
#'   ## test roundtrip conversion from/to JSON
#'   dt2$fromData(jsonlite::fromJSON(jsonlite::toJSON(dt1)))
#'   dt1$datetime == dt2$datetime ## TRUE
#'
#' @export
#'
MzQCDateTime = setRefClass(
  'MzQCDateTime',
  fields = list(datetime = 'character'),
  methods = list(
    # default-constructs with the current time; accepts anything as.POSIXct() understands
    initialize = function(date = as.character(Sys.time()))
    {
      set(date)
    },
    # store 'date' normalized to the "%Y-%m-%d %H:%M:%S" format required by the spec
    set = function(.self, date)
    {
      .self$datetime = format(as.POSIXct(date), "%Y-%m-%d %H:%M:%S")
    },
    isValid = function(.self)
    {
      return(TRUE) ## always valid, because it's designed that way
    },
    # serialize the stored date string; NOTE(review): no 'auto_unbox' is used here,
    # so the scalar is emitted as a one-element JSON array -- confirm this is intended
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      return(jsonlite::toJSON(.self$datetime))
    },
    # re-parse a date/time string coming from JSON (normalizes via set())
    fromData = function(.self, data)
    {
      .self$set(data)
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCDateTime', function(x, ...) x$toJSON(...))
#'
#' A controlled vocabulary document, usually pointing to an .obo file
#'
#' @field name Full name of the controlled vocabulary.
#' @field uri Publicly accessible URI of the controlled vocabulary.
#' @field version [optional] Version of the controlled vocabulary.
#'
#' @export MzQCcontrolledVocabulary
#'
#' @examples
#'   MzQCcontrolledVocabulary$new(
#'     "Proteomics Standards Initiative Quality Control Ontology",
#'     "https://github.com/HUPO-PSI/qcML-development/blob/master/cv/v0_1_0/qc-cv.obo",
#'     "0.1.0")
#'   isValidMzQC(MzQCcontrolledVocabulary$new(
#'     "Proteomics Standards Initiative Quality Control Ontology"))
#'
#' @export
#'
MzQCcontrolledVocabulary = setRefClass(
  'MzQCcontrolledVocabulary',
  fields = list(name = 'character',
                uri = 'character',
                version = 'character' # optional
  ),
  methods = list(
    # defaults enable unnamed construction and empty construction for fromData()
    initialize = function(name = NA_character_, uri = NA_character_, version = NA_character_)
    {
      .self$name = name
      .self$uri = uri
      .self$version = version
    },
    # 'name' and 'uri' are mandatory; 'version' is optional
    isValid = function(.self) {
      if (isUndefined(.self$name, .self$uri)) return(FALSE)
      return(TRUE)
    },
    # serialize to a JSON object; raises an error if mandatory fields are missing
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      # NOTE(review): an unset 'version' (NA) is serialized as well -- confirm this is intended
      r = list("name" = .self$name,
               "uri" = .self$uri,
               "version" = .self$version)
      return (jsonlite::toJSON(r, auto_unbox = TRUE))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$name = data$name
      .self$uri = data$uri
      ## 'version' may be absent in the JSON -> convert NULL to a typed NA
      .self$version = NULL_to_charNA(data$version)
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCcontrolledVocabulary', function(x, ...) x$toJSON(...))
#'
#' A controlled vocabulary parameter, as detailed in the OBO file
#'
#' @field accession Accession number identifying the term within its controlled vocabulary (pattern: ^[A-Z]+:[A-Z0-9]+$).
#' @field name Name of the controlled vocabulary term describing the parameter.
#' @field value [optional] Value of the parameter (may be a scalar or an n-tuple).
#' @field description [optional] Definition of the controlled vocabulary term.
#'
#' @export MzQCcvParameter
#'
#' @examples
#'   MzQCcvParameter$new("QC:4000139",
#'                       "RT acquisition range",
#'                       c(0.2959, 5969.8172))
#'   isValidMzQC(MzQCcvParameter$new("MS:0000000"))
#'
#' @export
#'
MzQCcvParameter = setRefClass(
  'MzQCcvParameter',
  fields = list(accession = 'character',
                name = 'character',
                value = 'ANY',             # optional
                description = 'character'  # optional
  ),
  methods = list(
    # defaults enable unnamed construction and empty construction for fromData()
    initialize = function(accession = NA_character_, name = NA_character_, value = NA, description = NA_character_)
    {
      .self$accession = accession
      .self$name = name
      .self$value = value
      .self$description = description
    },
    # 'accession' and 'name' are mandatory; 'value' and 'description' are optional
    isValid = function(.self) {
      if (isUndefined(.self$accession, .self$name)) return(FALSE)
      return(TRUE)
    },
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      r = list("accession" = .self$accession,
               "name" = .self$name)
      ## fixed: 'value' may be an n-tuple (see the RT range example above), for
      ## which is.na() returns a multi-element vector -- all() keeps the 'if'
      ## condition scalar; and [[ ]] assignment stores the complete vector,
      ## whereas the former single-bracket assignment silently dropped all but
      ## the first element (with a warning)
      if (!all(is.na(.self$description))) r[["description"]] = .self$description
      if (!all(is.na(.self$value))) r[["value"]] = .self$value
      return (jsonlite::toJSON(r, auto_unbox = TRUE))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$accession = data$accession
      .self$name = data$name
      ## optional members may be absent in the JSON -> convert NULL to NA
      .self$description = NULL_to_charNA(data$description)
      .self$value = NULL_to_NA(data$value)
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCcvParameter', function(x, ...) x$toJSON(...))
#'
#' An inputfile within metadata for a run/setQuality
#'
#' @field name The name MUST uniquely match to a location (specified below) listed in the mzQC file.
#' @field location Unique file location, REQUIRED to be specified as a URI. The file URI is RECOMMENDED to be publicly accessible.
#' @field fileFormat A MzQCcvParameter with 'accession' and 'name'.
#' @field fileProperties An array of MzQCcvParameter, usually with 'accession', 'name' and 'value'. Recommended are at least two entries:
#'        a) Completion time of the input file (MS:1000747) and b) Checksum of the input file (any child of: MS:1000561 ! data file checksum type).
#'
#' @export MzQCinputFile
#'
#'
#' @export
#'
MzQCinputFile = setRefClass(
  'MzQCinputFile',
  fields = list(name = 'character',
                location = 'character',
                fileFormat = 'MzQCcvParameter',
                fileProperties = 'list' # array of MzQCcvParameter, optional
  ),
  methods = list(
    # defaults are required, otherwise refClasses do not work.
    initialize = function(name = NA_character_, location = NA_character_, fileFormat = MzQCcvParameter$new(), fileProperties = list())
    {
      .self$name = name
      .self$location = location
      .self$fileFormat = fileFormat
      .self$fileProperties = fileProperties
    },
    # 'name', 'location' and a valid 'fileFormat' are mandatory; 'fileProperties' may be empty
    isValid = function(.self)
    {
      # force evaluation of all fields by '+'
      if (isUndefined(.self$name, .self$location) + !.self$fileFormat$isValid()) return(FALSE)
      return(isValidMzQC(.self$fileProperties)) ## TRUE for empty list, which is ok
    },
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      # no need to check if optional field fileProperties is present. It will be an (empty) JSON array, which is what we want
      # NOTE(review): unlike MzQCcvParameter$toJSON, no 'auto_unbox = TRUE' is passed here,
      # so scalar fields are serialized as one-element arrays -- confirm this is intended
      return (jsonlite::toJSON(list(name = .self$name, location = .self$location, fileFormat = .self$fileFormat, fileProperties = .self$fileProperties)))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$name = data$name
      .self$location = data$location
      ## populate the typed 'fileFormat' field in place
      .self$fileFormat$fromData(data$fileFormat)
      .self$fileProperties = fromDatatoMzQC(MzQCcvParameter, data$fileProperties) ## for lists, call the free function
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCinputFile', function(x, ...) x$toJSON(...))
#
# file_format = MzQCcvParameter$new("MS:1000584", "mzML format")
# nif = MzQCinputFile$new("tmp.mzML", "c:\\", file_format)
# nif
# nif2 = nif
# l2 = list(file_format, file_format)
# nif2$fileProperties = l2
# x = jsonlite::toJSON(nif, pretty = TRUE)
# x
# x2 = jsonlite::toJSON(nif2)
# xdata = jsonlite::fromJSON(x, simplifyDataFrame = FALSE)
# xdata
# class(fromDatatoMzQC(MzQCcvParameter, xdata$fileProperties)) == "list"
# jsonlite::toJSON(xdata, pretty = TRUE, auto_unbox = T)
# isValidMzQC(l2)
# nif$fromData(xdata)
#'
#' Details of the software used to create the QC metrics
#'
#' @field accession Accession number identifying the term within its controlled vocabulary (pattern: ^[A-Z]+:[A-Z0-9]+$).
#' @field name Name of the controlled vocabulary term describing the software tool.
#' @field version Version number of the software tool.
#' @field uri Publicly accessible URI of the software tool or documentation.
#' @field description [optional] Definition of the controlled vocabulary term.
#' @field value [optional] Value of the software tool.
#'
#' @export MzQCanalysisSoftware
#'
#'
#' @export
#'
MzQCanalysisSoftware = setRefClass(
  'MzQCanalysisSoftware',
  fields = list(accession = 'character',
                name = 'character',
                version = 'character',
                uri = 'character',
                description = 'character', # optional
                value = 'character'        # optional
  ),
  methods = list(
    # defaults are required, otherwise refClasses do not work.
    initialize = function(accession = NA_character_,
                          name = NA_character_,
                          version = NA_character_,
                          uri = NA_character_,
                          description = NA_character_, ## optional
                          value = NA_character_        ## optional
    )
    {
      .self$accession = accession
      .self$name = name
      .self$version = version
      .self$uri = uri
      .self$description = description
      .self$value = value
    },
    # 'accession', 'name', 'version' and 'uri' are mandatory; the rest is optional
    isValid = function(.self)
    {
      if (isUndefined(.self$accession, .self$name, .self$version, .self$uri)) return(FALSE)
      return(TRUE)
    },
    # serialize to a JSON object; the optional fields are only included when set
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      r = list("accession" = .self$accession,
               "name" = .self$name,
               "version" = .self$version,
               "uri" = .self$uri)
      if (!isUndefined(.self$description)) r$description = .self$description
      if (!isUndefined(.self$value)) r$value = .self$value
      # NOTE(review): no 'auto_unbox = TRUE' here (unlike MzQCcvParameter$toJSON),
      # so scalar fields become one-element arrays -- confirm this is intended
      return (jsonlite::toJSON(r))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$accession = data$accession
      .self$name = data$name
      .self$version = data$version
      .self$uri = data$uri
      ## optional members may be absent in the JSON -> convert NULL to a typed NA
      .self$description = NULL_to_charNA(data$description)
      .self$value = NULL_to_charNA(data$value)
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCanalysisSoftware', function(x, ...) x$toJSON(...))
#'
#' The metadata for a run/setQuality
#'
#' @field label Unique name for the run (for runQuality) or set (for setQuality).
#' @field inputFiles Array of MzQCinputFile objects
#' @field analysisSoftware Array of MzQCanalysisSoftware objects
#' @field cvParameters [optional] Array of cvParameters objects
#'
#' @export MzQCmetadata
#'
#' @export
#'
MzQCmetadata = setRefClass(
  'MzQCmetadata',
  fields = list(label = 'character',
                inputFiles = 'list',       # array of MzQCinputFile
                analysisSoftware = 'list', # array of MzQCanalysisSoftware
                cvParameters = 'list'      # optional array of MzQCcvParameter
  ),
  methods = list(
    # defaults enable unnamed construction and empty construction for fromData()
    initialize = function(label = NA_character_, inputFiles =list(), analysisSoftware = list(), cvParameters = list())
    {
      .self$label = label
      .self$inputFiles = inputFiles
      .self$analysisSoftware = analysisSoftware
      .self$cvParameters = cvParameters
    },
    # 'label' plus valid inputFiles/analysisSoftware/cvParameters are required
    isValid = function(.self)
    {
      # force evaluation of all fields by '+'
      if (isUndefined(.self$label) + !isValidMzQC(.self$inputFiles, .self$analysisSoftware, .self$cvParameters)) return(FALSE)
      return(TRUE)
    },
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      r = list("label" = .self$label,
               "inputFiles" = .self$inputFiles,
               "analysisSoftware" = .self$analysisSoftware,
               "cvParameters" = .self$cvParameters) ## might yield an empty JSON array, but ok
      return (jsonlite::toJSON(r))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$label = data$label
      ## the arrays are converted element-wise via the free function
      .self$inputFiles = fromDatatoMzQC(MzQCinputFile, data$inputFiles)
      .self$analysisSoftware = fromDatatoMzQC(MzQCanalysisSoftware, data$analysisSoftware)
      .self$cvParameters = fromDatatoMzQC(MzQCcvParameter, data$cvParameters)
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCmetadata', function(x, ...) x$toJSON(...))
################################################################################################################################
#################################################################################################################################'
#' The central class to store QC information
#'
#' @field accession Accession number identifying the term within its controlled vocabulary (pattern: ^[A-Z]+:[A-Z0-9]+$).
#' @field name Name of the controlled vocabulary element describing the metric.
#' @field description [optional] Definition of the controlled vocabulary term.
#' @field value [optional] Value of the metric (single value, n-tuple, table, matrix).
#'        The structure is not checked by our mzQC implementation and must be handled by the caller
#' @field unit [optional] Array of unit(s), stored as MzQCcvParameter
#'
#' @export MzQCqualityMetric
#'
MzQCqualityMetric = setRefClass(
  'MzQCqualityMetric',
  fields = list(accession = 'character',
                name = 'character',
                description = 'character', # optional
                value = 'ANY',             # optional value of unspecified type
                unit = 'list'              # optional array of MzQCcvParameter
  ),
  methods = list(
    # defaults enable unnamed construction and empty construction for fromData()
    initialize = function(accession = NA_character_, name = NA_character_, description = NA_character_, value = NA, unit = list())
    {
      .self$accession = accession
      .self$name = name
      .self$description = description
      if (!missing(value)) .self$value = value else .self$value = NA ## need to set as NA explicitly, because the default value 'uninitialized class ANY' cannot be converted to JSON
      .self$unit = unit
    },
    # 'accession' and 'name' are mandatory by the mzQC standard
    isValid = function(.self)
    {
      if (isUndefined(.self$accession, .self$name)) return(FALSE)
      return(TRUE)
    },
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      r = list("accession" = .self$accession,
               "name" = .self$name,
               "description" = .self$description,
               "value" = .self$value, ## NA is written as "value": [null] and read back as NA
               "unit" = .self$unit)   ## might yield an empty JSON array, but ok
      return (jsonlite::toJSON(r))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$accession = data$accession
      .self$name = data$name
      .self$description = NULL_to_charNA(data$description)
      ## fixed: guard against an absent (NULL) or vector-valued 'value' --
      ## is.na(NULL) yields logical(0) and is.na(vector) a multi-element
      ## condition, both of which are invalid inside 'if'
      if (!is.null(data$value) && !all(is.na(data$value))) .self$value = data$value
      .self$unit = fromDatatoMzQC(MzQCcvParameter, data$unit) ## if data$unit is empty, or NA, the empty list will be returned
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCqualityMetric', function(x, ...) x$toJSON(...))
# NOTE(review): the three statements below look like leftover debugging code --
# they execute at package load/build time, construct a throw-away metric and
# round-trip it through JSON without using the result. Confirm and remove.
a_qc_metric = MzQCqualityMetric$new("acc", "nnam")
xq = jsonlite::toJSON(a_qc_metric)
jsonlite::fromJSON(xq)
#'
#' Base class of runQuality/setQuality
#'
#' @field metadata The metadata for this run/setQuality
#' @field qualityMetrics Array of MzQCqualityMetric objects
#'
#' @export MzQCbaseQuality
#'
#'
MzQCbaseQuality = setRefClass(
  'MzQCbaseQuality',
  fields = list(metadata = 'MzQCmetadata',
                qualityMetrics = 'list'), # array of MzQCqualityMetric
  methods = list(
    ## fixed: the default for 'metadata' must be an actual MzQCmetadata object;
    ## the field is typed, so the former default of NA could never be assigned
    initialize = function(metadata = MzQCmetadata$new(), qualityMetrics = list())
    {
      .self$metadata = metadata
      .self$qualityMetrics = qualityMetrics
    },
    # valid when both the metadata and every quality metric are valid
    isValid = function(.self)
    {
      if (!isValidMzQC(.self$metadata, .self$qualityMetrics)) return(FALSE)
      return(TRUE)
    },
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      r = list("metadata" = .self$metadata,
               "qualityMetrics" = .self$qualityMetrics)
      return (jsonlite::toJSON(r))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      ## fixed: populate the typed 'metadata' field in place -- assigning the
      ## plain list from jsonlite would violate the field's declared class
      .self$metadata$fromData(data$metadata)
      ## fixed copy-paste error: the array elements are MzQCqualityMetric,
      ## not MzQCbaseQuality
      .self$qualityMetrics = fromDatatoMzQC(MzQCqualityMetric, data$qualityMetrics) ## if data$qualityMetrics is empty, or NA, the empty list will be returned
    }
  )
)
# register the S4 dispatch so jsonlite::toJSON() picks up the class-specific serializer
setMethod('asJSON', 'MzQCbaseQuality', function(x, ...) x$toJSON(...))
###########################################################################
#' Root element of an mzQC document
#'
#' At least one of runQualities or setQualities MUST be present.
#'
#' @field version Version of the mzQC format.
#' @field creationDate Creation date of the mzQC file.
#' @field contactName Name of the operator/creator of this mzQC file.
#' @field contactAddress Contact address (mail/e-mail or phone)
#' @field readMe Description and comments about the mzQC file contents.
#' @field runQualities Array of MzQCbaseQuality;
#' @field setQualities Array of MzQCbaseQuality
#' @field controlledVocabularies Array of CV domains used (obo files)
#'
#' @export MzQCmzQC
#'
#'
MzQCmzQC = setRefClass(
  'MzQCmzQC',
  fields = list(version = 'character',
                creationDate = 'MzQCDateTime',
                contactName = 'character',    # optional
                contactAddress = 'character', # optional
                readMe = 'character',         # optional
                runQualities = 'list',        # either this ... or (array of MzQCbaseQuality)
                setQualities = 'list',        # ... this must be present (array of MzQCbaseQuality)
                controlledVocabularies = 'list'), # array of MzQCcontrolledVocabulary
  methods = list(
    # defaults enable unnamed construction and empty construction for fromData()
    initialize = function(version = NA_character_,
                          creationDate = MzQCDateTime$new(),
                          contactName = NA_character_,
                          contactAddress = NA_character_,
                          readMe = NA_character_,
                          runQualities = list(),
                          setQualities = list(),
                          controlledVocabularies = list())
    {
      .self$version = version
      .self$creationDate = creationDate
      .self$contactName = contactName
      .self$contactAddress = contactAddress
      .self$readMe = readMe
      .self$runQualities = runQualities
      .self$setQualities = setQualities
      .self$controlledVocabularies = controlledVocabularies
    },
    isValid = function(.self)
    {
      # force evaluation using '+'
      if (isUndefined(.self$version) +
          !isValidMzQC(.self$creationDate, .self$runQualities, .self$setQualities, .self$controlledVocabularies)) return(FALSE)
      # at least one must be present
      if (length(.self$runQualities) + length(.self$setQualities) == 0) return(FALSE)
      return(TRUE)
    },
    # serialize to a JSON object; the optional contact/readMe fields are only included when set
    toJSON = function(.self, ...)
    {
      if (!isValidMzQC(.self)) stop(paste0("Object of class '", class(.self), "' is not in a valid state for writing to JSON"))
      r = list("version" = .self$version,
               "creationDate" = .self$creationDate)
      if (!isUndefined(.self$contactName)) r$contactName = .self$contactName
      if (!isUndefined(.self$contactAddress)) r$contactAddress = .self$contactAddress
      if (!isUndefined(.self$readMe)) r$readMe = .self$readMe
      r$runQualities = .self$runQualities
      r$setQualities = .self$setQualities
      r$controlledVocabularies = .self$controlledVocabularies
      return (jsonlite::toJSON(r))
    },
    # populate the fields from a plain R list (as produced by jsonlite::fromJSON)
    fromData = function(.self, data)
    {
      .self$version = data$version
      ## fixed: populate the typed 'creationDate' field in place (mirrors the
      ## 'fileFormat' pattern in MzQCinputFile); assigning the return value of
      ## fromDatatoMzQC() would violate the field's declared class
      .self$creationDate$fromData(data$creationDate)
      .self$contactName = NULL_to_charNA(data$contactName)
      .self$contactAddress = NULL_to_charNA(data$contactAddress)
      .self$readMe = NULL_to_charNA(data$readMe)
      .self$runQualities = fromDatatoMzQC(MzQCbaseQuality, data$runQualities) ## if data$runQualities is empty, or NA, the empty list will be returned
      .self$setQualities = fromDatatoMzQC(MzQCbaseQuality, data$setQualities) ## if data$setQualities is empty, or NA, the empty list will be returned
      .self$controlledVocabularies = fromDatatoMzQC(MzQCcontrolledVocabulary, data$controlledVocabularies) ## if data$controlledVocabularies is empty, or NA, the empty list will be returned
    }
  )
)
## fixed: register the serializer for 'MzQCmzQC' -- this line previously
## re-registered 'MzQCbaseQuality' (copy-paste error), leaving the root
## MzQCmzQC class without an asJSON dispatch
setMethod('asJSON', 'MzQCmzQC', function(x, ...) x$toJSON(...))
"R",
"RMarkdown"
] | 4 | RMarkdown | pscheil/PTXQC | e4d37b8a83546262f3694965ad1985b1478b541c | 2e9791361aaa1f11589282e5579b2fec31cafe5d |
refs/heads/master | <repo_name>mat013/eclipseprefs<file_sep>/bash/.bash_aliases
alias gg='git gui &'
alias ggg='gitk &'
alias gosources='cd ~/sources'
alias govagrant='cd ~/vagrant'
<file_sep>/bash/bash_aliases
alias gg='git gui &'
alias ggg='gitk &'
alias gitchanges='git log -p --'
alias gitundo='git reset HEAD~'
alias gosources='cd ~/sources'
alias govagrant='cd ~/vagrant'
export VISUAL=vi
export EDITOR="$VISUAL"
| bbe0b43d38801fa605170f6bf6c554b45f598ece | [
"Shell"
] | 2 | Shell | mat013/eclipseprefs | 469f4215f1cb783b9b1b8b9dc6dcf719632f851f | a4e203f51c58340addef895b5f2dd2337b963240 |
refs/heads/main | <repo_name>hzh595395786/wsc_django<file_sep>/wsc_django/wsc_django/apps/config/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MsgNotify',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('order_confirm_wx', models.BooleanField(default=False, verbose_name='开始配送/等待自提-微信')),
('order_confirm_msg', models.BooleanField(default=False, verbose_name='开始配送/等待自提-短信')),
('order_finish_wx', models.BooleanField(default=False, verbose_name='订单完成-微信')),
('order_finish_msg', models.BooleanField(default=False, verbose_name='订单完成-短信')),
('order_refund_wx', models.BooleanField(default=False, verbose_name='订单退款-微信')),
('order_refund_msg', models.BooleanField(default=False, verbose_name='订单退款-短信')),
('group_success_wx', models.BooleanField(default=False, verbose_name='成团提醒-微信')),
('group_success_msg', models.BooleanField(default=False, verbose_name='成团提醒-短信')),
('group_failed_wx', models.BooleanField(default=False, verbose_name='拼团失败-微信')),
('group_failed_msg', models.BooleanField(default=False, verbose_name='拼团失败-短信')),
],
options={
'verbose_name': '消息通知',
'verbose_name_plural': '消息通知',
'db_table': 'msgnotfiy',
},
),
migrations.CreateModel(
name='Printer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('type', models.SmallIntegerField(default=1, verbose_name='打印机类型1:本地2:云, 预留')),
('brand', models.SmallIntegerField(verbose_name='打印机品牌 1:易联云, 2:飞印, 3:佛山喜讯, 4:365 S1, 5:365 S2, 6:森果')),
('code', models.CharField(default='', max_length=32, verbose_name='打印机终端号')),
('key', models.CharField(default='', max_length=32, verbose_name='打印机秘钥')),
('temp_id', models.SmallIntegerField(default=1, verbose_name='打印模板, 预留')),
('auto_print', models.SmallIntegerField(default=1, verbose_name='订单自动打印')),
('status', models.SmallIntegerField(default=1, verbose_name='打印机状态,预留')),
],
options={
'verbose_name': '打印机',
'verbose_name_plural': '打印机',
'db_table': 'printer',
},
),
migrations.CreateModel(
name='Receipt',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('bottom_msg', models.CharField(default='', max_length=128, verbose_name='小票底部信息')),
('bottom_qrcode', models.CharField(default='', max_length=128, verbose_name='小票底部二维码')),
('bottom_image', models.CharField(default='', max_length=512, verbose_name='小票底部图片,预留')),
('brcode_active', models.SmallIntegerField(default=0, verbose_name='打印订单号条码')),
('copies', models.SmallIntegerField(default=1, verbose_name='小票打印份数')),
],
options={
'verbose_name': '小票',
'verbose_name_plural': '小票',
'db_table': 'receipt',
},
),
migrations.CreateModel(
name='ShareSetup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('custom_title_name', models.CharField(default='', max_length=64, verbose_name='自定义分享标题名称')),
('custom_share_description', models.CharField(default='', max_length=64, verbose_name='自定义分享描述')),
],
options={
'verbose_name': '分享设置',
'verbose_name_plural': '分享设置',
'db_table': 'share_setup',
},
),
migrations.CreateModel(
name='SomeConfig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('show_off_product', models.BooleanField(default=True, verbose_name='货品板块显示已下架货品')),
('new_order_voice', models.BooleanField(default=True, verbose_name='新订单语音提醒')),
('weixin_jsapi', models.BooleanField(default=False, verbose_name='是否开启微信支付')),
('on_delivery', models.BooleanField(default=True, verbose_name='是否开启货到付款')),
],
options={
'verbose_name': '一些杂乱的配置项',
'verbose_name_plural': '一些杂乱的配置项',
'db_table': 'some_config',
},
),
]
<file_sep>/wsc_django/wsc_django/apps/groupon/constant.py
class GrouponType:
NORMAL = 1 # 普通拼团
MENTOR = 2 # 老带新
class GrouponStatus:
ON = 1 # 启用
OFF = 2 # 停用
EXPIRED = 3 # 过期
class GrouponAttendStatus:
EXPIRED = -1 # 已过期
CREATED = 0 # 已创建
WAITTING = 1 # 拼团中
SUCCEEDED = 2 # 已成团
FAILED = 3 # 已失败
class GrouponAttendLineStatus:
EXPIRED = -1 # 已过期
UNPAID = 0 # 未付款
PAID = 1 # 已付款
<file_sep>/wsc_django/wsc_django/apps/printer/services.py
import datetime
import hashlib
import time
import requests
import jinja2
from django.utils.timezone import make_aware
from config.services import get_receipt_by_shop_id, get_printer_by_shop_id
from logs.constant import OrderLogType
from logs.services import create_order_log
from order.models import Order
from shop.services import get_shop_by_shop_id
# Jinja2 template for a 58mm receipt, using YiLianYun printer markup tags:
# <FS> enlarged font, <center>/<right> alignment, <BR3> barcode, <QR> QR code,
# and <table>/<tr>/<td> for the product grid. Rendered by print_order() with
# context {shop, order, receipt_config, print_time}.
ORDER_TPL_58 = (
    "<FS><center>{{shop.shop_name}}</center></FS>"
    + "订单号: {{order.num}}\n"
    + "{% if receipt_config.brcode_active %}<BR3>{{order.num}}</BR3>{% endif %}"
    + "下单时间: {{order.create_time}}\n"
    + "打印时间: {{print_time}}\n"
    + "**************商品**************\n"
    + "<table>"
    + "<tr>"
    + "<td>商品名</td>"
    + "<td>单价</td>"
    + "<td>数量</td>"
    + "<td>小计</td>"
    + "</tr>"
    + "{% for order_line in order.order_lines %}"
    + "<tr>"
    + "<td>{{order_line.product_name}}</td>"
    + "<td>{{order_line.price_net | round(2)}}</td>"
    + "<td>{{order_line.quantity_net | round(2)}}</td>"
    + "<td>{{order_line.amount_net | round(2)}}</td>"
    + "</tr>"
    + "{% endfor %}"
    + "</table>"
    + "********************************"
    + "<right>{{order.delivery_amount_text}}: {{order.delivery_amount_net | round(2)}}</right>"
    + "<FS><right>合计:{{order.total_amount_net | round(2)}}元</right></FS>"
    + "<right>{{order.pay_type_text}}</right>\n"
    + "********************************"
    + "<FS>客户: {{order.address.name}} {% if order.address.sex %}{{order.address.sex_text}}{% endif %}</FS>\n"
    + "<FS>电话: {{order.address.phone}}</FS>\n"
    + "{% if order.delivery_method == 1 %}<FS>地址: {{ order.address.full_address }}</FS>\n{% endif %}"
    + "{% if order.remark %}<FS>备注: {{order.remark}}</FS>\n{% endif %}"
    + "********************************"
    + "{% if receipt_config.bottom_msg %}{{receipt_config.bottom_msg}}\n{% endif %}"
    + "{% if receipt_config.bottom_qrcode %}<QR>{{receipt_config.bottom_qrcode}}</QR>{% endif %}"
    + "<center>技术支持: 森果 senguo.cc</center>"
)
class ylyPrinter:
    """YiLianYun (易联云) cloud receipt-printer client."""

    def send_request(self, data, copy):
        """Send a print request to the YiLianYun HTTP API.

        :param data: signed request payload (dict) for the print endpoint
        :param copy: number of copies; each copy is one HTTP request
        :return: (success: bool, error_message: str)
        """
        if not data:
            return False, "易联云打印失败,请在店铺设置中检查打印机终端号是否正确设置"
        # Guard: with copy < 1 the loop body would never run and the state
        # variable would be unbound (NameError in the original code).
        if copy < 1:
            return False, "易联云打印失败,打印份数配置错误"
        try:
            for _ in range(copy):
                r = requests.post(
                    "http://open.10ss.net:8888", data=data, timeout=(1, 5)
                )
                # Parse the JSON response instead of eval()-ing the raw body,
                # which would execute arbitrary code from the remote server.
                state = int(r.json()["state"])
        except:
            return False, "易联云打印接口返回异常,请稍后重试"
        # Only the state of the last copy is checked, matching the original
        # behaviour.
        if state == 1:
            return True, ""
        elif state in [3, 4]:
            return False, "易联云打印失败,请在店铺设置中检查打印机终端号是否正确设置"
        else:
            return False, "易联云打印失败,错误代码:%s" % state
def print_order(order: Order, user_id: int = 0):
    """Render an order onto the 58mm receipt template and send it to the
    shop's YiLianYun printer, logging the print operation on success.

    :param order: the Order to print (shop, lines, address read by the template)
    :param user_id: operator id recorded in the order log; defaults to 0
    :return: (success: bool, error_message: str)
    """
    shop_id = order.shop.id
    shop = get_shop_by_shop_id(shop_id)
    receipt_config = get_receipt_by_shop_id(shop_id)
    printer = ylyPrinter()
    template = jinja2.Template(ORDER_TPL_58)
    body = template.render(
        order=order,
        print_time=make_aware(datetime.datetime.now()).strftime("%Y-%m-%d %H:%M:%S"),
        shop=shop,
        receipt_config=receipt_config,
    )
    printer_config = get_printer_by_shop_id(shop_id)
    if not printer_config:
        return False, "请先添加打印机"
    # NOTE(review): partner id and API key are hard-coded in source; consider
    # moving them into settings/secret storage.
    partner = "1693"  # YiLianYun user (partner) id
    apikey = "664466347d04d1089a3d373ac3b6d985af65d78e"  # API key
    timenow = str(int(time.time()))  # current unix timestamp
    machine_code = printer_config.code  # printer terminal number, e.g. 520
    mkey = printer_config.key  # printer key, e.g. 110110
    if machine_code and mkey:
        # Signature: MD5 over apikey + literal field names + values, upper-cased,
        # as required by the YiLianYun open API signing scheme.
        sign = "{}machine_code{}partner{}time{}{}".format(
            apikey, machine_code, partner, timenow, mkey
        )
        sign = hashlib.md5(sign.encode("utf-8")).hexdigest().upper()
    else:
        return False, "打印机配置错误"
    data = {
        "partner": partner,
        "machine_code": machine_code,
        "content": body,
        "time": timenow,
        "sign": sign,
    }
    success, msg = printer.send_request(data, receipt_config.copies)
    # user_id defaults to 0, so logging happens unless a caller passes a
    # negative id to suppress it — TODO confirm that is the intent.
    if success and user_id >= 0:
        log_info = {
            "order_num": order.order_num,
            "shop_id": order.shop.id,
            "operator_id": user_id,
            "operate_type": OrderLogType.PRINT,
        }
        create_order_log(log_info)
    return success, msg
from django.http import QueryDict
from rest_framework import status
from webargs.djangoparser import use_args
from webargs import fields, validate
from customer.constant import MineAddressDefault
from order.constant import OrderType, OrderPayType, OrderDeliveryMethod, OrderStatus
from order.serializers import AdminOrdersSerializer
from user.constant import Sex
from wsc_django.utils.arguments import StrToList
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import AdminBaseView, MallBaseView
from customer.interface import list_customer_orders_interface
from customer.serializers import (
AdminCustomerSerializer,
AdminCustomerPointsSerializer,
MallMineAddressSerializer,
)
from customer.services import (
update_customer_remark,
get_mine_address_by_id,
list_customer_by_shop_id,
delete_mine_address_by_id,
list_customer_point_by_customer_id,
get_customer_by_customer_id_and_shop_id,
list_mine_address_by_user_id_and_shop_id,
get_mine_default_address_by_user_id_and_shop_id,
)
class AdminCustomerView(AdminBaseView):
    """Admin - customer - single customer detail."""

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CUSTOMER]
    )
    @use_args({"customer_id": fields.Integer(required=True, comment="客户ID")}, location="query")
    def get(self, request, args):
        """Return the detail of one customer belonging to the current shop."""
        record = get_customer_by_customer_id_and_shop_id(
            args.get("customer_id"),
            self.current_shop.id,
            with_user_info=True,
        )
        if not record:
            return self.send_fail(error_text="客户不存在")
        return self.send_success(data=AdminCustomerSerializer(record).data)
class AdminCustomersView(AdminBaseView):
    """Admin - customer - paginated customer list with sorting and search."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CUSTOMER]
    )
    @use_args(
        {
            "sort_prop": fields.String(
                required=False,
                missing="",
                validate=[validate.OneOf(["", "consume_amount", "consume_count"])],
                comment="排序字段",
            ),
            "sort": fields.Function(
                # Reduces "ascending"/"descending" to "asc"/"desc". Note that
                # str.rstrip strips a character *set*, not a suffix; it happens
                # to produce the right result for these two words.
                deserialize=lambda x: x.rstrip("ending"),
                required=False,
                missing="",
                validate=[validate.OneOf(["", "asc", "desc"])],
                comment="排序方式, +:正序,-:倒序",
            ),
            "keyword": fields.String(
                required=False, missing="", comment="搜索关键字,昵称或者手机号"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        """Return the current shop's customers filtered/sorted by query args."""
        shop = self.current_shop
        customer_list = list_customer_by_shop_id(shop.id, **args)
        customer_list = self._get_paginated_data(customer_list, AdminCustomerSerializer)
        return self.send_success(data_list=customer_list)
class AdminCustomerRemarkView(AdminBaseView):
    """Admin - customer - update a customer's remark text."""

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CUSTOMER]
    )
    @use_args(
        {
            "customer_id": fields.Integer(required=True, comment="客户ID"),
            "remark": fields.String(required=True, validate=[validate.Length(0, 20)]),
        },
        location="json",
    )
    def put(self, request, args):
        """Overwrite the remark of one customer in the current shop."""
        target = get_customer_by_customer_id_and_shop_id(
            args.get("customer_id"), self.current_shop.id
        )
        if not target:
            return self.send_fail(error_text="客户或customer_id不存在")
        update_customer_remark(target, args.get("remark"))
        return self.send_success()
class AdminCustomerPointsView(AdminBaseView):
    """Admin - customer - paginated point-change history of one customer."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CUSTOMER]
    )
    @use_args(
        {
            "customer_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="客户ID"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        """List the point records of one customer in the current shop."""
        customer = get_customer_by_customer_id_and_shop_id(
            args.get("customer_id"), self.current_shop.id
        )
        if not customer:
            return self.send_fail(error_text="客户不存在")
        point_records = list_customer_point_by_customer_id(customer.id)
        paginated = self._get_paginated_data(
            point_records, AdminCustomerPointsSerializer
        )
        return self.send_success(data_list=paginated)
class AdminCustomerOrdersView(AdminBaseView):
    """Admin - customer - paginated order history of one customer,
    filterable by order type, pay type, delivery method and status."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CUSTOMER]
    )
    @use_args(
        {
            "customer_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="客户ID"
            ),
            "order_types": StrToList(
                required=False,
                missing=[],
                validate=[validate.ContainsOnly([OrderType.NORMAL, OrderType.GROUPON])],
                comment="订单类型, 1: 普通订单, 5: 拼团订单",
            ),
            "order_pay_types": StrToList(
                required=False,
                missing=[],
                validate=[
                    validate.ContainsOnly(
                        [OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY]
                    )
                ],
                comment="订单支付类型, 1: 微信支付, 2: 货到付款",
            ),
            "order_delivery_methods": StrToList(
                required=False,
                missing=[],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderDeliveryMethod.HOME_DELIVERY,
                            OrderDeliveryMethod.CUSTOMER_PICK,
                        ]
                    )
                ],
                comment="配送类型, 1: 送货上门, 2: 自提",
            ),
            # By default every post-payment status is included.
            "order_status": StrToList(
                required=False,
                missing=[
                    OrderStatus.PAID,
                    OrderStatus.CONFIRMED,
                    OrderStatus.FINISHED,
                    OrderStatus.REFUNDED,
                ],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderStatus.PAID,
                            OrderStatus.CONFIRMED,
                            OrderStatus.FINISHED,
                            OrderStatus.REFUNDED,
                        ]
                    )
                ],
                comment="订单状态, 2: 未处理, 3: 处理中, 4: 已完成, 5: 已退款",
            ),
        },
        location="query"
    )
    def get(self, request, args):
        """Return the filtered order list for one customer of the shop."""
        args["shop_id"] = self.current_shop.id
        order_list = list_customer_orders_interface(**args)
        order_list = self._get_paginated_data(order_list, AdminOrdersSerializer)
        return self.send_success(data_list=order_list)
class MallMineAddressView(MallBaseView):
    """Mall - mine - create / list / update / delete shipping addresses."""

    pagination_class = StandardResultsSetPagination

    def get(self, request, shop_code):
        """List the current user's shipping addresses in this shop (paginated)."""
        self._set_current_shop(request, shop_code)
        user = self.current_user
        shop = self.current_shop
        mine_address_list = list_mine_address_by_user_id_and_shop_id(user.id, shop.id)
        mine_address_list = self._get_paginated_data(mine_address_list, MallMineAddressSerializer)
        return self.send_success(data_list=mine_address_list)

    @use_args(
        {
            "name": fields.String(
                required=True,
                validate=[validate.Length(1)],
                comment="收货人姓名",
            ),
            "sex": fields.Integer(
                required=False,
                default=Sex.UNKNOWN,
                validate=[validate.OneOf([Sex.UNKNOWN, Sex.MALE, Sex.FEMALE])],
                comment="性别",
            ),
            "phone": fields.String(required=True, comment="手机号"),
            "province": fields.Integer(
                required=True, comment="省份编码"
            ),
            "city": fields.Integer(required=True, comment="城市编码"),
            "county": fields.Integer(
                required=True, comment="区份编码"
            ),
            "address": fields.String(
                required=True,
                validate=[validate.Length(1, 50)],
                comment="详细地址",
            ),
            'added': fields.String(
                required=False,
                allow_none=True,
                comment="补充说明",
            ),
            "default": fields.Integer(
                required=False,
                default=MineAddressDefault.NO,
                validate=[validate.OneOf([MineAddressDefault.YES, MineAddressDefault.NO])],
                comment="是否为默认地址",
            ),
            "longitude": fields.Float(
                required=False,
                validate=[validate.Range(-180, 180)],
                comment="经度",
            ),
            "latitude": fields.Float(
                required=False,
                validate=[validate.Range(-90, 90)],
                comment="纬度",
            ),
        },
        location="json",
    )
    def post(self, request, args, shop_code):
        """Create a shipping address for the current user in this shop."""
        self._set_current_shop(request, shop_code)
        serializer = MallMineAddressSerializer(data=args, context={"self": self})
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success(data=serializer.data)

    @use_args(
        {
            "address_id": fields.Integer(
                required=True,
                validate=[validate.Range(1)],
                comment="地址ID",
            ),
            "name": fields.String(required=True, comment="收货人姓名"),
            "sex": fields.Integer(
                required=False,
                default=Sex.UNKNOWN,
                validate=[validate.OneOf([Sex.UNKNOWN, Sex.MALE, Sex.FEMALE])],
                comment="性别",
            ),
            "phone": fields.String(required=True, comment="手机号"),
            "province": fields.Integer(
                required=True, comment="省份编码"
            ),
            "city": fields.Integer(required=True, comment="城市编码"),
            "county": fields.Integer(
                required=True, comment="区份编码"
            ),
            "address": fields.String(
                required=True,
                validate=[validate.Length(1, 50)],
                comment="详细地址",
            ),
            'added': fields.String(
                required=False,
                allow_none=True,
                comment="补充说明",
            ),
            "default": fields.Integer(
                required=False,
                default=MineAddressDefault.NO,
                validate=[validate.OneOf([MineAddressDefault.NO, MineAddressDefault.YES])],
                comment="是否为默认地址",
            ),
            "longitude": fields.Float(
                required=False,
                validate=[validate.Range(-180, 180)],
                comment="经度",
            ),
            "latitude": fields.Float(
                required=False,
                validate=[validate.Range(-90, 90)],
                comment="纬度",
            ),
        },
        location="json",
    )
    def put(self, request, args, shop_code):
        """Update one of the current user's addresses."""
        self._set_current_shop(request, shop_code)
        address_id = args.pop("address_id")
        user = self.current_user
        shop = self.current_shop
        mine_address = get_mine_address_by_id(address_id, user.id, shop.id)
        if not mine_address:
            return self.send_fail(error_text="地址不存在")
        serializer = MallMineAddressSerializer(mine_address, data=args, context={"self": self})
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success()

    @use_args(
        {
            "id": fields.Integer(
                required=True,
                validate=[validate.Range(1)],
                data_key="address_id",
                comment="地址ID",
            )
        },
        location="json",
    )
    def delete(self, request, args, shop_code):
        """Delete one of the current user's addresses."""
        self._set_current_shop(request, shop_code)
        user = self.current_user
        shop = self.current_shop
        # Bug fix: webargs/marshmallow keys the parsed dict by the *field name*
        # ("id"); data_key="address_id" only maps the incoming JSON key. The
        # original args.get("address_id") always returned None, so a delete
        # could never resolve the address.
        address_id = args.get("id")
        ret, info = delete_mine_address_by_id(address_id, user.id, shop.id)
        if not ret:
            return self.send_fail(error_text=info)
        else:
            return self.send_success()
class MallMineDefaultAddressView(MallBaseView):
    """Mall - fetch the current user's default shipping address."""

    def get(self, request, shop_code):
        """Return the default address of the current user in this shop."""
        self._set_current_shop(request, shop_code)
        address = get_mine_default_address_by_user_id_and_shop_id(
            self.current_user.id, self.current_shop.id
        )
        if not address:
            return self.send_fail(error_text="还未设置默认地址")
        return self.send_success(data=MallMineAddressSerializer(address).data)
from order.models import Order
from order.services import set_order_paid
from user.services import get_openid_by_user_id_and_appid, create_user_openid
def get_user_openid_interface(user_id: int, mp_appid: str):
    """Fetch a user's wx_openid by user id and official-account appid
    (thin delegation to the user service)."""
    return get_openid_by_user_id_and_appid(user_id, mp_appid)
def create_user_openid_interface(user_id: int, mp_appid: str, wx_openid: str):
    """Persist a (user, official-account appid) -> wx_openid mapping
    (thin delegation to the user service)."""
    return create_user_openid(user_id, mp_appid, wx_openid)
def pay_order_interfaces(order: Order):
    """Mark an order as paid and create the related follow-up records
    (delegates to the order service; commits)."""
    return set_order_paid(order)
"""
客户相关的路由
"""
from django.urls import path, re_path
from customer import views
urlpatterns_admin = [
    path('api/admin/customer/', views.AdminCustomerView.as_view()),  # customer detail
    path('api/admin/customers/', views.AdminCustomersView.as_view()),  # customer list
    path('api/admin/customer/remark/', views.AdminCustomerRemarkView.as_view()),  # update customer remark
    path('api/admin/customer/points/', views.AdminCustomerPointsView.as_view()),  # customer points history
    path('api/admin/customer/orders/', views.AdminCustomerOrdersView.as_view()),  # customer order history
]

urlpatterns_mall = [
    # create / update / delete / list the user's shipping addresses
    re_path(r'^api/mall/mine/address/(?P<shop_code>\w+)/$', views.MallMineAddressView.as_view()),
    # fetch a customer's default address
    re_path(r'^api/mall/mine/default/address/(?P<shop_code>\w+)/$', views.MallMineDefaultAddressView.as_view()),
]

# Combined route table exported to the URL conf.
urlpatterns = urlpatterns_admin + urlpatterns_mall
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration for the logs app.

    Creates the per-module operation-log tables: config_log,
    operate_log_unify, order_log, product_log, promotion_log and staff_log.
    Generated code — do not hand-edit the operations.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ConfigLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('shop_id', models.IntegerField(verbose_name='商铺id')),
                ('operate_time', models.DateTimeField(auto_now_add=True, verbose_name='操作时间')),
                ('operate_type', models.SmallIntegerField(verbose_name='操作类型')),
                ('operate_content', models.CharField(default='', max_length=512, verbose_name='操作内容')),
            ],
            options={
                'verbose_name': '设置模块操作日志',
                'verbose_name_plural': '设置模块操作日志',
                'db_table': 'config_log',
            },
        ),
        migrations.CreateModel(
            name='OperateLogUnify',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('shop_id', models.IntegerField(verbose_name='商铺id')),
                ('operate_time', models.DateTimeField(auto_now_add=True, verbose_name='操作时间')),
                ('operate_module', models.SmallIntegerField(verbose_name='操作模块')),
                ('log_id', models.IntegerField(verbose_name='子模块的操作记录id')),
            ],
            options={
                'verbose_name': '操作记录',
                'verbose_name_plural': '操作记录',
                'db_table': 'operate_log_unify',
            },
        ),
        migrations.CreateModel(
            name='OrderLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('shop_id', models.IntegerField(verbose_name='商铺id')),
                ('operate_time', models.DateTimeField(auto_now_add=True, verbose_name='操作时间')),
                ('operate_type', models.SmallIntegerField(verbose_name='操作类型')),
                ('operate_content', models.CharField(default='', max_length=512, verbose_name='操作内容')),
                ('order_num', models.CharField(max_length=20, verbose_name='订单号')),
                ('order_id', models.IntegerField(verbose_name='订单id')),
            ],
            options={
                'verbose_name': '订单日志',
                'verbose_name_plural': '订单日志',
                'db_table': 'order_log',
            },
        ),
        migrations.CreateModel(
            name='ProductLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('shop_id', models.IntegerField(verbose_name='商铺id')),
                ('operate_time', models.DateTimeField(auto_now_add=True, verbose_name='操作时间')),
                ('operate_type', models.SmallIntegerField(verbose_name='操作类型')),
                ('operate_content', models.CharField(default='', max_length=512, verbose_name='操作内容')),
            ],
            options={
                'verbose_name': '货品日志',
                'verbose_name_plural': '货品日志',
                'db_table': 'product_log',
            },
        ),
        migrations.CreateModel(
            name='PromotionLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('shop_id', models.IntegerField(verbose_name='商铺id')),
                ('operate_time', models.DateTimeField(auto_now_add=True, verbose_name='操作时间')),
                ('operate_type', models.SmallIntegerField(verbose_name='操作类型')),
                ('operate_content', models.CharField(default='', max_length=512, verbose_name='操作内容')),
            ],
            options={
                'verbose_name': '玩法日志',
                'verbose_name_plural': '玩法日志',
                'db_table': 'promotion_log',
            },
        ),
        migrations.CreateModel(
            name='StaffLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('shop_id', models.IntegerField(verbose_name='商铺id')),
                ('operate_time', models.DateTimeField(auto_now_add=True, verbose_name='操作时间')),
                ('operate_type', models.SmallIntegerField(verbose_name='操作类型')),
                ('operate_content', models.CharField(default='', max_length=512, verbose_name='操作内容')),
                ('staff_id', models.IntegerField(verbose_name='被操作的员工ID')),
            ],
            options={
                'verbose_name': '员工日志',
                'verbose_name_plural': '员工日志',
                'db_table': 'staff_log',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/utils/core.py
"""项目要用到的一些类和函数"""
import datetime
import re
import random
import requests
from django_redis import get_redis_connection
from rest_framework import serializers
from settings import BAIDU_APIKEY, BAIDU_SECRETKEY
from wsc_django.utils.region_file import REGION
ORDER_SHOP_TYPE_PREFIX = "60"  # fixed order-number prefix for the WSC (micro-mall) shop type
class Random:
    """Helpers for random-value generation."""

    @classmethod
    def gen_random_str(cls, str_length):
        """Return a random alphanumeric string of ``str_length`` characters."""
        alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
        return ''.join(random.choice(alphabet) for _ in range(str_length))
class FuncField(serializers.Field):
    """DRF field that applies a caller-supplied callable on serialization.

    ``func`` transforms the value on read; incoming values pass through
    unchanged on write.
    """
    def __init__(self, func, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.func = func  # transformation applied on output
    def to_representation(self, value):
        """Called on read: transform ``value`` with the supplied function."""
        return self.func(value)
    def to_internal_value(self, data):
        """Called on write; must be overridden (base class raises).
        Here the raw data is returned unchanged."""
        return data
class FormatAddress:
    """Helpers for validating and formatting province/city/county addresses."""

    region = REGION  # region code -> region name mapping

    @classmethod
    def check_code(cls, code_list: list):
        """Return True when every code in ``code_list`` is a known region code.

        Consistency fix: use ``cls.region`` (like get_region does) instead of
        reading the module-level REGION directly, and idiomatic ``in mapping``
        membership instead of ``in mapping.keys()``.
        """
        return all(code in cls.region for code in code_list)

    @classmethod
    def get_format_address(cls, province, city, county, address: str):
        """Prefix ``address`` with the province/city/county names when missing.

        :param province/city/county: region codes (int or digit string) or names
        :param address: the detail address to prefix
        """
        parts = []
        for name in (
            cls.get_region(province),
            cls.get_region(city),
            cls.get_region(county),
        ):
            # Municipalities repeat, e.g. province "北京" and city "北京市";
            # rstrip("市") collapses the two spellings so only one is kept.
            if name.rstrip("市") not in parts:
                parts.append(name)
        detail_address = "".join(parts)
        if detail_address not in address:
            address = detail_address + address
        return address

    @classmethod
    def get_region(cls, region):
        """Map a region code (int or digit string) to its name; strings that
        are not digits are assumed to already be names and pass through."""
        if isinstance(region, int):
            return cls.region.get(region, "")
        if isinstance(region, str):
            return cls.region.get(int(region), "") if region.isdigit() else region
        return ""
class Emoji:
    """Utilities for detecting/stripping emoji.

    "Emoji" here means any code point outside the Basic Multilingual Plane
    (U+10000 and above), which also covers rare astral CJK characters.
    """

    # Compiled once instead of on every call (the original recompiled the
    # pattern inside each method).
    _EMOJI_RE = re.compile(u"[\U00010000-\U0010ffff]")

    @staticmethod
    def filter_emoji(keyword):
        """Return ``keyword`` with all non-BMP (emoji) characters removed."""
        return Emoji._EMOJI_RE.sub(u"", keyword)

    @staticmethod
    def check_emoji(keyword):
        """Return True when ``keyword`` contains at least one non-BMP char."""
        return Emoji._EMOJI_RE.search(keyword) is not None
class NumGenerator:
    """Order-number generator.

    Number layout (19 chars): 2-digit shop-type prefix + 5-digit shop id +
    6-digit date (yymmdd) + 2-digit order type + 4-digit daily sequence.
    """

    @staticmethod
    def generate(shop_id: int, order_type: int) -> str:
        """Generate an order number.

        :param shop_id: shop primary key (zero-padded to 5 digits)
        :param order_type: order type: 1 = normal order, 5 = groupon order
        """
        today = datetime.date.today().strftime("%y%m%d")
        key = "num:{shop_id}:{order_type}:{today}".format(
            shop_id=shop_id, order_type=order_type, today=today
        )
        # Per-day sequence counter kept in redis; the 24h expiry resets the
        # sequence to 1 each day.
        redis_conn = get_redis_connection("num_generate")
        num = redis_conn.incr(key)
        redis_conn.expire(key, 3600 * 24)
        result = "{shop_type}{shop_id_fill}{today}{order_type}{num_fill}".format(
            shop_type=ORDER_SHOP_TYPE_PREFIX,
            shop_id_fill=str(shop_id).zfill(5),
            today=today,
            order_type=str(order_type).zfill(2),
            num_fill=str(num).zfill(4),
        )
        return result

    @staticmethod
    def decode(num: str) -> tuple:
        """Decode an order number.

        :param num: order number string
        :return: (shop_id, order_type) — shop_id is int; note order_type is
            returned as the raw string slice, not an int. TODO confirm whether
            callers expect an int here.
        """
        shop_id = num[2:7]
        order_type = num[13:15]
        return (int(shop_id), order_type)
class TimeFunc:
    @staticmethod
    def get_to_date_by_from_date(from_date, to_date, statistic_type):
        """Turn (from_date, to_date, statistic_type) into a half-open
        [start, end) datetime range.

        :param from_date: "%Y-%m-%d" string for type 1, "%Y-%m" for type 3
        :param to_date: same format; optional for type 1 (defaults to one day)
        :param statistic_type: 1 = by day, 3 = by month, 4 = by year (fixed
            all-inclusive range, no filtering yet)
        :return: (from_date, to_date) as datetime objects
        :raises ValueError: unsupported statistic type or malformed dates
        """
        if statistic_type not in [1, 3, 4]:
            raise ValueError("统计类型不受支持")
        try:
            if statistic_type == 1:
                from_date = datetime.datetime.strptime(from_date, "%Y-%m-%d")
                # Exclusive end bound: one day past the requested end date.
                if to_date:
                    to_date = datetime.datetime.strptime(
                        to_date, "%Y-%m-%d"
                    ) + datetime.timedelta(days=1)
                else:
                    to_date = from_date + datetime.timedelta(days=1)
            elif statistic_type == 3:
                from_date = datetime.datetime.strptime(from_date, "%Y-%m")
                to_date = datetime.datetime.strptime(to_date, "%Y-%m")
                # Advance to the first day of the following month (exclusive).
                if to_date.month + 1 > 12:
                    month = 1
                    year = to_date.year + 1
                else:
                    year = to_date.year
                    month = to_date.month + 1
                to_date = datetime.datetime(year=year, month=month, day=1)
            elif statistic_type == 4:
                from_date = datetime.datetime(year=2015, month=1, day=1)
                to_date = datetime.datetime(year=2100, month=1, day=1)
        except (TypeError, ValueError):
            # TypeError covers a None/empty to_date for monthly statistics,
            # which the original code let escape as an unrelated exception
            # instead of the documented ValueError.
            raise ValueError("日期格式传入错误,请检查")
        return from_date, to_date
class Baidu:
    @staticmethod
    def get_baidu_token():
        """Fetch a Baidu OAuth access_token, cached in redis for ~10 days.

        Returns the cached token when present; otherwise requests a new one
        with the client-credentials grant. Returns "" when the endpoint does
        not hand back a token.
        """
        redis_conn = get_redis_connection("default")
        access_token = redis_conn.get("wsc_baidu_token")
        if access_token:
            return access_token.decode("utf-8")
        url = "https://openapi.baidu.com/oauth/2.0/token?grant_type=client_credentials&client_id={}&client_secret={}".format(
            BAIDU_APIKEY, BAIDU_SECRETKEY
        )
        # NOTE(review): verify=False disables TLS certificate verification on
        # the token request — confirm this is really necessary.
        data = requests.get(
            url,
            headers={"Content-Type": "application/json; charset=UTF-8"},
            verify=False,
        ).json()
        access_token = data.get("access_token", "")
        if access_token:
            # Cache for 10 days (Baidu tokens are long-lived).
            redis_conn.setex("wsc_baidu_token", 60 * 60 * 24 * 10, access_token)
        return access_token
<file_sep>/wsc_django/wsc_django/utils/validators.py
"""自定义验证器存放"""
import re
from rest_framework import serializers
from order.constant import OrderDeliveryMethod, OrderPayType
from shop.constant import ShopVerifyActive, ShopVerifyType, ShopStatus
from staff.services import cal_all_roles_without_super, cal_all_permission
#########全局相关###########
def mobile_validator(value):
    """Validate a mainland-China mobile number (11 digits: 1[3-9]xxxxxxxxx).

    :raises serializers.ValidationError: when the value is not a valid number
    """
    # fullmatch instead of match: re.match only anchors the start of the
    # string, so the original accepted trailing garbage such as
    # "13812345678abc" or any longer string starting with a valid number.
    if not re.fullmatch(r'1[3-9]\d{9}', value):
        raise serializers.ValidationError("手机号格式不正确")
########商铺相关###########
def shop_status_validator(value):
    """Validate a shop status value (only NORMAL and REJECTED are allowed)."""
    if value not in (ShopStatus.NORMAL, ShopStatus.REJECTED):
        raise serializers.ValidationError("商铺状态有误")
def shop_verify_status_validator(value):
    """Validate a shop verification status (approved / checking / rejected)."""
    allowed = (
        ShopVerifyActive.YES,
        ShopVerifyActive.CHECKING,
        ShopVerifyActive.REJECTED,
    )
    if value not in allowed:
        raise serializers.ValidationError("商铺认证状态有误")
def shop_verify_type_validator(value):
    """Validate a shop verification *type* (enterprise / individual).

    The original docstring said "认证状态" (status) and the local list was
    named ..._status_list, but this validator checks the verification type.
    """
    shop_verify_type_list = [
        ShopVerifyType.ENTERPRISE,
        ShopVerifyType.INDIVIDUAL
    ]
    if value not in shop_verify_type_list:
        raise serializers.ValidationError("商铺认证类型有误")
########订单相关###########
def delivery_method_validator(value):
    """Validate an order delivery method (home delivery / customer pickup)."""
    if value not in (
        OrderDeliveryMethod.HOME_DELIVERY,
        OrderDeliveryMethod.CUSTOMER_PICK,
    ):
        raise serializers.ValidationError("配送方式有误")
def order_pay_type_validator(value):
    """Validate an order pay type (WeChat JSAPI / cash on delivery)."""
    if value not in (OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY):
        raise serializers.ValidationError("订单支付方式有误")
""" 微信模板消息异步任务 """
import copy
import sys
import urllib.parse
from celery import Celery
from django_redis import get_redis_connection
from wechatpy.client import WeChatClient
from user.services import list_openid_by_user_ids_and_appid
from order.selectors import get_order_detail_by_id_only_msg_notify
from wsc_django.utils.constant import DateFormat
from delivery.constant import DeliveryType
from order.constant import OrderDeliveryMethod, OrderType
from wsc_django.apps.settings import CELERY_BROKER, MP_APPID, MP_APPSECRET, WSC_HOST_NAME
# Debug mode is toggled by a "-n123" worker-name argument on the command line.
debug = False
if "-n123" in sys.argv:
    debug = True

app = Celery("wsc_tpl_msg", broker=CELERY_BROKER, backend="")
app.conf.timezone = "Asia/Shanghai"  # timezone
app.conf.worker_concurrency = 4  # task concurrency
app.conf.task_soft_time_limit = 300  # soft task time limit (seconds)
app.conf.worker_disable_rate_limits = True  # disable task rate limiting
app.conf.task_routes = {}  # task routing table; filled by register_celery

# Colour constants for template-message fields
COLOR_GREEN = "#44b549"
COLOR_RED = "#FF0000"
COLOR_BLUE = "#173177"
COLOR_BLACK = "#333"

# Template format types
ORDER_COMMIT = 1  # order submitted
ORDER_DELIVERY = 2  # order out for delivery
ORDER_FINISH = 3  # order finished
ORDER_REFUND = 4  # order refunded
REFUND_FAIL = 5  # automatic groupon refund failed

# Field layouts for each template type; the "{}" values are placeholders
# filled in by TplMsgStrategy._format().
TEMPLATES = {
    ORDER_COMMIT: {
        "first": {"value": "{}", "color": COLOR_GREEN},
        "keyword1": {"value": "{}", "color": COLOR_BLACK},
        "keyword2": {"value": "{}", "color": COLOR_BLACK},
        "keyword3": {"value": "{}", "color": COLOR_BLACK},
        "keyword4": {"value": "{}", "color": COLOR_BLACK},
        "remark": {"value": "{}", "color": COLOR_BLUE},
    },
    ORDER_DELIVERY: {
        "first": {"value": "{}", "color": COLOR_GREEN},
        "keyword1": {"value": "{}", "color": COLOR_BLACK},
        "keyword2": {"value": "{}", "color": COLOR_BLACK},
        "keyword3": {"value": "{}", "color": COLOR_BLACK},
        "keyword4": {"value": "{}", "color": COLOR_BLACK},
        "remark": {"value": "{}", "color": COLOR_BLUE},
    },
    ORDER_FINISH: {
        "first": {"value": "{}", "color": COLOR_GREEN},
        "keyword1": {"value": "{}", "color": COLOR_BLACK},
        "keyword2": {"value": "{}", "color": COLOR_BLACK},
        "remark": {"value": "{}", "color": COLOR_BLUE},
    },
    ORDER_REFUND: {
        "first": {"value": "{}", "color": COLOR_GREEN},
        "keyword1": {"value": "{}", "color": COLOR_BLACK},
        "keyword2": {"value": "{}", "color": COLOR_BLACK},
        "keyword3": {"value": "{}", "color": COLOR_BLACK},
        "keyword4": {"value": "{}", "color": COLOR_BLACK},
    },
    REFUND_FAIL: {
        "first": {"value": "{}", "color": COLOR_GREEN},
        "keyword1": {"value": "{}", "color": COLOR_BLACK},
        "keyword2": {"value": "{}", "color": COLOR_BLACK},
        "keyword3": {"value": "{}", "color": COLOR_BLACK},
        "keyword4": {"value": "{}", "color": COLOR_BLACK},
        "remark": {"value": "{}", "color": COLOR_BLUE},
    },
}
def register_celery():
    """Class-decorator factory that turns a TplMsgStrategy subclass's ``_run``
    into a named celery task.

    For class ``Foo`` it registers task "tpl_msg.sendFoo", attaches the task
    to the class as ``celery_send_Foo`` (invoked via ``.delay`` by ``send()``)
    and routes it to the "wsc_tpl_msg" queue.
    """
    def register(cls):
        cls_name = cls.__name__
        celery_func_name = "celery_send_{}".format(cls_name)
        celery_name = "tpl_msg.send{}".format(cls_name)
        # bind=True makes celery pass the task instance as the first argument,
        # which _run receives as ``self``.
        celery_func = app.task(bind=True, name=celery_name)(cls._run)
        setattr(cls, celery_func_name, celery_func)
        app.conf.task_routes[celery_name] = {"queue": "wsc_tpl_msg"}
        return cls
    return register
class TplMsgStrategy:
    """Strategy base class: holds no data itself, only formats subclass data
    into the dict structure WeChat's template-message API expects.

    :param _tpl_format: int, which layout in TEMPLATES this strategy uses
    :param _default_template_id: str, default template id (messages sent
        through Senguo's own official account)
    :param _template_id_short: str, short template id, mainly used to obtain
        the template id of a third-party official account
    :param _special_color: dict, per-field colour overrides
    """

    _tpl_format = None
    _default_template_id = None
    _template_id_short = None
    _special_color = None

    @classmethod
    def _format(cls, **kwargs):
        """Format the subclass-provided data into a WeChat-recognisable dict.

        :params **kwargs: forwarded to the subclass's _get()
        :return: (url, tpl) — the link url and the filled-in template dict
        """
        tpl = cls._get_tpl()
        url, data = cls._get(**kwargs)
        for k, v in data.items():
            tpl[k]["value"] = v
        if cls._special_color:
            tpl = cls._change_color(cls._special_color, tpl)
        return url, tpl

    @classmethod
    def _get_tpl(cls):
        """Fetch a fresh deep copy of this strategy's layout from TEMPLATES.

        :rtype: dict, the structure later sent to WeChat
        """
        tpl = copy.deepcopy(TEMPLATES.get(cls._tpl_format))
        if not tpl:
            raise NotImplementedError
        return tpl

    @classmethod
    def _get(cls, **kwargs):
        """Subclass hook providing the click-through url and field data.

        :return: url, str, the link attached to the template message
        :return: data, dict, field name -> display string
        :rtype: tuple, (url, data)
        """
        raise NotImplementedError

    @classmethod
    def _change_color(self, color, tpl):
        """Apply per-field colour overrides on top of the template layout.

        NOTE(review): declared @classmethod but the first parameter is named
        ``self``; it actually receives the class object.

        :param color: dict of field name -> colour to change
        :param tpl: dict, the template being built
        """
        for k, v in color.items():
            tpl[k]["color"] = v
        return tpl

    @classmethod
    def send(cls, **kwargs):
        """Enqueue the async send; kwargs must include ``order_id``."""
        cls_name = cls.__name__
        handler_method = getattr(cls, "celery_send_{}".format(cls_name))
        handler_method.delay(cls_name, **kwargs)

    def _run(self, cls_name, **kwargs):
        """Celery task body (``self`` is the bound celery task instance, not a
        strategy object). Resolves recipients, formats the payload and sends
        one template message per openid."""
        # Look the strategy class up by name in this module's namespace.
        cls = eval(cls_name)
        to_user_id_list, shop_id, new_kwargs = cls._middle_handler(
            **kwargs
        )
        touser_list, template_id, wechat_client = cls._get_mp_info(
            to_user_id_list, shop_id
        )
        url, tpl = cls._format(**new_kwargs)
        for touser in touser_list:
            try:
                ret = wechat_client.message.send_template(
                    user_id=touser, template_id=template_id, data=tpl, url=url
                )
                print(ret)
            except Exception as e:
                # NOTE(review): send failures are only printed, not
                # logged/retried — consider proper logging.
                print(str(e))

    @classmethod
    def _middle_handler(cls, **kwargs):
        """Subclass hook resolving recipients: returns
        (to_user_id_list, shop_id, kwargs-for-_get)."""
        raise NotImplementedError

    @classmethod
    def _get_mp_info(cls, to_user_id_list, shop_id):
        """Resolve recipients' wx openids and build a WeChatClient with a
        redis-cached access token."""
        app_id = MP_APPID
        app_secret = MP_APPSECRET
        template_id = cls._default_template_id
        # Senguo's own official account is shared with the retail system, so
        # the access token is cached under a shared redis key.
        redis_conn = get_redis_connection("default")
        access_token = redis_conn.get("access_token")
        access_token = access_token.decode("utf-8") if access_token else None
        wechat_client = WeChatClient(
            appid=app_id, secret=app_secret, access_token=access_token, timeout=5
        )
        if not access_token:
            access_token = wechat_client.fetch_access_token()
            redis_conn.setex("access_token", 3600, access_token.get("access_token"))
        user_openid_list = list_openid_by_user_ids_and_appid(
            to_user_id_list, app_id
        )
        touser_list = [user_openid.wx_openid for user_openid in user_openid_list]
        return touser_list, template_id, wechat_client
# 普通订单提交成功通知
@register_celery()
class OrderCommitTplMsg(TplMsgStrategy):
    """Template message sent when a normal order is submitted successfully."""

    _tpl_format = ORDER_COMMIT
    # Production vs debug template ids (debug uses the test official account).
    _default_template_id = (
        "4QuRCzRuxVFWuz1gw8hHXlAaJZL4H2lLAyPXNr1MXIs"
        if not debug
        else "j33gYWAno6Q0_tqzWI40ZoAj3m39TEp-seWH_biCdBs"
    )
    _template_id_short = "OPENTM410958953"

    @classmethod
    def _middle_handler(cls, **kwargs):
        """Resolve the recipient (the order's customer); raises ValueError
        when the order cannot be loaded."""
        order_id = kwargs["order_id"]
        res, order = get_order_detail_by_id_only_msg_notify(order_id)
        if not res:
            raise ValueError("订单不存在")
        to_user_id_list = [order.customer.user.id]
        shop_id = order.shop.id
        new_kwargs = {"order": order}
        return to_user_id_list, shop_id, new_kwargs

    @classmethod
    def _get(cls, **kwargs):
        """Build the message url and keyword fields from the order."""
        order = kwargs["order"]
        # One "name, price*qty" entry per order line; the "*qty" suffix is
        # omitted for single-unit lines.
        products = []
        for order_detail in order.order_details:
            product = order_detail.product
            products.append(
                "%s, %s*%s"
                % (
                    product.name,
                    round(float(order_detail.price_net), 2),
                    int(order_detail.quantity_net),
                )
                if order_detail.quantity_net > 1
                else "%s, %s"
                % (product.name, round(float(order_detail.price_net), 2))
            )
        data = {
            "first": "您的订单提交成功!",
            "keyword1": order.shop.shop_name,
            "keyword2": order.update_at.strftime(DateFormat.TIME),
            "keyword3": "[" + ",".join(products) + "]",
            "keyword4": str(round(float(order.total_amount_net), 2)),
            "remark": "订单{}正在准备中,点击查看订单详情".format(order.order_num),
        }
        # Frontend SPA route; must track changes to the mall frontend routing.
        url = urllib.parse.urljoin(
            WSC_HOST_NAME,
            "/mall/?#/{}/orderDetail?id={}".format(order.shop.shop_code, order.id),
        )
        return url, data
# # 拼团成功的模板消息
# @register_celery()
# class GrouponOrderSuccessAttendTplMsg(TplMsgStrategy):
# """拼团成功的模板消息"""
#
# _tpl_format = ORDER_COMMIT
# _default_template_id = (
# "4QuRCzRuxVFWuz1gw8hHXlAaJZL4H2lLAyPXNr1MXIs"
# if not debug
# else "j33gYWAno6Q0_tqzWI40ZoAj3m39TEp-seWH_biCdBs"
# )
# _template_id_short = "OPENTM410958953"
#
# @classmethod
# def _middle_handler(cls, **kwargs):
# order_id = kwargs["order_id"]
# res, order = get_order_detail_by_id_only_msg_notify(order_id)
# if not res:
# raise ValueError("订单不存在")
# to_user_id_list = [order.customer.user.id]
# shop_id = order.shop.id
# new_kwargs = {"order": order}
#
# return to_user_id_list, shop_id, new_kwargs
#
# @classmethod
# def _get(cls, **kwargs):
# order = kwargs["order"]
#
# products = []
# for order_detail in order.order_details:
# product = order_detail.product
# products.append(
# "%s, %s*%s"
# % (
# product.name,
# round(float(order_detail.price_net), 2),
# int(order_detail.quantity_net),
# )
# if order_detail.quantity_net > 1
# else "%s, %s"
# % (product.name, round(float(order_detail.price_net), 2))
# )
# data = {
# "first": "拼团成功!",
# "keyword1": order.shop.shop_name,
# "keyword2": order.create_time.strftime(DateFormat.TIME),
# "keyword3": "[" + ",".join(products) + "]",
# "keyword4": str(round(float(order.total_amount_net), 2)),
# "remark": "订单{}正在准备中,点击查看订单详情".format(order.order_num),
# }
# # 前端路由,前端路由变了这里有相应的改变
# url = urllib.parse.urljoin(
# WSC_HOST_NAME,
# "/mall/?#/{}/orderDetail?id={}".format(order.shop.shop_code, order.id),
# )
# return url, data
# Template message: order out for delivery / ready for pickup.
@register_celery()
class OrderDeliveryTplMsg(TplMsgStrategy):
    """Template-message notification for order delivery."""

    _tpl_format = ORDER_DELIVERY
    # Production vs. debug template ids for the same short template.
    _default_template_id = (
        "bpnc40s6mmT30y-sgU4sEEqOEzKoP485IWSzzBLdHkk"
        if not debug
        else "O9FU1v95T2VTjm1p0un2duxvmVuPoCF4JyK1uvTaSkc"
    )
    _template_id_short = "OPENTM207710423"

    @classmethod
    def _middle_handler(cls, **kwargs):
        # Notify the customer who placed the order.
        order_id = kwargs["order_id"]
        res, order = get_order_detail_by_id_only_msg_notify(order_id)
        if not res:
            raise ValueError("订单不存在")
        to_user_id_list = [order.customer.user.id]
        shop_id = order.shop.id
        new_kwargs = {"order": order}
        return to_user_id_list, shop_id, new_kwargs

    @classmethod
    def _get(cls, **kwargs):
        order = kwargs["order"]
        # Wording depends on the fulfilment mode: customer pickup, staff
        # delivery, or third-party express shipment.
        data = {
            "first": "您有一笔订单已完成准备,等待自提"
            if order.delivery_method == OrderDeliveryMethod.CUSTOMER_PICK
            else "您有一笔订单已安排配送"
            if order.delivery.delivery_type == DeliveryType.StaffDelivery
            else "您有一笔订单已安排发货",
            "keyword1": order.create_time.strftime(DateFormat.TIME),
            "keyword2": order.shop.shop_name,
            "keyword3": order.order_num,
            "keyword4": order.address.full_address,
            "remark": "自提时段:%s" % order.delivery_period_text
            if order.delivery_method == OrderDeliveryMethod.CUSTOMER_PICK
            else "商家配送"
            if order.delivery.delivery_type == DeliveryType.StaffDelivery
            else "%s %s" % (order.delivery.company, order.delivery.express_num),
        }
        # Front-end route — must be kept in sync with the mall SPA router.
        url = urllib.parse.urljoin(
            WSC_HOST_NAME,
            "/mall/?#/{}/orderDetail?id={}".format(order.shop.shop_code, order.id),
        )
        return url, data
# Template message: order finished.
@register_celery()
class OrderFinishTplMsg(TplMsgStrategy):
    """Template-message notification for a completed order."""

    _tpl_format = ORDER_FINISH
    # Production vs. debug template ids for the same short template.
    _default_template_id = (
        "Bc9bSFi2M2_R39k6JnojeUYf3tuISaYtZ0qqsNKcUM0"
        if not debug
        else "qKBBLSOPmVfTtzvVageD0EYkU8nrTyHS8IzPm1rh7CU"
    )
    _template_id_short = "OPENTM202521011"

    @classmethod
    def _middle_handler(cls, **kwargs):
        # Notify the customer who placed the order.
        order_id = kwargs["order_id"]
        res, order = get_order_detail_by_id_only_msg_notify(order_id)
        if not res:
            raise ValueError("订单不存在")
        to_user_id_list = [order.customer.user.id]
        shop_id = order.shop.id
        new_kwargs = {"order": order}
        return to_user_id_list, shop_id, new_kwargs

    @classmethod
    def _get(cls, **kwargs):
        order = kwargs["order"]
        data = {
            "first": "您在【{}】有一笔订单已完成".format(order.shop.shop_name),
            "keyword1": order.order_num,
            "keyword2": order.update_at.strftime(DateFormat.TIME),
            "remark": "可点击查看订单详情",
        }
        # Front-end route — must be kept in sync with the mall SPA router.
        url = urllib.parse.urljoin(
            WSC_HOST_NAME,
            "/mall/?#/{}/orderDetail?id={}".format(order.shop.shop_code, order.id),
        )
        return url, data
# Template message: order refunded. Not sent for cash-on-delivery orders.
@register_celery()
class OrderRefundTplMsg(TplMsgStrategy):
    """Template-message notification for a refunded order."""

    _tpl_format = ORDER_REFUND
    # Production vs. debug template ids for the same short template.
    _default_template_id = (
        "G7sbLBTNGj_AENUiG9C53ShAWzUCC9SDh2B20lQ_Nvs"
        if not debug
        else "vwoSG7HipnL3q1Eff1P0RYeHfWuvs8cjbCKGO-gQxtA"
    )
    _template_id_short = "OPENTM200565278"

    @classmethod
    def _middle_handler(cls, **kwargs):
        # Notify the customer who placed the order.
        order_id = kwargs["order_id"]
        res, order = get_order_detail_by_id_only_msg_notify(order_id)
        if not res:
            raise ValueError("订单不存在")
        to_user_id_list = [order.customer.user.id]
        shop_id = order.shop.id
        new_kwargs = {"order": order}
        return to_user_id_list, shop_id, new_kwargs

    @classmethod
    def _get(cls, **kwargs):
        order = kwargs["order"]
        data = {
            "first": "您在【{}】有一笔订单已退款".format(order.shop.shop_name),
            "keyword1": order.order_num,
            "keyword2": str(round(float(order.total_amount_net), 2)),
            "keyword3": "微信支付",
            "keyword4": "立即到账",
        }
        # Front-end route — must be kept in sync with the mall SPA router.
        url = urllib.parse.urljoin(
            WSC_HOST_NAME,
            "/mall/?#/{}/orderDetail?id={}".format(order.shop.shop_code, order.id),
        )
        return url, data
# # 拼团失败自动退款消息通知
# @register_celery()
# class GrouponOrderFailAttendTplMsg(TplMsgStrategy):
# """拼团失败消息通知模板"""
#
# _tpl_format = ORDER_REFUND
# _default_template_id = (
# "G7sbLBTNGj_AENUiG9C53ShAWzUCC9SDh2B20lQ_Nvs"
# if not debug
# else "vwoSG7HipnL3q1Eff1P0RYeHfWuvs8cjbCKGO-gQxtA"
# )
# _template_id_short = "OPENTM200565278"
#
# @classmethod
# def _middle_handler(cls, session, **kwargs):
# order_id = kwargs["order_id"]
# res, order = get_order_detail_by_id_only_msg_notify(session, order_id)
# if not res:
# raise ValueError("订单不存在")
# to_user_id_list = [order.customer.user_id]
# shop_id = order.shop.id
# new_kwargs = {"order": order}
#
# return to_user_id_list, shop_id, new_kwargs
#
# @classmethod
# def _get(cls, **kwargs):
# order = kwargs["order"]
# assert order.order_type == OrderType.GROUPON
#
# data = {
# "first": "您在【{}】有一笔订单拼团失败,已为您安排退款".format(order.shop.shop_name),
# "keyword1": order.num,
# "keyword2": str(round(float(order.total_amount_net), 2)),
# "keyword3": "微信支付",
# "keyword4": "立即到账",
# }
# # 前端路由,前端路由变了之后这里要相应改变
# url = urllib.parse.urljoin(
# WSC_HOST_NAME,
# "/mall/?#/{}/orderDetail?id={}".format(order.shop.shop_code, order.id),
# )
# return url, data
# # 拼团订单自动退款失败消息通知
# @register_celery()
# class GrouponOrderRefundFailTplMsg(TplMsgStrategy):
# """拼团订单自动退款失败消息通知"""
#
# _tpl_format = REFUND_FAIL
# _default_template_id = (
# "CEWnYIwVJGhRhe-1gpszBVZ7bfVj8TRxechU4tKabcg"
# if not debug
# else "chiC0Q7CYSFYWDd0QhIirfoOJW9uVzdTJmQruP4b0kM"
# )
# _template_id_short = "OPENTM412546294"
#
# @classmethod
# def _middle_handler(cls, session, **kwargs):
# order_id = kwargs["order_id"]
# res, order = get_order_detail_by_id_only_msg_notify(session, order_id)
# if not res:
# raise ValueError("订单不存在")
# shop_id = order.shop_id
# staff_list = list_staff_by_shop_id_with_user(session, shop_id)
# to_user_id_list = [staff.user_id for staff in staff_list]
# new_kwargs = {"order": order}
#
# return to_user_id_list, shop_id, new_kwargs
#
# @classmethod
# def _get(cls, **kwargs):
# order = kwargs["order"]
#
# data = {
# "first": "订单拼团失败,因账号余额不足,无法自动退款",
# "keyword1": order.num,
# "keyword2": order.update_at.strftime(DateFormat.TIME),
# "keyword3": str(round(float(order.total_amount_net), 2)),
# "keyword4": "余额不足",
# "remark": "请在电脑商户后台-订单板块查看处理",
# }
# url = None
#
# return url, data
<file_sep>/wsc_django/wsc_django/apps/delivery/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the delivery app.

    Do not edit schema here; create a new migration for any change.
    Creates: Delivery, DeliveryConfig, PickPeriodConfigLine.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Delivery',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('delivery_type', models.SmallIntegerField(default=2, verbose_name='配送方式')),
                ('company', models.CharField(max_length=32, verbose_name='快递公司,仅在配送方式为快递时才有')),
                ('express_num', models.CharField(max_length=32, verbose_name='快递单号,仅在配送方式为快递时才有')),
            ],
            options={
                'verbose_name': '配送记录',
                'verbose_name_plural': '配送记录',
                'db_table': 'delivery',
            },
        ),
        migrations.CreateModel(
            name='DeliveryConfig',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('home_on', models.BooleanField(default=True, verbose_name='配送模式是否开启')),
                ('home_minimum_order_amount', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='配送模式起送金额')),
                ('home_delivery_amount', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='配送模式配送费')),
                ('home_minimum_free_amount', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='配送模式免配送费最小金额')),
                ('pick_on', models.BooleanField(default=True, verbose_name='自提模式是否开启')),
                ('pick_service_amount', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='自提模式服务费')),
                ('pick_minimum_free_amount', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='自提模式免服务费最小金额')),
                ('pick_today_on', models.BooleanField(default=True, verbose_name='今天自提是否开启')),
                ('pick_tomorrow_on', models.BooleanField(default=True, verbose_name='明天自提是否开启')),
            ],
            options={
                'verbose_name': '配送配置',
                'verbose_name_plural': '配送配置',
                'db_table': 'delivery_config',
            },
        ),
        migrations.CreateModel(
            name='PickPeriodConfigLine',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('from_time', models.CharField(max_length=16, verbose_name='自提起始时间')),
                ('to_time', models.CharField(max_length=16, verbose_name='自提终止时间')),
                ('delivery_config', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='delivery.deliveryconfig', verbose_name='订单配送配置对象')),
            ],
            options={
                'verbose_name': '自提时间段',
                'verbose_name_plural': '自提时间段',
                'db_table': 'pick_period_config_line',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/dashboard/services.py
import datetime
import json
import numpy as np
import pandas as pd
from customer.models import Customer
from dashboard.constant import StatisticType
from order.constant import OrderStatus
from order.models import Order, OrderDetail
from product.models import Product
def list_shop_dashboard_data(
    shop_id: int,
    from_date: datetime.date,
    to_date: datetime.date,
    statistic_type: int,
):
    """Aggregate shop operating stats: order counts/revenue and new customers.

    :param shop_id: shop to aggregate for
    :param from_date: inclusive start date
    :param to_date: inclusive end date
    :param statistic_type: bucket granularity, 1: day 3: month 4: year
    :return: (True, list of per-period row dicts)
    """
    # Pick the strftime bucket key for the requested granularity.
    if statistic_type == StatisticType.DAILY:
        fmt = "%Y-%m-%d"
    elif statistic_type == StatisticType.MONTHLY:
        fmt = "%Y-%m"
    else:
        fmt = "%Y"
    # Orders: only paid/confirmed/finished/refunded count (unpaid excluded).
    orders = (
        Order.objects.filter(
            shop_id=shop_id,
            create_date__range=[from_date, to_date],
            order_status__in=[
                OrderStatus.PAID,
                OrderStatus.CONFIRMED,
                OrderStatus.FINISHED,
                OrderStatus.REFUNDED,
            ]
        )
        .all()
    )
    orders = list(orders.values("create_date", "order_status", "total_amount_net"))
    orders_frame = pd.DataFrame(
        orders, columns=["create_date", "status", "total_amount_net"]
    )
    # Valid (non-refunded) amount and count per period.
    # Use string aggfuncs ("sum"/"count") instead of the builtins sum/len:
    # consistent with list_order_dashboard_data below and avoids pandas'
    # deprecation of builtin callables as aggregation functions.
    orders_agg_valid = (
        orders_frame.loc[orders_frame.status != OrderStatus.REFUNDED]
        .groupby(orders_frame["create_date"].apply(lambda x: x.strftime(fmt)))
        .agg(
            order_amount_valid=pd.NamedAgg(column="total_amount_net", aggfunc="sum"),
            order_count_valid=pd.NamedAgg(column="status", aggfunc="count"),
        )
    )
    orders_agg_all = orders_frame.groupby(
        orders_frame["create_date"].apply(lambda x: x.strftime(fmt))
    ).agg(order_count_all=pd.NamedAgg(column="status", aggfunc="count"))
    orders_agg = pd.merge(
        orders_agg_all, orders_agg_valid, how="left", on="create_date"
    ).fillna(0)
    # New customers per period.
    customers = (
        Customer.objects.filter(
            shop_id=shop_id,
            create_date__range=[from_date, to_date],
        )
        .all()
    )
    customers = list(customers.values("create_date", "id"))
    customers_frame = pd.DataFrame(customers, columns=["create_date", "id"])
    customers_agg = customers_frame.groupby(
        customers_frame["create_date"].apply(lambda x: x.strftime(fmt))
    ).agg(customer_new_count=pd.NamedAgg(column="id", aggfunc="count"))
    shop_agg = pd.merge(orders_agg, customers_agg, how="outer", on="create_date")
    if shop_agg.empty:
        return True, []
    # Average order value; guard against division by zero.
    shop_agg["amount_per_order"] = shop_agg.apply(
        lambda x: float(x["order_amount_valid"]) / x["order_count_valid"]
        if x["order_count_valid"] > 0
        else 0,
        axis=1,
    )
    shop_agg.fillna(0, inplace=True)
    shop_agg.sort_index(ascending=False, inplace=True)
    # Type coercion and rounding for JSON output.
    shop_agg["order_count_all"] = shop_agg["order_count_all"].astype("int")
    shop_agg["order_amount_valid"] = (
        shop_agg["order_amount_valid"].astype("float").round(decimals=2)
    )
    shop_agg["order_count_valid"] = shop_agg["order_count_valid"].astype("int")
    shop_agg["customer_new_count"] = shop_agg["customer_new_count"].astype("int")
    shop_agg["amount_per_order"] = (
        shop_agg["amount_per_order"].astype("float").round(decimals=2)
    )
    return True, json.loads(shop_agg.to_json(orient="table"))["data"]
def list_order_dashboard_data(
    shop_id: int,
    from_date: datetime.date,
    to_date: datetime.date,
    statistic_type: int,
):
    """Aggregate order stats per period: all/valid/refunded counts & amounts.

    :param shop_id: shop to aggregate for
    :param from_date: inclusive start date
    :param to_date: inclusive end date
    :param statistic_type: bucket granularity, 1: day 3: month 4: year
    :return: (True, list of per-period row dicts)
    """
    # Pick the strftime bucket key for the requested granularity.
    if statistic_type == StatisticType.DAILY:
        fmt = "%Y-%m-%d"
    elif statistic_type == StatisticType.MONTHLY:
        fmt = "%Y-%m"
    else:
        fmt = "%Y"
    # Orders: only paid/confirmed/finished/refunded count (unpaid excluded).
    orders = (
        Order.objects.filter(
            shop_id=shop_id,
            create_date__range=[from_date, to_date],
            order_status__in=[
                OrderStatus.PAID,
                OrderStatus.CONFIRMED,
                OrderStatus.FINISHED,
                OrderStatus.REFUNDED,
            ]
        )
        .all()
    )
    orders = list(orders.values("create_date", "order_status", "total_amount_net"))
    orders_frame = pd.DataFrame(
        orders, columns=["create_date", "status", "total_amount_net"]
    )
    # Valid (non-refunded) amount and count per period.
    orders_agg_valid = (
        orders_frame.loc[orders_frame.status != OrderStatus.REFUNDED]
        .groupby(orders_frame["create_date"].apply(lambda x: x.strftime(fmt)))
        .agg(
            order_amount_valid=pd.NamedAgg(column="total_amount_net", aggfunc="sum"),
            order_count_valid=pd.NamedAgg(column="status", aggfunc="count"),
        )
    )
    # Totals (including refunded) per period.
    orders_agg_all = orders_frame.groupby(
        orders_frame["create_date"].apply(lambda x: x.strftime(fmt))
    ).agg(
        order_amount_all=pd.NamedAgg(column="total_amount_net", aggfunc="sum"),
        order_count_all=pd.NamedAgg(column="status", aggfunc="count"),
    )
    orders_agg = pd.merge(
        orders_agg_all, orders_agg_valid, how="left", on="create_date"
    ).fillna(0)
    if orders_agg.empty:
        return True, []
    # Refunds are derived as the difference between totals and valid values.
    orders_agg["order_amount_refund"] = orders_agg.apply(
        lambda x: float(x["order_amount_all"]) - float(x["order_amount_valid"]), axis=1
    )
    orders_agg["order_count_refund"] = orders_agg.apply(
        lambda x: float(x["order_count_all"]) - float(x["order_count_valid"]), axis=1
    )
    orders_agg.fillna(0, inplace=True)
    orders_agg.sort_index(ascending=False, inplace=True)
    # Type coercion and rounding for JSON output.
    orders_agg["order_count_all"] = orders_agg["order_count_all"].astype("int")
    orders_agg["order_count_valid"] = orders_agg["order_count_valid"].astype("int")
    orders_agg["order_count_refund"] = orders_agg["order_count_refund"].astype("int")
    orders_agg["order_amount_all"] = (
        orders_agg["order_amount_all"].astype("float").round(decimals=2)
    )
    orders_agg["order_amount_valid"] = (
        orders_agg["order_amount_valid"].astype("float").round(decimals=2)
    )
    orders_agg["order_amount_refund"] = (
        orders_agg["order_amount_refund"].astype("float").round(decimals=2)
    )
    return True, json.loads(orders_agg.to_json(orient="table"))["data"]
def list_product_dashboard_data(
    shop_id: int, from_date: datetime.date, to_date: datetime.date
):
    """Aggregate per-product sales stats: counts, paid amounts, rebuy rate.

    :param shop_id: shop to aggregate for
    :param from_date: inclusive start date
    :param to_date: inclusive end date
    :return: (True, list of per-product row dicts)
    """
    product_details = (
        OrderDetail.objects.filter(
            shop_id=shop_id,
            create_date__range=[from_date, to_date],
        )
        .all()
    )
    product_details = list(product_details.values(
        "amount_net",
        "quantity_net",
        "product_id",
        "customer_id",
        "status",
    ))
    products_frame = pd.DataFrame(
        product_details,
        columns=[
            "order_amount_net",
            "order_quantity_net",
            "product_id",
            "customer_id",
            "status",
        ],
    )
    # All order lines per product (any status).
    orders_agg_all = products_frame.groupby("product_id").agg(
        order_count=pd.NamedAgg(column="status", aggfunc="count")
    )
    if orders_agg_all.empty:
        return True, []
    # Paid (not yet refunded) lines per product.
    orders_agg_paid = (
        products_frame.loc[
            (products_frame["status"] >= OrderStatus.PAID)
            & (products_frame["status"] < OrderStatus.REFUNDED)
        ]
        .groupby("product_id")
        .agg(
            order_count_paid=pd.NamedAgg(column="status", aggfunc="count"),
            order_amount_paid=pd.NamedAgg(column="order_amount_net", aggfunc="sum"),
            order_quantity_net=pd.NamedAgg(column="order_quantity_net", aggfunc="sum"),
        )
    )
    products_agg = pd.merge(
        orders_agg_all, orders_agg_paid, how="left", on="product_id"
    ).fillna(0)
    # Rebuy-rate analysis: per customer/product, 1 = bought more than once,
    # 0 = bought exactly once, NaN = never bought (paid).
    # NOTE(review): np.NaN was removed in NumPy 2.0 (use np.nan), and
    # DataFrame.applymap is deprecated in pandas >= 2.1 (use DataFrame.map)
    # — update when the dependency versions are bumped.
    customer_pivot = (
        products_frame.loc[products_frame["status"] >= OrderStatus.PAID]
        .pivot_table(
            index="customer_id", columns="product_id", values="status", aggfunc="count"
        )
        .applymap(lambda x: 1 if x > 1 else 0 if x == 1 else np.NaN)
    )
    # Share of repeat buyers among buyers, per product.
    customers_agg = customer_pivot.sum() / customer_pivot.count()
    if customers_agg.empty:
        products_agg["rebuy_rate"] = [0 for i in range(products_agg.index.size)]
    else:
        products_agg["rebuy_rate"] = customers_agg
    # Attach product names and group names.
    product_ids = [l["product_id"] for l in product_details]
    products = (
        Product.objects.filter(
            id__in=product_ids
        )
        .all()
    )
    products = list(products.values(
        "id",
        "name",
        "group_id",
        "group__name",
    ))
    products_info_frame = pd.DataFrame(
        products,
        columns=[
            "product_id",
            "product_name",
            "product_group_id",
            "product_group_name",
        ],
    )
    products_agg = pd.merge(products_agg, products_info_frame, on="product_id")
    products_agg.fillna(0, inplace=True)
    # Type coercion and rounding for the API payload.
    products_agg["order_amount_paid"] = (
        products_agg["order_amount_paid"].astype("float").round(decimals=2)
    )
    products_agg["rebuy_rate"] = (
        products_agg["rebuy_rate"].astype("float").round(decimals=2)
    )
    products_agg["order_quantity_net"] = (
        products_agg["order_quantity_net"].astype("float").round(decimals=2)
    )
    return True, products_agg.to_dict(orient="records")<file_sep>/wsc_django/wsc_django/apps/promotion/abstract.py
class AbstractPromotionRule:
    """Abstract promotion rule — the interface every concrete promotion
    rule implements."""

    def limit(self, *args, **kwargs):
        """Restrictive rule: the lower bound of a benefit, e.g. the minimum
        order amount for delivery or a per-user coupon cap."""
        raise NotImplementedError

    def calculate(self, *args, **kwargs):
        """Computational rule: return the discount amount granted by the
        current promotion."""
        raise NotImplementedError
class PromotionEventTemplate:
""" 促销活动事件模板类,用于事件发布 """
_event_type = None
def __init__(self, content: dict):
self.content = content
def get_event(self) -> dict:
if not self._event_type:
raise NotImplementedError("必须由子类覆盖事件类型")
self.content.update({"event_type": self._event_type})
return self.content
def __getattr__(self, name):
if self.content.get(name) is not None:
return self.content[name]
raise AttributeError(
"{.__name__!r} object has no attribute {!r}".format(type(self), name)
)<file_sep>/wsc_django/wsc_django/apps/groupon/interface.py
import datetime
from django.utils.timezone import make_aware
from groupon.models import Groupon, GrouponAttend
from celery_tasks.celery_auto_work.tasks import (
auto_publish_groupon,
auto_expire_groupon,
auto_fail_groupon_attend,
auto_cancel_order,
auto_validate_groupon_attend)
from order.selectors import list_unpay_order_by_groupon_attend_ids, list_order_by_groupon_attend_id
def publish_gruopon_interface(groupon: Groupon):
    """
    Schedule a groupon for publication at its start time.

    NOTE(review): "gruopon" is a typo for "groupon"; the name is kept
    because callers import it.

    :param groupon: the groupon to publish
    :return:
    """
    countdown = (groupon.from_datetime - make_aware(datetime.datetime.now())).total_seconds()
    # Groupons whose start time already passed are published immediately
    # (countdown clamped to 0); future ones are delayed until their start.
    auto_publish_groupon.apply_async(
        args=(groupon.shop.id, groupon.id),
        countdown=int(countdown) if countdown > 0 else 0,
    )
def expire_groupon_interface(groupon: Groupon):
    """
    Schedule a groupon to expire at its end time.

    :param groupon: the groupon to expire
    :return:
    """
    countdown = (groupon.to_datetime - make_aware(datetime.datetime.now())).total_seconds()
    # Already-ended groupons expire immediately (countdown clamped to 0).
    auto_expire_groupon.apply_async(
        args=(groupon.shop.id, groupon.id),
        countdown=int(countdown) if countdown > 0 else 0,
    )
def immediate_fail_groupon_attend_interface(shop_id: int, groupon_attend: GrouponAttend):
    """Fail a groupon attend right away (merchant disabled the activity)."""
    auto_fail_groupon_attend.apply_async(
        args=[shop_id, groupon_attend.id, "商家停用该活动"]
    )
def list_unpay_order_by_groupon_attend_ids_interface(groupon_attend_ids: list):
    """List unpaid orders for the given groupon-attend ids."""
    return list_unpay_order_by_groupon_attend_ids(groupon_attend_ids)
def immediate_cancel_order_interface(shop_id: int, order_id: int):
    """Cancel an order immediately via the async worker."""
    auto_cancel_order.apply_async(args=[shop_id, order_id])
def list_order_by_groupon_attend_id_interface(shop_id: int, groupon_attend_id: int):
    """List all orders belonging to one groupon attend."""
    return list_order_by_groupon_attend_id(shop_id, groupon_attend_id)
def sync_success_groupon_attend_interface(shop_id: int, groupon_attend_id: int):
    """Force a groupon attend to be validated as successful (async)."""
    auto_validate_groupon_attend.apply_async(
        args=[shop_id, groupon_attend_id], kwargs={"force": True}
    )
def delay_fail_groupon_attend_interface(shop_id: int, groupon_attend: GrouponAttend):
    """Schedule a groupon attend to auto-fail when its validity window ends."""
    countdown = (groupon_attend.to_datetime - make_aware(datetime.datetime.now())).total_seconds()
    reason = "超过开团有效时间"
    # Already-expired attends fail immediately (countdown clamped to 0).
    auto_fail_groupon_attend.apply_async(
        args=[shop_id, groupon_attend.id, reason],
        countdown=int(countdown) if countdown > 0 else 0,
    )<file_sep>/wsc_django/wsc_django/apps/staff/constant.py
class StaffStatus:
    """Staff record status."""
    NORMAL = 1
    DELETED = 0
class StaffRole:
    """Staff roles (bit-flag values)."""
    SHOP_SUPER_ADMIN = 0xFF  # shop super admin; two bits reserved
    SHOP_ADMIN = 0x08  # shop admin
class StaffPermission:
    """Staff permission bit flags (combined with bitwise OR)."""
    # NOTE(review): "DASHBORD" is a typo for "DASHBOARD", kept because the
    # name is part of the public API.
    ADMIN_DASHBORD = 0x01
    ADMIN_ORDER = 0x02
    ADMIN_PRODUCT = 0x04
    ADMIN_CUSTOMER = 0x08
    ADMIN_PROMOTION = 0x10
    ADMIN_STAFF = 0x20
    ADMIN_CONFIG = 0x40
class StaffApplyStatus:
    """Staff application status."""
    # NOTE(review): "UNAPPlY" is a typo for "UNAPPLY", kept because the
    # name is part of the public API.
    UNAPPlY = 0  # not applied
    APPLYING = 1  # application pending
    PASS = 2  # approved
class StaffApplyExpired:
    """Whether a staff application is expired. When a staff member is
    deleted, all of their previous applications are expired so they may
    apply again; otherwise re-application is blocked."""
    YES = 1  # expired
    NO = 0  # not expired
<file_sep>/wsc_django/wsc_django/apps/product/serializers.py
from django.db import transaction
from pypinyin import slug
from rest_framework import serializers
from user.serializers import UserSerializer
from wsc_django.utils.constant import DateFormat
from wsc_django.utils.core import FuncField
from product.services import (
create_product,
create_product_group,
create_product_pictures,
update_product_storage_and_create_record,
delete_product_picture_by_product_id,
update_product_group
)
from storage.constant import (
ProductStorageRecordType,
ProductStorageRecordOperatorType,
)
class ProductGrouponSerializer(serializers.Serializer):
    """Serializer for a product's groupon activity info (read-only view)."""

    groupon_id = serializers.IntegerField(source="id", label="拼团id")
    price = FuncField(lambda value: round(float(value), 2), label="拼团价格")
    to_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动结束时间")
    groupon_type = serializers.IntegerField(label="拼团活动类型 1:普通 2:老带新")
    success_size = serializers.IntegerField(label="成团人数")
    quantity_limit = serializers.IntegerField(label="成团数量上限")
    succeeded_quantity = FuncField(lambda value: round(float(value), 2), label="已成团件数")
class ProductCreateSerializer(serializers.Serializer):
    """Serializer for creating a product.

    ``create`` runs inside a transaction: the product, its carousel
    pictures, and the initial storage record are created atomically.
    """

    product_id = serializers.IntegerField(source="id", read_only=True, label="货品id")
    name = serializers.CharField(max_length=15, min_length=1, required=True, label="货品名称")
    group_id = serializers.IntegerField(required=True, label="货品分组id")
    price = serializers.DecimalField(
        max_digits=13, decimal_places=4, required=True, min_value=0, label="货品单价"
    )
    storage = serializers.DecimalField(
        max_digits=13, decimal_places=4, required=True, min_value=0, label="货品库存"
    )
    code = serializers.CharField(allow_blank=True, required=False, label="货品编码")
    summary = serializers.CharField(
        allow_blank=True, max_length=20, min_length=0, required=False, label="货品简介"
    )
    pictures = serializers.ListField(
        child=serializers.CharField(required=False),
        allow_empty=True,
        required=False,
        label="货品轮播图",
    )
    description = serializers.CharField(required=False, allow_blank=True, label="图文描述")
    cover_image_url = serializers.CharField(required=True, label="首页图片")
    shop_id = serializers.IntegerField(required=True, label="商铺id")
    user_id = serializers.IntegerField(read_only=True, label="创建货品的用户id")

    def create(self, validated_data):
        """Create the product plus its pictures and initial storage record."""
        user = self.context["self"].current_user
        storage = validated_data.pop("storage")
        # Fix: "pictures" is optional (required=False), so pop with a default
        # instead of raising KeyError when it is omitted — mirrors
        # AdminProductSerializer.update().
        product_pictures = validated_data.pop("pictures", [])
        # Pinyin acronym, used for quick search by product name.
        validated_data["name_acronym"] = slug(validated_data["name"], separator="")
        with transaction.atomic():
            # Savepoint lets us roll back just this unit of work on failure.
            save_id = transaction.savepoint()
            try:
                # Create the product itself.
                product = create_product(validated_data, user.id)
                # Create carousel pictures, if any were supplied.
                if product_pictures:
                    create_product_pictures(product.id, product_pictures)
                # Set the initial storage and write the storage-change record.
                update_product_storage_and_create_record(
                    product,
                    user.id,
                    storage,
                    ProductStorageRecordType.MANUAL_MODIFY,
                    ProductStorageRecordOperatorType.STAFF,
                )
            except Exception as e:
                print(e)
                # Roll back to the savepoint and re-raise for the caller.
                transaction.savepoint_rollback(save_id)
                raise
            # Release the savepoint (the outer atomic commits the transaction).
            transaction.savepoint_commit(save_id)
        return product
class AdminProductsSerializer(serializers.Serializer):
    """Serializer for the admin-side product list (summary fields only)."""

    product_id = serializers.IntegerField(read_only=True, source="id", label="货品名")
    name = serializers.CharField(required=True, label="货品名")
    price = FuncField(lambda value: round(float(value), 2), label="货品价格")
    storage = FuncField(lambda value: round(float(value), 2), label="货品库存")
    status = serializers.IntegerField(read_only=True, label="货品状态")
    cover_image_url = serializers.CharField(required=False, label="货品封面图")
    promotion_type = serializers.IntegerField(required=False, label="商品活动信息 1: 普通 5: 拼团")
class AdminProductSerializer(AdminProductsSerializer):
    """Admin-side single-product serializer (detail view + update)."""

    group_id = serializers.IntegerField(required=False, label="货品分组id")
    group_name = serializers.CharField(read_only=True, label="货品分组名称")
    pictures = serializers.ListField(
        required=False, allow_empty=True, child=serializers.CharField(), label="货品轮播图"
    )
    code = serializers.CharField(required=False, allow_blank=True, label="货品编码")
    summary = serializers.CharField(required=False, allow_blank=True, label="货品简介")
    description = serializers.CharField(required=False, allow_blank=True, label="货品描述")
    groupon = ProductGrouponSerializer(required=False, label="商品拼团信息")

    def update(self, instance, validated_data):
        """Update product fields, pictures and storage atomically."""
        shop = self.context["self"].current_shop
        user = self.context["self"].current_user
        validated_data["shop_id"] = shop.id
        product_pictures = validated_data.pop("pictures", None)
        # NOTE(review): raises KeyError when "storage" is absent from the
        # payload — confirm callers always send it.
        new_storage = validated_data.pop("storage")
        with transaction.atomic():
            # Savepoint lets us roll back just this unit of work on failure.
            save_id = transaction.savepoint()
            try:
                # Apply the plain field updates.
                for k, v in validated_data.items():
                    setattr(instance, k, v)
                instance.save()
                if product_pictures:
                    # Replace carousel pictures: delete old rows, insert new.
                    delete_product_picture_by_product_id(instance.id)
                    create_product_pictures(instance.id, product_pictures)
                # Apply the storage delta and record the change, if any.
                change_storage = new_storage - instance.storage
                if change_storage != 0:
                    update_product_storage_and_create_record(
                        instance,
                        user.id,
                        change_storage,
                        ProductStorageRecordType.MANUAL_MODIFY,
                        ProductStorageRecordOperatorType.STAFF,
                    )
            except Exception as e:
                print(e)
                # Roll back to the savepoint and re-raise for the caller.
                transaction.savepoint_rollback(save_id)
                raise
            # Release the savepoint (the outer atomic commits the transaction).
            transaction.savepoint_commit(save_id)
        return instance
class AdminProductGroupSerializer(serializers.Serializer):
    """Admin-side product-group serializer (create/update/list)."""

    group_id = serializers.IntegerField(read_only=True, required=False, source="id", label="分组id")
    name = serializers.CharField(required=True, min_length=1, max_length=10, label="分组名称")
    description = serializers.CharField(required=False, allow_blank=True, min_length=0, max_length=50, label="分组描述")
    default = serializers.IntegerField(required=False, label="默认分组")
    product_count = serializers.IntegerField(read_only=True, label="分组下的货品数量")
    products = AdminProductSerializer(read_only=True, many=True, label="分组商品列表")

    def create(self, validated_data):
        """Create a product group for the current shop/user."""
        shop = self.context["self"].current_shop
        user = self.context["self"].current_user
        product_group = create_product_group(shop.id, user.id, validated_data)
        return product_group

    def update(self, instance, validated_data):
        """Update an existing product group's info."""
        user = self.context["self"].current_user
        shop = self.context["self"].current_shop
        instance = update_product_group(instance, user.id, shop.id, **validated_data)
        return instance
class AdminProductSaleRecordSerializer(serializers.Serializer):
    """Admin-side serializer for a product's sale records (read-only)."""

    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="创建时间")
    order_num = serializers.CharField(label="订单号")
    price_net = FuncField(lambda value: round(float(value), 2), label="单价(优惠后)")
    quantity_net = FuncField(lambda value: round(float(value), 2), label="量(优惠后)")
    amount_net = FuncField(lambda value: round(float(value), 2), label="金额(优惠后)")
    customer = UserSerializer(label="客户信息")
class MallProductSerializer(AdminProductSerializer):
    """Mall-side product serializer — same shape as the admin one."""
    pass  # inherits everything from the parent
class MallProductGroupSerializer(AdminProductGroupSerializer):
    """Mall-side product-group serializer — same shape as the admin one."""
    pass  # inherits everything from the parent
<file_sep>/wsc_django/wsc_django/apps/logs/models.py
from django.db import models
from user.models import User
from wsc_django.utils.models import TimeBaseModel
from logs.constant import (
OperateLogModule,
ORDER_LOG_TYPE,
CONFIG_LOG_TYPE,
PROMOTION_LOG_TYPE,
PRODUCT_LOG_TYPE,
STAFF_LOG_TYPE)
class OperateLogUnify(TimeBaseModel):
    """Unified operation-log index: one row per operation, pointing at the
    module-specific log row via (operate_module, log_id)."""

    shop_id = models.IntegerField(null=False, verbose_name="商铺id")
    operator = models.ForeignKey(
        User, null=False, on_delete=models.CASCADE, db_constraint=False, verbose_name="操作人"
    )
    operate_time = models.DateTimeField(auto_now_add=True, null=False, verbose_name="操作时间")
    operate_module = models.SmallIntegerField(null=False, verbose_name="操作模块")
    log_id = models.IntegerField(null=False, verbose_name="子模块的操作记录id")

    class Meta:
        db_table = "operate_log_unify"
        verbose_name = "操作记录"
        verbose_name_plural = verbose_name

    @classmethod
    def get_operate_log_model(cls, module_id):
        # Map a module id to its concrete log model; returns None when unknown.
        return {
            OperateLogModule.CONFIG: ConfigLog,
            OperateLogModule.ORDER: OrderLog,
            OperateLogModule.STAFF: StaffLog,
            OperateLogModule.PRODUCT: ProductLog,
            OperateLogModule.PROMOTION: PromotionLog,
        }.get(module_id)
class LogBaseModel(TimeBaseModel):
    """Abstract base for module-specific operation logs."""

    shop_id = models.IntegerField(null=False, verbose_name="商铺id")
    operate_time = models.DateTimeField(auto_now_add=True, null=False, verbose_name="操作时间")
    operator = models.ForeignKey(
        User, null=False, on_delete=models.CASCADE, db_constraint=False, verbose_name="操作人"
    )
    operate_type = models.SmallIntegerField(null=False, verbose_name="操作类型")
    operate_content = models.CharField(max_length=512, default="", verbose_name="操作内容")

    class Meta:
        abstract = True  # abstract model: inherited only, no table is created

    @property
    def operate_module(self):
        """Module id of this log type — implemented by subclasses."""
        raise NotImplementedError

    @property
    def operate_type_text(self):
        """Human-readable operation type — implemented by subclasses."""
        raise NotImplementedError
class OrderLog(LogBaseModel):
    """Order operation log."""

    order_num = models.CharField(max_length=20, null=False, verbose_name="订单号")
    order_id = models.IntegerField(null=False, verbose_name="订单id")

    class Meta:
        db_table = "order_log"
        verbose_name = "订单日志"
        verbose_name_plural = verbose_name

    @property
    def operate_module(self):
        return OperateLogModule.ORDER

    @property
    def operate_type_text(self):
        return ORDER_LOG_TYPE.get(self.operate_type)
class ConfigLog(LogBaseModel):
    """Settings-module operation log."""

    class Meta:
        db_table = "config_log"
        verbose_name = "设置模块操作日志"
        verbose_name_plural = verbose_name

    @property
    def operate_module(self):
        return OperateLogModule.CONFIG

    @property
    def operate_type_text(self):
        return CONFIG_LOG_TYPE.get(self.operate_type)
class PromotionLog(LogBaseModel):
    """Operation log for the promotion module."""

    class Meta:
        db_table = "promotion_log"
        verbose_name = "玩法日志"
        verbose_name_plural = verbose_name

    @property
    def operate_module(self):
        return OperateLogModule.PROMOTION

    @property
    def operate_type_text(self):
        # Returns None for an unmapped operate_type.
        return PROMOTION_LOG_TYPE.get(self.operate_type)
class ProductLog(LogBaseModel):
    """Operation log for the product module."""

    class Meta:
        db_table = "product_log"
        verbose_name = "货品日志"
        verbose_name_plural = verbose_name

    @property
    def operate_module(self):
        return OperateLogModule.PRODUCT

    @property
    def operate_type_text(self):
        # Returns None for an unmapped operate_type.
        return PRODUCT_LOG_TYPE.get(self.operate_type)
class StaffLog(LogBaseModel):
    """Operation log for the staff module."""

    # Id of the staff member the action was performed ON (operator is in base).
    staff_id = models.IntegerField(null=False, verbose_name="被操作的员工ID")

    class Meta:
        db_table = "staff_log"
        verbose_name = "员工日志"
        verbose_name_plural = verbose_name

    @property
    def operate_module(self):
        return OperateLogModule.STAFF

    @property
    def operate_type_text(self):
        # Returns None for an unmapped operate_type.
        return STAFF_LOG_TYPE.get(self.operate_type)<file_sep>/wsc_django/wsc_django/apps/shop/models.py
from django.db import models
from user.models import User
from shop.constant import (
ShopStatus,
ShopVerifyActive,
ShopVerifyType,
ShopPayActive,
)
from wsc_django.utils.models import TimeBaseModel
class Shop(TimeBaseModel):
    """Shop model — one row per merchant shop; `super_admin` is the owner."""

    status = models.SmallIntegerField(
        null=False,
        default=ShopStatus.CHECKING,
        verbose_name="商铺状态 0: 已关闭 1: 正常,审核通过, 2: 审核中, 3: 已拒绝",
    )
    super_admin = models.ForeignKey(User, on_delete=models.CASCADE, verbose_name="商铺老板")
    shop_name = models.CharField(max_length=128, null=False, verbose_name="商铺名称")
    # Random short string exposed in URLs instead of the numeric id.
    shop_code = models.CharField(max_length=16, null=False, default="", verbose_name="随机字符串,用于代替id")
    shop_phone = models.CharField(max_length=32, null=False, default="", verbose_name="联系电话")
    shop_img = models.CharField(max_length=300, null=False, default="", verbose_name="门头照片")
    business_licence = models.CharField(max_length=300, null=False, default="", verbose_name="营业执照")
    shop_address = models.CharField(max_length=100, null=False, default="", verbose_name="商铺地址")
    shop_county = models.IntegerField(null=False, default=0, verbose_name="商铺所在国家编号")
    shop_province = models.IntegerField(null=False, default=0, verbose_name="商铺所在省份编号")
    shop_city = models.IntegerField(null=False, default=0, verbose_name="商铺所在城市编号")
    create_time = models.DateTimeField(null=False, auto_now_add=True, verbose_name="商铺创建时间")
    description = models.CharField(max_length=256, null=False, default="", verbose_name="商铺描述")
    inviter_phone = models.CharField(max_length=32, null=False, default="", verbose_name="推荐人手机号")
    # NOTE(review): "cerify" is a typo for "verify"; kept because renaming the
    # column would require a migration and touch every caller/serializer.
    cerify_active = models.SmallIntegerField(
        null=False,
        default=ShopVerifyActive.YES,
        verbose_name="是否认证,1:是,0:否"
    )
    shop_verify_type = models.SmallIntegerField(
        null=False,
        default=ShopVerifyType.INDIVIDUAL,
        verbose_name="商铺类型,0:企业,1:个人",
    )
    shop_verify_content = models.CharField(max_length=200, verbose_name="认证内容(公司名称)")
    pay_active = models.SmallIntegerField(
        null=False,
        default=ShopPayActive.YES,
        verbose_name="是否开通线上支付,1:是,0:否",
    )

    class Meta:
        db_table = "shop"
        verbose_name = "商铺"
        verbose_name_plural = verbose_name
        indexes = [
            # NOTE(review): the "ux_" prefix suggests a unique index, but
            # models.Index is non-unique — confirm whether a UniqueConstraint
            # on shop_code was intended.
            models.Index(name="ux_shop_code", fields=["shop_code"]),
            models.Index(name="ix_super_admin", fields=["super_admin"]),
        ]
class PayChannel(TimeBaseModel):
    """Payment-channel credentials and merchant settings for a shop."""

    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="店铺对象")
    smerchant_no = models.CharField(max_length=15, null=False, default="", verbose_name="商户号")
    smerchant_name = models.CharField(max_length=100, null=False, default="", verbose_name="商户名")
    smerchant_type_id = models.CharField(max_length=15, null=False, default="", verbose_name="商户类别id")
    smerchant_type_name = models.CharField(max_length=81, null=False, default="", verbose_name="商户类别名")
    pos_id = models.CharField(max_length=9, null=False, default="", verbose_name="柜台号")
    terminal_id1 = models.CharField(max_length=50, null=False, default="", verbose_name="终端号1")
    terminal_id2 = models.CharField(max_length=50, null=False, default="", verbose_name="终端号2")
    access_token = models.CharField(max_length=32, null=False, default="", verbose_name="扫呗access_token")
    # Clearing rate in per-mille: Lcsw defaults to 2.8‰, CCB uses 0.
    clearing_rate = models.FloatField(null=False, default=2.8, verbose_name="商户的清算费率,利楚默认是千分之2.8,建行是0")
    clearing_account_id = models.IntegerField(null=False, default=0, verbose_name="商户的清算账号ID")
    # 1 = Lcsw (利楚), 2 = CCB (建行); 0 = unset default.
    channel_type = models.SmallIntegerField(null=False, default=0, verbose_name="支付渠道, 1:利楚, 2:建行")
    pub_key = models.CharField(max_length=500, verbose_name="账户公匙")
    province = models.CharField(max_length=32, null=False, default="Hubei", verbose_name="用户所在省份")

    class Meta:
        db_table = "pay_channel"
        verbose_name = "支付渠道"
        verbose_name_plural = verbose_name
class HistoryRealName(TimeBaseModel):
    """Historical real name of a shop's creator, keyed 1:1 by shop id."""

    # Bug fix: the original assigned `models.OneToOneField(...).primary_key`
    # — i.e. the field's boolean attribute (True) — to `id`, so no relation
    # field was ever registered on the model. Declare the field itself.
    # primary_key=True already implies uniqueness; db_column keeps the
    # column name "id" so existing rows/queries are unaffected.
    id = models.OneToOneField(
        Shop, on_delete=models.CASCADE, primary_key=True, db_column="id",
        verbose_name="对应的店铺id"
    )
    realname = models.CharField(max_length=32, null=False, verbose_name='历史真实姓名')

    class Meta:
        db_table = "history_realname"
        verbose_name = "商铺创建者历史真实姓名"
        verbose_name_plural = verbose_name
class ShopRejectReason(TimeBaseModel):
"""拒绝的商铺的拒绝理由"""
id = models.OneToOneField(
Shop, on_delete=models.CASCADE, primary_key=True, unique=True, null=False, verbose_name="对应的店铺id"
).primary_key
reject_reason = models.CharField(max_length=256, null=False, default='', verbose_name="拒绝理由")
class Meta:
db_table = "shop_reject_reason"
verbose_name = "商铺拒绝理由"
verbose_name_plural = verbose_name<file_sep>/wsc_django/wsc_django/apps/storage/serializers.py
from rest_framework import serializers
from user.serializers import UserSerializer
from wsc_django.utils.constant import DateFormat
from wsc_django.utils.core import FuncField
class AdminProductStorageRecordsSerializer(serializers.Serializer):
    """Read-only serializer for admin product stock-change records."""

    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="创建时间")
    type = serializers.IntegerField(label="变更类型")
    type_text = serializers.CharField(allow_blank=True, label="变更类型文字版")
    operator_type = serializers.IntegerField(label="操作人类型")
    operator = UserSerializer(source="user",label="操作人")
    # Quantities are rounded to whole units for display.
    change_storage = FuncField(lambda value: round(float(value)), label="变更量")
    current_storage = FuncField(lambda value: round(float(value)), label="当前量")
    # "-" placeholder when the change is not tied to an order.
    order_num = serializers.CharField(default="-", label="订单号")
<file_sep>/wsc_django/wsc_django/xMiddleware/middleware.py
from django.utils.deprecation import MiddlewareMixin
from user.models import User
from shop.services import (
get_shop_by_shop_code,
get_shop_by_shop_id,
)
class MyMiddleware(MiddlewareMixin):
    """Dev-only middleware: bypasses login by pinning a fixed user (id=1)."""

    def process_request(self, request):
        # NOTE(review): assumes a User with id=1 exists — .get() raises
        # DoesNotExist otherwise; acceptable for a test-only shortcut.
        user = User.objects.get(id=1)
        # Bug fix: the user must travel with the request. The original only
        # set `self.current_user`, an attribute on the middleware instance,
        # which is shared by every request and never consulted by views.
        # presumably views read request.current_user — confirm against the
        # view base class.
        request.current_user = user
        self.current_user = user  # kept for backward compatibility
class ConfigMiddleware(MiddlewareMixin):
"""进行一些配置"""
def process_request(self, request):
# 从请求体中获取shop_code进行查询
shop_code = request.GET.get("shop_code")
shop = None
if shop_code:
shop = get_shop_by_shop_code(shop_code)
wsc_shop_id = request.COOKIES.get("wsc_shop_id")
# 从cookie中获取shop_id进行查询
if wsc_shop_id and not shop:
shop = get_shop_by_shop_id(int(wsc_shop_id))
request.shop = shop<file_sep>/wsc_django/wsc_django/apps/delivery/serializers.py
from rest_framework import serializers
from wsc_django.utils.core import FuncField
class PickPeriodConfigLineSerializer(serializers.Serializer):
    """Serializer for one self-pickup time slot (from/to)."""

    from_time = serializers.DateTimeField(label="自提起始时间")
    to_time = serializers.DateTimeField(label="自提终止时间")
class AdminDeliverySerializer(serializers.Serializer):
    """Serializer for an admin-side delivery (shipping) record."""

    delivery_type = serializers.IntegerField(label="配送方式")
    company = serializers.CharField(label="快递公司")
    express_num = serializers.CharField(label="快递单号")
class AdminDeliveryConfigSerializer(serializers.Serializer):
    """Serializer for the admin-side delivery configuration of a shop."""

    home_on = serializers.BooleanField(label="配送模式是否开启")
    # Monetary amounts are rendered rounded to 2 decimal places.
    home_minimum_order_amount = FuncField(lambda value: round(float(value), 2), label="配送模式起送金额")
    home_delivery_amount = FuncField(lambda value: round(float(value), 2), label="配送模式配送费")
    home_minimum_free_amount = FuncField(lambda value: round(float(value), 2), label="配送模式免配送费最小金额")
    pick_on = serializers.BooleanField(label="自提模式是否开启")
    pick_service_amount = FuncField(lambda value: round(float(value), 2), label="自提模式服务费")
    pick_minimum_free_amount = FuncField(lambda value: round(float(value), 2), label="自提模式免服务费最小金额")
    pick_today_on = serializers.BooleanField(label="今天自提是否开启")
    pick_tomorrow_on = serializers.BooleanField(label="明天自提是否开启")
    pick_periods = PickPeriodConfigLineSerializer(many=True, label="自提时间段")<file_sep>/wsc_django/wsc_django/apps/shop/serializers.py
from rest_framework import serializers
from django.db import transaction
from delivery.services import create_delivery_config, create_pick_period_line
from product.services import create_default_group_by_shop
from shop.constant import ShopStatus
from shop.services import (
create_shop,
create_pay_channel,
create_shop_mini_program_qcode,
create_shop_reject_reason_by_shop_id,
create_shop_creator_history_realname,
)
from staff.services import create_super_admin_staff
from user.serializers import UserSerializer, operatorSerializer
from wsc_django.utils.constant import DateFormat
from config.services import (
create_receipt_by_shop,
create_share_setup,
create_some_config_by_shop_id,
create_msg_notify_by_shop_id,
)
from wsc_django.utils.validators import (
mobile_validator,
shop_verify_status_validator,
shop_verify_type_validator,
shop_status_validator,
)
class ShopCreateSerializer(serializers.Serializer):
    """Serializer for creating a shop from the super-admin backend.

    `create` builds the shop plus all of its default satellite records
    (mini-program qcode, receipt, delivery config, default product group,
    super-admin staff, share setup, misc configs, message-notify config)
    inside a single atomic transaction.
    """

    id = serializers.IntegerField(read_only=True, label="商铺id")
    shop_code = serializers.CharField(read_only=True, label="商铺code")
    shop_name = serializers.CharField(required=True, max_length=128, label="商铺名称")
    shop_img = serializers.CharField(required=True, max_length=300, label="商铺logo")
    shop_province = serializers.CharField(required=True, label="商铺省份编号")
    shop_city = serializers.CharField(required=True, label="商铺城市编号")
    shop_county = serializers.CharField(required=True, label="商铺区编号")
    shop_address = serializers.CharField(required=True, max_length=100, label="详细地址")
    description = serializers.CharField(required=True, max_length=200, label="商铺描述")
    inviter_phone = serializers.CharField(required=False, validators=[mobile_validator], label="推荐人手机号")
    realname = serializers.CharField(required=False, label="历史真实姓名")

    def create(self, validated_data):
        """Create the shop and all default satellite records atomically.

        :param validated_data: validated shop fields (plus optional realname)
        :return: the persisted Shop instance
        :raises Exception: re-raises any failure after rolling back
        """
        # Local import keeps the fix self-contained; module has no logging import.
        import logging

        user = self.context['user']
        # Creator's real name at application time (not a Shop column).
        history_realname = validated_data.pop("realname", None)
        with transaction.atomic():
            # Savepoint lets us roll back all satellite records together.
            save_id = transaction.savepoint()
            try:
                # Create the shop itself.
                shop = create_shop(validated_data, user)
                # Mini-program QR code for the shop.
                create_shop_mini_program_qcode(shop.shop_code)
                # Receipt template.
                create_receipt_by_shop(shop.id)
                # Default delivery config with three pickup slots.
                delivery_config = create_delivery_config(shop.id)
                create_pick_period_line(delivery_config, "12:00", "13:00")
                create_pick_period_line(delivery_config, "17:00", "18:00")
                create_pick_period_line(delivery_config, "21:00", "22:00")
                # Default product group.
                create_default_group_by_shop(shop)
                # Register the creator as the super-admin staff member.
                create_super_admin_staff(shop, shop.super_admin)
                # Share setup, misc configs and message-notify defaults.
                create_share_setup(shop.id, shop.shop_name)
                create_some_config_by_shop_id(shop.id)
                create_msg_notify_by_shop_id(shop.id)
                # Store the applicant's historical real name, if provided.
                if history_realname:
                    create_shop_creator_history_realname(shop.id, history_realname)
            except Exception:
                # Bug fix: the original `print(e)` dropped the traceback and
                # wrote to stdout; log the full exception before rollback.
                logging.getLogger(__name__).exception("shop creation failed")
                transaction.savepoint_rollback(save_id)
                raise
            transaction.savepoint_commit(save_id)
        return shop
class SuperShopSerializer(serializers.Serializer):
    """Read serializer: shop detail for the super-admin backend."""

    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(label="商铺名称")
    shop_img = serializers.CharField(label="商铺logo")
    shop_province = serializers.CharField(label="商铺省份编号")
    shop_city = serializers.CharField(label="商铺城市编号")
    shop_county = serializers.CharField(label="商铺区编号")
    shop_address = serializers.CharField(label="详细地址")
    description = serializers.CharField(label="商铺描述")
    create_time = serializers.DateTimeField(label="商铺创建时间")
    shop_status = serializers.IntegerField(source="status", label="商铺状态")
    create_user_data = UserSerializer(read_only=True, label="商铺创建人信息")
    super_admin_data = UserSerializer(label="超管信息")
class SuperShopListSerializer(serializers.Serializer):
    """Read serializer: one row of the super-admin shop list."""

    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(label="商铺名称")
    shop_img = serializers.CharField(label="商铺logo")
    product_species_count = serializers.IntegerField(label="商铺货品种类数量")
    is_super_admin = serializers.IntegerField(label="该用户是否为该店的超级管理员")
    shop_status = serializers.IntegerField(source="status", label="商铺状态")
    cerify_active = serializers.IntegerField(label="商铺是否认证")
    pay_active = serializers.IntegerField(label="商铺是否开通支付")
    shop_verify_content = serializers.CharField(label="商铺认证内容")
class AdminShopSerializer(serializers.Serializer):
    """Read serializer: shop info for the merchant admin backend."""

    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(label="商铺名称")
    shop_img = serializers.CharField(label="商铺logo")
    shop_phone = serializers.CharField(label="商铺联系电话")
    shop_status = serializers.IntegerField(source="status", label="商铺状态")
    shop_province = serializers.CharField(label="商铺省份编号")
    shop_city = serializers.CharField(label="商铺城市编号")
    shop_county = serializers.CharField(label="商铺区编号")
    shop_address = serializers.CharField(label="详细地址")
    shop_code = serializers.CharField(label="商铺编号")
    cerify_active = serializers.IntegerField(label="商铺是否认证")
    shop_verify_type = serializers.IntegerField(label="商铺认证类型")
    pay_active = serializers.IntegerField(label="商铺是否开通支付")
    shop_verify_content = serializers.CharField(label="商铺认证内容")
    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="商铺创建时间")
    create_user = UserSerializer(read_only=True, label="商铺创建人信息")
class MallShopSerializer(serializers.Serializer):
    """Read serializer: shop info shown on the mall (customer) side."""

    shop_name = serializers.CharField(label="商铺名称")
    shop_code = serializers.CharField(label="商铺编号")
    shop_img = serializers.CharField(label="商铺logo")
    shop_province = serializers.CharField(label="商铺省份编号")
    shop_city = serializers.CharField(label="商铺城市编号")
    shop_county = serializers.CharField(label="商铺区编号")
    shop_address = serializers.CharField(label="详细地址")
    shop_phone = serializers.CharField(label="商铺联系电话")
class SuperShopStatusSerializer(serializers.Serializer):
    """Serializer for reviewing a shop application (super-admin backend).

    Only `shop_status` (and optionally `reject_reason`) are writable; the
    remaining fields render the application for display.
    """

    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(read_only=True, label="商铺名称")
    shop_img = serializers.CharField(read_only=True, label="商铺logo")
    shop_address = serializers.CharField(read_only=True, label="详细地址")
    shop_province = serializers.CharField(read_only=True, label="商铺省份编号")
    shop_city = serializers.CharField(read_only=True, label="商铺城市编号")
    shop_county = serializers.CharField(read_only=True, label="商铺区编号")
    shop_status = serializers.IntegerField(
        required=True, source="status", validators=[shop_status_validator], label="商铺状态"
    )
    create_time = serializers.DateTimeField(read_only=True, format=DateFormat.TIME, label="商铺创建时间")
    creator = UserSerializer(read_only=True, label="商铺创建者")
    operate_time = serializers.DateTimeField(read_only=True, source="update_at", format=DateFormat.TIME, label="操作时间")
    operator = operatorSerializer(read_only=True, label="审核操作人")
    reject_reason = serializers.CharField(required=False, default='', label="拒绝理由")
    description = serializers.CharField(read_only=True, label="商铺描述")
    inviter_phone = serializers.CharField(read_only=True, label="推荐人手机号")
    current_realname = serializers.CharField(read_only=True, label="创建时的用户真实姓名")

    def update(self, instance, validated_data):
        """Apply the review decision; persist a reject reason when rejected."""
        shop_status = validated_data["status"]
        instance.status = shop_status
        # reject_reason always present: the field declares default=''.
        if shop_status == ShopStatus.REJECTED:
            create_shop_reject_reason_by_shop_id(instance.id, validated_data['reject_reason'])
        instance.save()
        return instance
class SuperShopVerifySerializer(serializers.Serializer):
    """Serializer for updating a shop's verification status (super-admin)."""

    shop_id = serializers.IntegerField(source='id', read_only=True, label="商铺id")
    verify_status = serializers.IntegerField(
        write_only=True, required=True, validators=[shop_verify_status_validator], label="商铺认证状态"
    )
    verify_type = serializers.IntegerField(
        write_only=True, required=True, validators=[shop_verify_type_validator], label="商铺认证类型,个人/企业"
    )
    verify_content = serializers.CharField(
        write_only=True, min_length=0, max_length=200, required=True, label="认证内容"
    )

    def update(self, instance, validated_data):
        """Copy the three verify fields onto the shop and save."""
        cerify_active = validated_data["verify_status"]
        verify_type = validated_data["verify_type"]
        verify_content = validated_data["verify_content"]
        # Model field is spelled "cerify_active" (historic typo, kept).
        instance.cerify_active = cerify_active
        instance.shop_verify_type = verify_type
        instance.shop_verify_content = verify_content
        instance.save()
        return instance
class ShopPayChannelSerializer(serializers.Serializer):
    """Serializer for creating a shop's payment channel (super-admin)."""

    smerchant_no = serializers.CharField(label="商户号")
    terminal_id1 = serializers.CharField(label="终端号1")
    access_token = serializers.CharField(label="扫呗access_token")
    channel_type = serializers.IntegerField(label="支付渠道, 1:利楚, 2:建行")

    def create(self, validated_data):
        """Persist a PayChannel for the shop passed in serializer context."""
        shop = self.context["shop"]
        shop_pay_channel = create_pay_channel(validated_data, shop.id)
        return shop_pay_channel
<file_sep>/wsc_django/wsc_django/apps/config/constant.py
# Printer brands
class PrinterBrand:
    """Supported receipt-printer brand constants."""
    YILIANYUN = 1
    FEIYIN = 2
    FOSHANXIXUN = 3
    S1 = 4
    S2 = 5
    SENGUO = 6

# Display names — only Yilianyun has one so far.
PRINTER_BRAND_TEXT = {PrinterBrand.YILIANYUN: "易联云"}
# Printer connection types
class PrinterType:
    """LOCAL = wired/local printer, NET = network printer."""
    LOCAL = 1
    NET = 2
# Receipt print templates
class PrinterTemp:
    """Available print-template ids (only one so far)."""
    ONE = 1
# Automatic order printing toggle
class PrinterAutoPrint:
    """Whether orders are printed automatically."""
    YES = 1
    NO = 0
# Printer status
class PrinterStatus:
    """NORMAL = active, DELETE = soft-deleted."""
    NORMAL = 1
    DELETE = 0
# Whether to print the order-number barcode on receipts
class ReceiptBrcodeActive:
    """Toggle for printing the order-number barcode."""
    YES = 1
    NO = 0
# Default templates for the shop share setup
class ShareSetUpTemplate:
    """Default share title/description templates ({shop_name} is filled in later)."""
    CUSTOM_TITLE_NAME = "精打细算,还是来这划算!【{shop_name}】"  # custom share-title template
    CUSTOM_SHARE_DESCRIPTION = "我发现了一家很不错的店铺,地址分享给你,快来一起买买买吧!"  # custom share-description template
<file_sep>/wsc_django/wsc_django/apps/delivery/urls.py
"""
配送相关的路由
"""
from django.urls import path, re_path
from delivery import views
urlpatterns = [
    path('api/admin/delivery-config/', views.AdminDeliveryConfigView.as_view()),  # admin: read delivery config
    path('api/admin/delivery-config/home/', views.AdminDeliveryConfigHomeView.as_view()),  # home-delivery settings
    path('api/admin/delivery-config/pick/', views.AdminDeliveryConfigPickView.as_view()),  # self-pickup settings
    path('api/admin/delivery-config/method/', views.AdminDeliveryConfigMethodView.as_view()),  # toggle delivery/pickup on or off
]
<file_sep>/wsc_django/wsc_django/apps/staff/urls.py
"""
员工相关的路由
"""
from django.urls import path, re_path
from staff import views
urlpatterns = [
    re_path(r'^api/staff/apply/(?P<shop_code>\w+)/$', views.StaffApplyView.as_view()),  # submit a staff application & fetch its info
    path('api/admin/staff/', views.AdminStaffView.as_view()),  # staff detail & edit & delete
    path('api/admin/staff/apply/', views.AdminStaffApplyView.as_view()),  # staff application list & approval
    path('api/admin/staffs/', views.AdminStaffListView.as_view()),  # staff list
]
<file_sep>/wsc_django/wsc_django/apps/staff/models.py
from django.db import models
from shop.models import Shop
from user.models import User
from staff.constant import (
StaffStatus,
StaffApplyStatus,
StaffApplyExpired,
)
from wsc_django.utils.models import TimeBaseModel
class Staff(TimeBaseModel):
    """Staff member of a shop (links a User to a Shop with roles/permissions)."""

    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="员工对应的商铺对象")
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False, verbose_name="员工对应的用户对象")
    # Bit flags checked with bitwise operations.
    roles = models.SmallIntegerField(null=False, default=0, verbose_name="角色,二进制运算进行校验")
    permissions = models.BigIntegerField(null=False, default=0, verbose_name="权限,二进制运算进行校验")
    status = models.SmallIntegerField(
        null=False,
        default=StaffStatus.NORMAL,
        verbose_name="员工状态,0:删除,1:正常",
    )
    position = models.CharField(max_length=16, default="无", verbose_name="员工职位")
    entry_date = models.DateField(auto_now_add=True, verbose_name="员工入职时间")
    remark = models.CharField(max_length=32,default="", verbose_name="备注")

    class Meta:
        db_table = "staff"
        verbose_name = "员工"
        verbose_name_plural = verbose_name
class StaffApply(TimeBaseModel):
    """A user's application to become a staff member of a shop."""

    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="对应的商铺对象")
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False, verbose_name="对应的用户对象")
    status = models.SmallIntegerField(
        null=False,
        default=StaffApplyStatus.APPLYING,
        verbose_name="申请状态,0:未申请,1;申请中,2:已通过"
    )
    expired = models.SmallIntegerField(
        null=False,
        default=StaffApplyExpired.NO,
        verbose_name="申请信息是否过期,0:未过期,1:已过期"
    )

    class Meta:
        db_table = "staff_apply"
        verbose_name = "员工申请表"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/payment/views.py
import urllib.parse
import json
from django.shortcuts import redirect
from rest_framework.response import Response
from webargs import fields
from webargs.djangoparser import use_args
from payment.interface import get_user_openid_interface, create_user_openid_interface, pay_order_interfaces
from payment.service import get_openid_redirect_url, handle_lcsw_callback
from wsc_django.utils.views import MallBaseView, GlobalBaseView
class MallPaymentOpenIdView(MallBaseView):
    """Mall endpoint returning the user's payment wx_openid for the current shop."""

    @use_args(
        {
            "redirect_uri": fields.String(
                required=True, comment="获取公众号配置重定向的前端路由,前端传递的是**需要跳转到的页面**的前端路由"
            )
        },
        location="query"
    )
    def get(self, request, args, shop_code):
        """Return the cached wx_openid, or a redirect URL used to acquire one.

        :param shop_code: shop identifier from the URL path
        """
        self._set_current_shop(request, shop_code)
        # mp_appid is namespaced per shop: "lcwx-<shop_id>".
        success, user_openid = get_user_openid_interface(
            self.current_user.id, "lcwx-{}".format(self.current_shop.id)
        )
        if success:
            return self.send_success(wx_openid=user_openid.wx_openid)
        # No stored openid — build the OAuth redirect URL for the frontend.
        success, redirect_url = get_openid_redirect_url(
            self.current_shop,
            urllib.parse.quote(args["redirect_uri"], safe=""),
        )
        if not success:
            # redirect_url carries the error message in the failure case.
            return self.send_fail(error_text=redirect_url)
        return self.send_fail(error_text="获取openid失败", error_redirect=redirect_url)
class MallOpenidLcswView(MallBaseView):
    """Callback endpoint that stores the openid returned by Lcsw (利楚商务)."""

    @use_args(
        {
            "openid": fields.String(required=True, comment="利楚返回的openid"),
            "redirect": fields.String(
                required=True, comment="openid处理完成后的重定向页面"
            ),
        },
        location="query"
    )
    def get(self, request, args, shop_code):
        """Save/update the user's openid, then redirect to the original page."""
        self._set_current_shop(request, shop_code)
        # Per-shop mp appid namespace, same scheme as MallPaymentOpenIdView.
        mp_appid = "lcwx-{}".format(self.current_shop.id)
        success, user_openid = get_user_openid_interface(
            self.current_user.id, mp_appid
        )
        if success:
            # Existing record — overwrite the stored openid.
            user_openid.set_wx_openid(args["openid"])
            user_openid.save()
        else:
            create_user_openid_interface(
                self.current_user.id, mp_appid, args["openid"]
            )
        return redirect(urllib.parse.unquote(args["redirect"]))
class LcswPaymentCallbackView(GlobalBaseView):
"""利楚支付回调"""
def post(self, request):
""" 回调传入参数举例
{"attach":"SENGUOPRODUCT","channel_trade_no":"4000082001201707140681458896","end_time":"20170714105108",
"key_sign":"fce359fd0dd87d7d52d374de7be40657","merchant_name":"贵港市港北区优鲜果品经营部","merchant_no":"862500210000002",
"out_trade_no":"101947210121317071410510301546","pay_type":"010","receipt_fee":"1","result_code":"01","return_code":"01",
"return_msg":"支付成功","terminal_id":"10194721","terminal_time":"20170714105103","terminal_trace":"c1992000206",
"total_fee":"1","user_id":"ojUElxOZlqPRYdXnOOzzVoKToTR0"}
响应码:01成功 ,02失败,响应码仅代表通信状态,不代表业务结果
"""
res_text = request.data.decode(encoding="utf-8")
res_dict = json.loads(res_text)
# TODO: 日志点
_, order = handle_lcsw_callback(res_dict)
pay_order_interfaces(order)
# 订单提交成功微信提醒, 暂时只有普通订单才发送消息,且页面没有控制按钮
# todo 待写
ret_dict = {"return_code": "01", "return_msg": "success"}
return Response(data=ret_dict)<file_sep>/wsc_django/wsc_django/apps/delivery/services.py
import copy
import datetime
import decimal
from logs.constant import OrderLogType
from logs.services import create_order_log
from order.constant import OrderDeliveryMethod
from shop.models import Shop
from delivery.models import DeliveryConfig, PickPeriodConfigLine, Delivery
def create_delivery_config(shop_id: int):
    """Create a shop's delivery settings row with every column at its default.

    :param shop_id: shop id — doubles as the config's primary key
    :return: the persisted DeliveryConfig instance
    """
    config = DeliveryConfig(id=shop_id)
    config.save()
    return config
def create_pick_period_line(
    delivery_config: DeliveryConfig, from_time: str, to_time: str
):
    """Create a single self-pickup time slot for a delivery config.

    :param delivery_config: owning DeliveryConfig instance
    :param from_time: slot start, e.g. "12:00"
    :param to_time: slot end, e.g. "13:00"
    :return: the persisted PickPeriodConfigLine
    """
    slot = PickPeriodConfigLine(
        delivery_config=delivery_config,
        from_time=from_time,
        to_time=to_time,
    )
    slot.save()
    return slot
def create_pick_period_lines(
    delivery_config: DeliveryConfig, pick_period_lines: list
):
    """Bulk-create self-pickup time slots for one delivery config.

    :param delivery_config: owning DeliveryConfig instance
    :param pick_period_lines: [
        {"from_time": 'xx', "to_time": 'xx'},
        {"from_time": 'xx', "to_time": 'xx'},
    ]
    :return: None
    """
    rows = [
        PickPeriodConfigLine(
            delivery_config=delivery_config,
            from_time=line["from_time"],
            to_time=line["to_time"],
        )
        for line in pick_period_lines
    ]
    # Single INSERT for all slots.
    PickPeriodConfigLine.objects.bulk_create(rows)
def create_order_delivery(delivery_info: dict):
    """Persist one order-delivery record built from `delivery_info` kwargs.

    :param delivery_info: field values for the Delivery model
    :return: the persisted Delivery instance
    """
    record = Delivery(**delivery_info)
    record.save()
    return record
def update_delivery_config(shop_id: int, args: dict, user_id: int = 0):
    """Update a shop's delivery settings and log the changed amounts.

    :param shop_id: shop whose config is updated
    :param args: changed fields; may include a "pick_periods" list of
        {"from_time", "to_time"} dicts
    :param user_id: operator; 0 means the delivery/pickup toggle buttons,
        for which no operation log is written
    :return: (True, "") on success, (False, error_text) on failure
    """
    success, delivery_config = get_delivery_config_by_shop_id(shop_id)
    if not success:
        return False, delivery_config
    # Snapshot for change-logging below.
    old_delivery_config = copy.deepcopy(delivery_config)
    if args.get("pick_periods"):
        # Replace all pickup slots: drop the old ones, insert the new ones.
        delivery_config.pick_periods.delete()
        create_pick_period_lines(
            delivery_config, args["pick_periods"]
        )
    # Update the config columns.
    for k, v in args.items():
        if k == "pick_periods":
            # Bug fix: "pick_periods" is not a DeliveryConfig column; the
            # original setattr overwrote the related-slots attribute with a
            # list of plain dicts.
            continue
        setattr(delivery_config, k, v)
    # Bug fix: validate BEFORE persisting — the original saved first and
    # returned an error afterwards, leaving an invalid config stored.
    if not delivery_config.home_on and not delivery_config.pick_on:
        return False, "店铺至少需要开启一种配送方式"
    delivery_config.save()
    # Write operation logs; user_id == 0 means the toggle buttons (no log).
    if user_id:
        for k, v in args.items():
            operate_type = getattr(OrderLogType, k.upper(), None)
            if operate_type is None:
                continue
            old_value = round(float(getattr(old_delivery_config, k)), 2)
            new_value = round(float(v), 2)
            if old_value != new_value:
                log_info = {
                    "shop_id": shop_id,
                    "operator_id": user_id,
                    "order_num": "0",
                    "order_id": "0",
                    "operate_type": operate_type,
                    "operate_content": "{}|{}".format(old_value, new_value),
                }
                create_order_log(log_info)
    return success, ""
def get_delivery_config_by_shop_id(shop_id: int):
    """Fetch a shop's delivery config together with its pickup slots.

    :param shop_id: shop id (DeliveryConfig's primary key)
    :return: (True, DeliveryConfig) on success — the config gets a
        dynamically attached `pick_periods` queryset — or
        (False, error_text) when no config exists.
    """
    delivery_config = DeliveryConfig.objects.filter(id=shop_id).first()
    if not delivery_config:
        return False, "店铺配送设置不存在"
    pick_periods = list_pick_peirods_by_delivery_config_id(delivery_config.id)
    # NOTE: attached as a plain attribute, not a model field/relation.
    delivery_config.pick_periods = pick_periods
    return True, delivery_config
def list_pick_peirods_by_delivery_config_id(delivery_config_id: int):
    """List a delivery config's pickup time slots, earliest first.

    (The "peirods" typo in the name is kept — callers depend on it.)

    :param delivery_config_id: owning DeliveryConfig id
    :return: queryset of PickPeriodConfigLine ordered by from_time
    """
    return (
        PickPeriodConfigLine.objects
        .filter(delivery_config_id=delivery_config_id)
        .order_by("from_time")
        .all()
    )
def apply_promotion(
    shop_id: int,
    delivery_method: int,
    order_amount: decimal.Decimal,
    delivery_amount_net: decimal.Decimal,
):
    """Validate an order's delivery promotion and return the gross fee.

    :param shop_id: shop the order belongs to
    :param delivery_method: chosen delivery method constant
    :param order_amount: order total used for threshold checks
    :param delivery_amount_net: fee the client claims after promotion
    :return: (True, gross delivery amount) or (False, error_text)
    """
    success, delivery_config = get_delivery_config_by_shop_id(shop_id)
    if not success:
        return False, delivery_config
    # Method must currently be enabled on this shop.
    is_valid = delivery_config.is_delivery_method_valid(delivery_method)
    if not is_valid:
        return False, "配送方式无效,请重新选择或刷新页面后重试"
    # NOTE(review): `limit` appears to check the minimum-order threshold —
    # semantics live on DeliveryConfig, confirm there.
    is_occupied = delivery_config.limit(delivery_method, order_amount)
    if not is_occupied:
        return False, "订单未到起送价"
    promotion_amount = delivery_config.calculate(delivery_method, order_amount)
    delivery_amount_gross = delivery_config.get_delivery_amount_gross(delivery_method)
    # Client-supplied net fee must agree with gross - promotion (1-cent tolerance).
    if abs(delivery_amount_gross - promotion_amount - delivery_amount_net) > 0.01:
        return False, "订单运费计算有误"
    return True, delivery_amount_gross
def _convert_delivery_period(args: dict):
    """Normalize an order's delivery-period argument to a concrete string.

    Home delivery becomes "立即配送"; pickup periods like "今天 12:00-13:00"
    become "YYYY-MM-DD 12:00-13:00".

    :param args: must contain "delivery_method" and, for pickup,
        "delivery_period" formatted as "<今天|明天> <period>"
    :return: (True, period_string) or (False, error_text)
    """
    if args["delivery_method"] == OrderDeliveryMethod.HOME_DELIVERY:
        delivery_period = "立即配送"
    else:
        try:
            day, period = args["delivery_period"].split(" ")
        except ValueError:
            # Either no space or more than one — reject.
            return False, "配送日期参数错误"
        if day == "今天":
            day_converted = datetime.date.today().strftime("%Y-%m-%d")
        elif day == "明天":
            day_converted = (datetime.date.today() + datetime.timedelta(1)).strftime(
                "%Y-%m-%d"
            )
        else:
            return False, "配送日期参数错误"
        delivery_period = "{day_converted} {period}".format(
            day_converted=day_converted, period=period
        )
    return True, delivery_period
def get_order_delivery_by_delivery_id(delivery_id: int):
    """Fetch an order's delivery record (merchant-delivery orders only).

    :param delivery_id: Delivery primary key
    :return: Delivery instance, or None if absent
    """
    return Delivery.objects.filter(id=delivery_id).first()
<file_sep>/wsc_django/wsc_django/apps/customer/services.py
import decimal
from django.db.models import Q
from customer.constant import MineAddressStatus, MineAddressDefault, CustomerPointType
from customer.models import Customer, CustomerPoint, MineAddress
#####################顾客相关#####################
from user.constant import USER_OUTPUT_CONSTANT
def create_customer(
    user_id: int,
    shop_id: int,
    consume_amount: float = 0,
    consume_count: int = 0,
    point: float = 0,
    remark: str = "",
):
    """Create and persist a customer record for a shop.

    :param user_id: owning user's id
    :param shop_id: shop the customer belongs to
    :param consume_amount: initial lifetime spend
    :param consume_count: initial order count
    :param point: initial loyalty points
    :param remark: merchant-facing note
    :return: the persisted Customer instance
    """
    new_customer = Customer(
        shop_id=shop_id,
        user_id=user_id,
        consume_amount=consume_amount,
        consume_count=consume_count,
        point=point,
        remark=remark,
    )
    new_customer.save()
    return new_customer
def update_customer_remark(customer: Customer, remark: str):
    """Overwrite a customer's merchant-facing remark and persist it.

    :param customer: customer instance to update
    :param remark: new remark text
    :return: (True, "") — tuple kept for interface compatibility
    """
    customer.remark = remark
    customer.save()
    return True, ""
def update_customer_consume_amount_and_count_and_point_by_consume(
    customer_id: int, consume_amount: decimal.Decimal
):
    """On a purchase: add spend, bump order count, and award points.

    Points awarded = round(consume_amount); a first-ever order additionally
    awards a 5-point bonus recorded as a separate FIRST entry.

    :param customer_id: customer placing the order
    :param consume_amount: order amount being added
    :return: None
    """
    # NOTE(review): returns None for an unknown id and would raise
    # AttributeError below — confirm callers guarantee the customer exists.
    customer = get_customer_by_customer_id(customer_id)
    # Points are the amount rounded to the nearest integer.
    point = round(consume_amount)
    # First order: 5 bonus points, logged before the consume points.
    if customer.consume_count == 0:
        customer.point += decimal.Decimal(5)
        create_customer_point(
            customer.id,
            customer.point,
            decimal.Decimal(5),
            CustomerPointType.FIRST,
        )
        customer.save()
    customer.consume_amount += consume_amount
    customer.consume_count += 1
    customer.point += point
    customer.save()
    create_customer_point(
        customer.id, customer.point, point, CustomerPointType.CONSUME
    )
def update_customer_consume_amount_and_point_by_refund(
    customer_id: int, consume_amount: decimal.Decimal
):
    """On a refund: subtract spend and points; the order count is kept.

    :param customer_id: customer being refunded
    :param consume_amount: refunded amount
    :return: None
    """
    # NOTE(review): returns None for an unknown id — see consume counterpart.
    customer = get_customer_by_customer_id(customer_id)
    # Points removed mirror the award rule: amount rounded to an integer.
    point = round(consume_amount)
    # # 退首单,相应的退掉首单的积分  (first-order bonus is deliberately NOT
    # # clawed back — the block below was disabled on purpose)
    # if customer.consume_count == 1:
    #     point += decimal.Decimal(5)
    customer.consume_amount -= consume_amount
    customer.point -= point
    customer.save()
    create_customer_point(
        customer.id, customer.point, -point, CustomerPointType.REFUND
    )
def get_customer_by_customer_id(customer_id: int):
    """Fetch a customer by primary key.

    :param customer_id: customer id
    :return: Customer instance, or None if absent
    """
    return Customer.objects.filter(id=customer_id).first()
def get_customer_by_user_id_and_shop_id(user_id: int, shop_id: int):
    """Fetch the customer record a user has at a given shop.

    :param user_id: owning user's id
    :param shop_id: shop id
    :return: Customer instance, or None if absent
    """
    return Customer.objects.filter(user_id=user_id, shop_id=shop_id).first()
def get_customer_by_user_id_and_shop_code(user_id: int, shop_code: str):
    """Fetch the customer record a user has at the shop with this code.

    :param user_id: owning user's id
    :param shop_code: shop's public code string
    :return: Customer instance, or None if absent
    """
    return Customer.objects.filter(
        user_id=user_id, shop__shop_code=shop_code
    ).first()
def get_customer_by_customer_id_and_shop_id(
    customer_id: int,
    shop_id: int,
    with_user_info: bool = False
):
    """Fetch one customer of a shop, optionally copying user fields onto it.

    :param customer_id: customer id
    :param shop_id: shop the customer must belong to
    :param with_user_info: when True, copy each USER_OUTPUT_CONSTANT
        attribute from customer.user onto the customer instance
    :return: Customer instance (possibly enriched), or None if absent
    """
    customer = Customer.objects.filter(shop_id=shop_id, id=customer_id).first()
    if customer and with_user_info:
        # Flatten selected user attributes onto the customer for serializers.
        for key in USER_OUTPUT_CONSTANT:
            setattr(customer, key, getattr(customer.user, key))
    return customer
def list_customer_by_shop_id(
    shop_id: int,
    sort_prop: str,
    sort: str,
    keyword: str,
):
    """List a shop's customers with optional keyword filter and sorting.

    :param shop_id: shop whose customers are listed
    :param sort_prop: field name to sort by
    :param sort: "desc" for descending; anything else is ascending
    :param keyword: matched against user nickname or phone (substring)
    :return: list-like queryset of Customer, each enriched with user fields
    """
    customer_list_query = Customer.objects.filter(shop_id=shop_id)
    if keyword:
        customer_list_query = customer_list_query.filter(
            Q(user__nickname__contains=keyword) |
            Q(user__phone__contains=keyword)
        )
    if sort and sort_prop:
        order_by = sort_prop
        if sort == "desc":
            order_by = "-{}".format(sort_prop)
    else:
        # NOTE(review): default sort field is "create_date" — the Customer
        # model is not visible here; confirm the field is not "create_time".
        order_by = "create_date"
    customer_list_query = customer_list_query.order_by(order_by)
    customer_list = customer_list_query.all()
    # Flatten selected user attributes onto each customer for serializers.
    for customer in customer_list:
        for _ in USER_OUTPUT_CONSTANT:
            setattr(customer, _ , getattr(customer.user, _))
    return customer_list
def list_customer_point_by_customer_id(customer_id: int):
    """Return a customer's point-change history, newest first.

    :param customer_id: customer primary key
    :return: CustomerPoint records ordered by create_time desc, id desc
    """
    return (
        CustomerPoint.objects.filter(customer_id=customer_id)
        .order_by("-create_time", "-id")
        .all()
    )
def list_customer_ids_by_user_id(user_id: int):
    """Collect the ids of every customer record owned by a user.

    :param user_id: user primary key
    :return: list of customer ids
    """
    customers = Customer.objects.filter(user_id=user_id).all()
    return [customer.id for customer in customers]
#####################积分相关#####################
def create_customer_point(
    customer_id: int, current_point: decimal, point_change: decimal, type: int
):
    """Persist one customer point-change record.

    :param customer_id: customer primary key
    :param current_point: point balance after the change
    :param point_change: signed delta applied to the balance
    :param type: change category (see CustomerPointType)
    :return: the created CustomerPoint instance
    """
    # Fix: objects.create() already INSERTs the row; the original extra
    # save() call issued a redundant second UPDATE query.
    customer_point = CustomerPoint.objects.create(
        customer_id=customer_id,
        current_point=current_point,
        point_change=point_change,
        type=type,
    )
    return customer_point
#####################地址相关#####################
def create_mine_address(address_info: dict, user_id: int, shop_id: int):
    """Create an address for a user under a shop.

    :param address_info: address fields, e.g. {
            "name": "name",
            "sex": "1",
            "province": 420000,
            "city": 420100,
            "county": 420111,
            "address": "光谷智慧谷一栋505",
            "longitude": "90",
            "latitude": "45",
            "phone": "152xxxxxxxx",
            "default": 1
        }
    :param user_id: user primary key
    :param shop_id: shop primary key
    :return: the created MineAddress instance
    """
    # Fix: objects.create() already persists the row; the original
    # follow-up save() issued a redundant second UPDATE query.
    mine_address = MineAddress.objects.create(
        user_id=user_id, shop_id=shop_id, **address_info
    )
    return mine_address
def delete_mine_address_by_id(address_id: int, user_id: int, shop_id: int):
    """Soft-delete one of a user's addresses by flagging its status.

    :param address_id: address primary key
    :param user_id: user primary key
    :param shop_id: shop primary key
    :return: (success, error_message) tuple
    """
    address = get_mine_address_by_id(address_id, user_id, shop_id)
    if address is None:
        return False, "地址不存在"
    address.status = MineAddressStatus.DELETE
    address.save()
    return True, ""
def check_default_address(user_id: int, shop_id: int):
    """Clear the user's current default address in a shop, if one exists.

    Called before creating/updating an address marked as default, so that
    at most one default address remains per user per shop.

    :param user_id: user primary key
    :param shop_id: shop primary key
    """
    current_default = get_mine_default_address_by_user_id_and_shop_id(user_id, shop_id)
    if current_default is not None:
        current_default.default = 0
        current_default.save()
def get_mine_address_by_id(address_id: int, user_id: int, shop_id: int):
    """Fetch one address belonging to a given user and shop.

    :param address_id: address primary key
    :param user_id: user primary key
    :param shop_id: shop primary key
    :return: MineAddress instance or None
    """
    return MineAddress.objects.filter(
        id=address_id, shop_id=shop_id, user_id=user_id
    ).first()
def list_mine_address_by_user_id_and_shop_id(user_id: int, shop_id: int, filter_delete: bool = True):
    """List a user's addresses in a shop, newest first.

    :param user_id: user primary key
    :param shop_id: shop primary key (annotation fixed: an int, not a list)
    :param filter_delete: when True, hide soft-deleted addresses
    :return: MineAddress records ordered by create_at desc, id desc
    """
    query = MineAddress.objects.filter(user_id=user_id, shop_id=shop_id)
    if filter_delete:
        # Bug fix: QuerySet.exclude() returns a NEW queryset; the original
        # discarded the result, so deleted addresses were never filtered.
        query = query.exclude(status=MineAddressStatus.DELETE)
    query = query.order_by("-create_at", "-id")
    return query.all()
def get_mine_default_address_by_user_id_and_shop_id(user_id: int, shop_id: int):
    """Fetch the user's default address in a shop, if any.

    :param user_id: user primary key
    :param shop_id: shop primary key
    :return: MineAddress instance or None
    """
    query = MineAddress.objects.filter(
        user_id=user_id, shop_id=shop_id, default=MineAddressDefault.YES
    )
    return query.first()
<file_sep>/wsc_django/wsc_django/apps/promotion/events.py
""" 具体的优惠活动事件 """
from promotion.abstract import PromotionEventTemplate
class GrouponEvent(PromotionEventTemplate):
_event_type = "1"<file_sep>/wsc_django/wsc_django/utils/models.py
from django.db import models
class TimeBaseModel(models.Model):
    """Abstract base adding bookkeeping timestamps; carries no business logic and adds no indexes."""
    # Set once when the row is inserted.
    create_at = models.DateTimeField(auto_now_add=True, verbose_name="创建时间")
    # Refreshed on every save().
    update_at = models.DateTimeField(auto_now=True, verbose_name="更新时间")
    class Meta:
        abstract = True  # Abstract model used only for inheritance; migrations create no TimeBaseModel table.
<file_sep>/wsc_django/wsc_django/apps/payment/service.py
import hashlib
import json
import requests
from order.constant import OrderStatus
from order.models import Order
from order.selectors import get_order_by_num_for_update
from payment.models import OrderTransaction
from shop.models import Shop
from shop.services import get_shop_by_shop_id
from user.services import get_pay_channel_by_shop_id
from wsc_django.apps.settings import LCSW_CALLBACK_HOST, LCSW_HANDLE_HOST
from wsc_django.utils.lcsw import LcswPay
from wsc_django.utils.core import NumGenerator
def create_order_transaction(
    order_id, transaction_id, receipt_fee, channel_trade_no
):
    """Record a payment transaction for an order.

    :param order_id: order primary key
    :param transaction_id: gateway transaction id
    :param receipt_fee: amount actually received
    :param channel_trade_no: trade number assigned by the pay channel
    :return: the created OrderTransaction instance
    """
    record = OrderTransaction(
        order_id=order_id,
        transaction_id=transaction_id,
        receipt_fee=receipt_fee,
        channel_trade_no=channel_trade_no,
    )
    record.save()
    return record
def get_openid_redirect_url(shop: Shop, redirect: str):
    """Build the Lcsw authorization URL used to obtain a wx openid.

    :param shop: shop the payment belongs to
    :param redirect: final destination after the openid round-trip
    :return: (True, url) on success, (False, error_message) otherwise
    """
    ok, pay_channel = get_pay_channel_by_shop_id(shop.id)
    if not ok:
        return False, pay_channel
    callback_uri = "{host}/mall/{shop_code}/openid/lcsw/?redirect={redirect}".format(
        host=LCSW_CALLBACK_HOST,
        shop_code=shop.shop_code,
        redirect=redirect,
    )
    url = LcswPay.getAuthOpenidUrl(
        pay_channel.smerchant_no,
        pay_channel.terminal_id1,
        pay_channel.access_token,
        callback_uri,
    )
    return True, url
def handle_lcsw_callback(res_dict: dict):
    """Validate and apply an Lcsw payment callback.

    Performs, in order: communication-result check, attach-field check,
    order existence/status check, shop lookup, MD5 signature verification,
    and business-result check; on success records the transaction.

    :param res_dict: callback payload parsed from the Lcsw request
    :return: (True, order) on success
    :raises ValueError: ("LcCallBackFail", reason) on any validation failure
    """
    # Communication result check (01 success / 02 failure).
    if res_dict["return_code"] == "02":
        raise ValueError("LcCallBackFail", res_dict["return_msg"])
    # Attach-field check: must carry our fixed marker.
    if res_dict["attach"] != "SENGUOPRODUCT":
        raise ValueError("LcCallBackFail", "附加信息有误")
    # Order validity check (row is locked for update by the selector).
    num = res_dict["terminal_trace"]
    order = get_order_by_num_for_update(num)
    if not order:
        raise ValueError("LcCallBackFail", "订单不存在: {}".format(num))
    elif order.status != OrderStatus.UNPAID:
        # NOTE(review): the Order model declares `order_status`; confirm that
        # `order.status` is a valid attribute on the object returned here.
        raise ValueError("LcCallBackFail", "订单状态错误: {}".format(order.status))
    # Shop check and signature verification.
    shop_id, _ = NumGenerator.decode(num)
    shop = get_shop_by_shop_id(shop_id)
    if not shop:
        raise ValueError("LcCallBackFail", "找不到对应的店铺")
    success, pay_channel = get_pay_channel_by_shop_id(shop_id)
    if not success:
        raise ValueError("LcCallBackFail", pay_channel)
    key_sign = res_dict["key_sign"]
    str_sign = (
        LcswPay.getStrForSignOfTradeNotice(res_dict)
        + "&access_token=%s" % pay_channel.access_token
    )
    if key_sign != hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower():
        raise ValueError("LcCallBackFail", "签名有误")
    # Business result check: 01 success, 02 failure.
    result_code = res_dict["result_code"]
    if result_code == "02":
        raise ValueError("LcCallBackFail", res_dict["return_msg"])
    # TODO: consider an idempotency check for repeated callbacks.
    create_order_transaction(
        order.id,
        res_dict["out_trade_no"],
        res_dict["receipt_fee"],
        res_dict["channel_trade_no"],
    )
    return True, order
def payment_query(order: Order):
    """Query the Lcsw gateway for an order's payment state.

    :param order: order whose payment is being polled
    :return: (code, dict) where code is 0 = still querying, 1 = error,
        2 = paid. The dict always contains out_trade_no/channel_trade_no
        after a successful HTTP round-trip; codes 0/1 add "msg", code 2
        adds "total_fee".
    """
    success, pay_channel = get_pay_channel_by_shop_id(order.shop.id)
    if not success:
        return 1, pay_channel
    pay_type = "010"
    params = LcswPay.getQueryParas(
        pay_type,
        order.order_num,
        "",
        pay_channel.smerchant_no,
        pay_channel.terminal_id1,
        pay_channel.access_token,
        pay_trace=order.order_num,
        pay_time=order.create_time.strftime("%Y%m%d%H%M%S"),
    )
    ret_dict = {}
    try:
        r = requests.post(
            LCSW_HANDLE_HOST + "/pay/100/query",
            data=json.dumps(params),
            verify=False,
            headers={"content-type": "application/json"},
            timeout=(1, 5),
        )
        res_dict = json.loads(r.text)
    except BaseException:
        # Network/parse failure: report "still querying" so callers retry.
        ret_dict["msg"] = "正在查询支付结果,请稍候(LCER1)..."
        return 0, ret_dict
    ret_dict["out_trade_no"] = res_dict.get("out_trade_no", "")
    ret_dict["channel_trade_no"] = res_dict.get("channel_trade_no", "")
    # return_code: 01 success, 02 failure — communication status only,
    # NOT the business result.
    if res_dict["return_code"] == "02":
        if res_dict["return_msg"] == "订单信息不存在!":
            ret_dict["msg"] = "等待用户付款中,请提醒用户在手机上完成支付..."
            return 0, ret_dict
        else:
            ret_dict["msg"] = res_dict["return_msg"]
            return 1, ret_dict
    key_sign = res_dict["key_sign"]
    # Gateway signs None values as the literal "null"; normalize before
    # recomputing the signature string.
    for key in res_dict:
        if res_dict[key] is None:
            res_dict[key] = "null"
    str_sign = LcswPay.getStrForSignOfQueryRet(res_dict)
    if key_sign != hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower():
        ret_dict["msg"] = "签名错误"
        return 1, ret_dict
    # result_code (business result): 01 success, 02 failure, 03 paying.
    result_code = res_dict["result_code"]
    if result_code == "02":
        ret_dict["msg"] = res_dict["return_msg"]
        return 1, ret_dict
    elif result_code == "03":
        ret_dict["msg"] = "等待用户付款中,请提醒用户在手机上完成支付..."
        return 0, ret_dict
    else:
        ret_dict["total_fee"] = int(res_dict["total_fee"])
        return 2, ret_dict
def get_wx_jsApi_pay(order: Order, wx_openid: str):
    """Pre-create a WeChat JSAPI payment via Lcsw and return the JS params.

    :param order: order to pay for
    :param wx_openid: payer's WeChat openid
    :return: (True, params_dict) on success where params_dict holds the
        fields wx.chooseWXPay expects; (False, error_message) otherwise.
    """
    shop = get_shop_by_shop_id(order.shop.id)
    success, pay_channel = get_pay_channel_by_shop_id(order.shop.id)
    if not success:
        return False, pay_channel
    body = "{}-订单号-{}".format(shop.shop_name, order.order_num)
    notify_url = "{}/payment/lcsw/callback/order/".format(LCSW_CALLBACK_HOST)
    parameters = LcswPay.getJspayParas(
        order.order_num,
        wx_openid,
        order.create_time.strftime("%Y%m%d%H%M%S"),
        # Amount is sent to the gateway in fen (cents).
        int(round(order.total_amount_net * 100)),
        body,
        notify_url,
        pay_channel.smerchant_no,
        pay_channel.terminal_id1,
        pay_channel.access_token,
    )
    try:
        r = requests.post(
            LCSW_HANDLE_HOST + "/pay/100/jspay",
            data=json.dumps(parameters),
            verify=False,
            headers={"content-type": "application/json"},
            timeout=(1, 5),
        )
        res_dict = json.loads(r.text)
    except BaseException:
        return False, "微信支付预下单失败:接口超时或返回异常(LC)"
    # return_code: 01 success, 02 failure — communication status only,
    # NOT the business result.
    if res_dict["return_code"] == "02":
        return (
            False,
            "微信支付通信失败:{msg}".format(msg=res_dict["return_msg"]),
        )
    key_sign = res_dict["key_sign"]
    str_sign = LcswPay.getStrForSignOfJspayRet(res_dict)
    if key_sign != hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower():
        return False, "微信支付校验失败:签名错误(LC)"
    # result_code (business result): 01 success, 02 failure.
    result_code = res_dict["result_code"]
    if result_code == "02":
        return (False, "微信支付业务失败:{msg}".format(msg=res_dict["return_msg"]))
    renderPayParams = {
        "appId": res_dict["appId"],
        "timeStamp": res_dict["timeStamp"],
        "nonceStr": res_dict["nonceStr"],
        "package": res_dict["package_str"],
        "signType": res_dict["signType"],
        "paySign": res_dict["paySign"],
    }
    return True, renderPayParams
def get_order_transaction_by_order_id(order_id: int):
    """Fetch the transaction record of an order.

    :param order_id: order primary key
    :return: OrderTransaction instance or None
    """
    return OrderTransaction.objects.filter(order_id=order_id).first()
<file_sep>/wsc_django/wsc_django/apps/product/services.py
from collections import defaultdict
from django.db.models import Count, Q
from logs.constant import ProductLogType
from logs.services import create_product_log
from promotion.constant import PromotionType
from promotion.services import get_product_promotion, set_product_promotion, list_product_promotions
from shop.models import Shop
from product.models import ProductGroup, Product, ProductPicture
from product.constant import (
ProductGroupDefault,
ProductOperationType,
ProductStatus,
)
from storage.services import create_product_storage_record
################# 货品相关 ##################
def create_product(product_info: dict, user_id: int):
    """Create a product and log the "add product" operation.

    :param product_info: product fields, e.g. {"name": "apple",
        "group_id": 1, "price": 12.3, "code": "apple123",
        "summary": "...", "pictures": [...], "description": "...",
        "cover_image_url": "...", "shop_id": 104}
    :param user_id: id of the acting user, recorded in the log
    :return: the created Product instance
    """
    product = Product(**product_info)
    product.save()
    # Record who added the product for the shop's audit trail.
    create_product_log({
        "shop_id": product.shop.id,
        "operator_id": user_id,
        "operate_type": ProductLogType.ADD_PRODUCT,
        "operate_content": product.name,
    })
    return product
def create_product_pictures(product_id: int, image_url_list: list):
    """Bulk-create carousel pictures for a product.

    :param product_id: product primary key
    :param image_url_list: picture URLs to attach
    :return: list of created ProductPicture instances
    """
    pictures = [
        ProductPicture(product_id=product_id, image_url=url)
        for url in image_url_list
    ]
    return ProductPicture.objects.bulk_create(pictures)
def update_product_storage_and_create_record(
    product: Product,
    user_id: int,
    change_storage: int,
    record_type: int,
    operator_type: int,
    order_num=None,
):
    """Apply a storage delta to a product and persist a change record.

    :param product: product to adjust (saved in place)
    :param user_id: id of the acting user
    :param change_storage: signed storage delta
    :param record_type: change reason (manual 1 / sold 2 / cancel 3 / refund 4)
    :param operator_type: actor category (staff or customer)
    :param order_num: related order number, if any
    :return: the created storage record
    """
    product.storage += change_storage
    if product.storage <= 0:
        # Out of stock: take the product off the shelf automatically.
        product.status = ProductStatus.OFF
    product.save()
    record_info = {
        "shop_id": product.shop.id,
        "product_id": product.id,
        "operator_type": operator_type,
        "user_id": user_id,
        "type": record_type,
        "change_storage": change_storage,
        "current_storage": product.storage,
    }
    if order_num:
        record_info["order_num"] = order_num
    return create_product_storage_record(record_info)
def update_product_storage_and_no_record(
    product: Product,
    user_id: int,
    change_storage: int,
    record_type: int,
    operator_type: int,
    order_num=None,
):
    """Apply a storage delta to a product WITHOUT persisting a record.

    The change-record payload is built and returned so the caller can
    persist it later (e.g. in bulk).

    :param product: product to adjust (saved in place)
    :param user_id: id of the acting user
    :param change_storage: signed storage delta
    :param record_type: change reason (manual 1 / sold 2 / cancel 3 / refund 4)
    :param operator_type: actor category (staff or customer)
    :param order_num: related order number, if any
    :return: dict describing the storage change (not yet saved)
    """
    product.storage += change_storage
    if product.storage <= 0:
        # Out of stock: take the product off the shelf automatically.
        product.status = ProductStatus.OFF
    product.save()
    record_info = {
        "shop_id": product.shop.id,
        "product_id": product.id,
        "operator_type": operator_type,
        "user_id": user_id,
        "type": record_type,
        "change_storage": change_storage,
        "current_storage": product.storage,
    }
    if order_num:
        record_info["order_num"] = order_num
    return record_info
def update_product_product_group_by_ids(product_ids: list, group_id):
    """Move the given products into another group in one bulk UPDATE.

    :param product_ids: ids of the products to move
    :param group_id: destination group id
    """
    products = Product.objects.filter(id__in=product_ids)
    products.update(group_id=group_id)
def update_products_status(
    product_list: list, operation_type: int, product_ids_set: set
):
    """Batch switch products on/off the shelf.

    :param product_list: products to update
    :param operation_type: ProductOperationType.ON (上架) or OFF (下架)
    :param product_ids_set: ids of products that may be in an active
        groupon and therefore must not be taken off the shelf
    :return: names of the products actually updated
    """
    if operation_type == ProductOperationType.ON:
        status = ProductStatus.ON
    else:
        status = ProductStatus.OFF
    product_name_list = []
    for p in product_list:
        # Bug fix: the original compared the target *status* (a
        # ProductStatus) against ProductOperationType constants; the
        # guards must be driven by the requested operation instead.
        # Skip out-of-stock products when putting items on the shelf.
        if operation_type == ProductOperationType.ON and p.storage <= 0:
            continue
        # Skip products possibly in a groupon when taking items off.
        if operation_type == ProductOperationType.OFF and p.id in product_ids_set:
            continue
        p.status = status
        p.save()
        product_name_list.append(p.name)
    return product_name_list
def delete_product_picture_by_product_id(product_id: int):
    """Hard-delete every carousel picture belonging to a product.

    :param product_id: product primary key
    """
    pictures = ProductPicture.objects.filter(product_id=product_id)
    pictures.delete()
def delete_product_by_ids_and_shop_id(
    product_list: list, product_ids_set: set
):
    """Soft-delete products, skipping any that may be in a groupon.

    :param product_list: products to delete
    :param product_ids_set: ids of products attached to an active groupon
    :return: names of the products actually deleted
    """
    deleted_names = []
    for product in product_list:
        # Products possibly in a running groupon must stay untouched.
        if product.id in product_ids_set:
            continue
        product.status = ProductStatus.DELETED
        product.save()
        deleted_names.append(product.name)
    return deleted_names
def sort_shop_product_group(shop_id: int, group_ids: list):
    """Re-order a shop's product groups to match the given id sequence.

    :param shop_id: shop primary key
    :param group_ids: group ids in the desired display order
    """
    groups = list_product_group_by_shop_id(shop_id)
    by_id = {group.id: group for group in groups}
    for position, group_id in enumerate(group_ids):
        group = by_id.get(group_id)
        # Guard against ids of groups deleted since the client loaded them.
        if group is None:
            continue
        group.sort = position
        group.save()
def get_product_by_id(
    shop_id: int,
    product_id: int,
    with_picture: bool = False,
    filter_delete: bool = True,
):
    """Fetch a single product of a shop, optionally with carousel pictures.

    :param shop_id: shop primary key
    :param product_id: product primary key
    :param with_picture: when True, attach `pictures` (list of URLs)
    :param filter_delete: when True, treat soft-deleted products as absent
    :return: Product instance (always carrying a `pictures` attribute) or None
    """
    product_query = Product.objects.filter(id=product_id, shop_id=shop_id)
    # Fix: apply the exclusion based on the flag alone. The original's
    # `if product_query and filter_delete` evaluated the queryset just to
    # test truthiness, costing an extra DB query with no effect on the
    # final result (an empty queryset stays empty either way).
    if filter_delete:
        product_query = product_query.exclude(status=ProductStatus.DELETED)
    product = product_query.first()
    if product:
        if with_picture:
            product_pictures = ProductPicture.objects.filter(product_id=product_id).all()
            product.pictures = [pp.image_url for pp in product_pictures]
        else:
            product.pictures = []
    return product
def get_product_with_group_name(shop_id: int, product_id: int):
    """Fetch a product (with pictures) and attach promotion + group info.

    :param shop_id: shop primary key
    :param product_id: product primary key
    :return: Product instance with group_name/group_id set, or None
    """
    product = get_product_by_id(shop_id, product_id, with_picture=True)
    if not product:
        return product
    # Attach any running promotion for this product.
    promotion = get_product_promotion(shop_id, product.id)
    set_product_promotion(product, promotion)
    group = get_product_group_by_shop_id_and_id(shop_id, product.group_id)
    product.group_name = group.name
    product.group_id = group.id
    return product
def list_product_by_ids(shop_id: int, product_ids: list, filter_delete: bool = True):
    """List a shop's products restricted to the given ids.

    :param shop_id: shop primary key
    :param product_ids: product ids to include
    :param filter_delete: when True, hide soft-deleted products
    :return: queryset of Product instances
    """
    query = Product.objects.filter(shop_id=shop_id, id__in=product_ids)
    if filter_delete:
        query = query.exclude(status=ProductStatus.DELETED)
    return query.all()
def list_product_by_group_id_and_shop_id(
    shop_id: int, group_id: int, filter_delete: bool = True
):
    """List a shop's products belonging to one group.

    :param shop_id: shop primary key
    :param group_id: product group primary key
    :param filter_delete: when True, hide soft-deleted products
    :return: queryset of Product instances
    """
    query = Product.objects.filter(group_id=group_id, shop_id=shop_id)
    if filter_delete:
        query = query.exclude(status=ProductStatus.DELETED)
    return query.all()
def list_product_by_filter(
    shop_id: int,
    status: list,
    keyword: str,
    group_id: int,
    promotion_types: list = None
):
    """List a shop's products filtered by status, keyword, group and promotion type.

    :param shop_id: shop primary key
    :param status: list of product statuses to include
    :param keyword: matched against product name or its acronym
    :param group_id: group filter; values <= 0 mean "all groups"
    :param promotion_types: optional PromotionType values to keep; products
        without a promotion count as PromotionType.NORMAL
    :return: products ordered by status then id desc, each carrying a
        `promotion_type` attribute
    """
    product_list_query = Product.objects.filter(shop_id=shop_id, status__in=status)
    if keyword:
        product_list_query = product_list_query.filter(
            Q(name__contains=keyword) |
            Q(name_acronym__contains=keyword)
        )
    if group_id > 0:
        product_list_query = product_list_query.filter(group_id=group_id)
    # Promotion-type filtering is translated into an id-based filter.
    map_product_promotions = list_product_promotions(shop_id)
    if promotion_types:
        # Bucket promoted product ids by their promotion type.
        map_promotion_products = defaultdict(set)
        for product_id, promotion in map_product_promotions.items():
            map_promotion_products[int(promotion._event_type)].add(
                product_id
            )  # promotion._event_type is a str
        # Products with no promotion belong to the NORMAL bucket.
        all_product_id = list_product_ids_by_shop_id(shop_id, status)
        normal_product_ids = set(all_product_id) - set(map_product_promotions.keys())
        map_promotion_products[PromotionType.NORMAL] = normal_product_ids
        groupon_product_ids = set()
        for promotion_type in promotion_types:
            groupon_product_ids.update(
                map_promotion_products.get(promotion_type, set())
            )
        product_list_query = product_list_query.filter(id__in=groupon_product_ids)
    product_list_query = product_list_query.order_by('status', '-id')
    product_list = product_list_query.all()
    # Attach the promotion type to each product for the response.
    for product in product_list:
        promotion = map_product_promotions.get(product.id)
        promotion_type = (
            int(promotion._event_type) if promotion else PromotionType.NORMAL
        )
        assert promotion_type in [PromotionType.NORMAL, PromotionType.GROUPON]
        product.promotion_type = promotion_type
    return product_list
def list_product_by_shop_id(shop_id: int, status=None):
    """List every product of a shop with promotion info attached.

    :param shop_id: shop primary key
    :param status: single status (int) or list of statuses; None means all
    :return: queryset of Product instances
    """
    query = Product.objects.filter(shop_id=shop_id)
    if isinstance(status, int):
        query = query.filter(status=status)
    elif isinstance(status, list):
        query = query.filter(status__in=status)
    products = query.all()
    # Attach promotion info to every product.
    promotions = list_product_promotions(shop_id)
    for product in products:
        set_product_promotion(product, promotions.get(product.id))
    return products
def list_product_ids_by_shop_id(shop_id: int, status: list):
    """Return the ids of a shop's products having any of the given statuses.

    :param shop_id: shop primary key
    :param status: list of ProductStatus values to include
    :return: list of product ids
    """
    # values_list(..., flat=True) yields the ids directly, avoiding the
    # per-row dict construction of the original values("id") approach.
    ids = Product.objects.filter(
        shop_id=shop_id, status__in=status
    ).values_list("id", flat=True)
    # Materialize so callers can build sets from the result.
    return list(ids)
##################### 货品分组相关 #####################
def create_product_group(shop_id: int, user_id: int, product_group_info: dict):
    """Create a product group for a shop and log the operation.

    :param shop_id: shop primary key
    :param user_id: id of the acting user, recorded in the log
    :param product_group_info: e.g. {"name": "分组1", "description": "描述"}
    :return: the created ProductGroup instance
    """
    group = ProductGroup.objects.create(shop_id=shop_id, **product_group_info)
    group.set_default_sort()
    group.save()
    # Audit-trail entry for the new group.
    create_product_log({
        "shop_id": group.shop_id,
        "operator_id": user_id,
        "operate_type": ProductLogType.ADD_PRODUCT_GROUP,
        "operate_content": group.name,
    })
    return group
def create_default_group_by_shop(shop: Shop):
    """Create the shop's default product group ("默认分组").

    :param shop: Shop instance the group belongs to
    :return: the created default ProductGroup instance
    """
    group = ProductGroup.objects.create(
        shop=shop, name="默认分组", default=ProductGroupDefault.YES
    )
    group.set_default_sort()
    group.save()
    return group
def update_product_group(
    product_group: ProductGroup,
    user_id: int,
    shop_id: int,
    name: str,
    description: str,
):
    """Edit a product group's name and description, logging a rename.

    :param product_group: group to edit
    :param user_id: id of the acting user, recorded in the log
    :param shop_id: shop primary key
    :param name: new group name
    :param description: new group description
    :return: the updated ProductGroup instance
    """
    # Bug fix: the original computed `change = bool(product_group.name == name)`
    # AFTER assigning the new name, so the comparison was always True and
    # every edit was logged as a rename. Capture the change first.
    name_changed = product_group.name != name
    product_group.name = name
    product_group.description = description
    product_group.save()
    if name_changed:
        log_info = {
            "shop_id": shop_id,
            "operator_id": user_id,
            "operate_type": ProductLogType.UPDATE_PRODUCT_GROUP,
            "operate_content": name,
        }
        create_product_log(log_info)
    return product_group
def delete_product_group_by_id_and_shop_id(
    product_group: ProductGroup, group_id: int, shop_id: int, user_id: int
):
    """Delete a product group, moving its products to the default group.

    :param product_group: group to delete (must not be the default group)
    :param group_id: the group's id
    :param shop_id: shop primary key
    :param user_id: id of the acting user, recorded in the log
    :return: (success, error_message) tuple
    """
    if product_group.default == ProductGroupDefault.YES:
        return False, "默认分组不可删除"
    # Re-home every product of the group, including soft-deleted ones.
    products = list_product_by_group_id_and_shop_id(
        shop_id, group_id, filter_delete=False
    )
    default_group = get_default_product_by_shop_id(shop_id)
    products.update(group_id=default_group.id)
    product_group.delete()
    # Audit-trail entry for the deletion.
    create_product_log({
        "shop_id": shop_id,
        "operator_id": user_id,
        "operate_type": ProductLogType.DELETE_PRODUCT_GROUP,
        "operate_content": product_group.name,
    })
    return True, ""
def get_product_group_by_shop_id_and_id(shop_id: int, group_id: int):
    """Fetch one product group of a shop.

    :param shop_id: shop primary key
    :param group_id: group primary key
    :return: ProductGroup instance or None
    """
    return ProductGroup.objects.filter(shop_id=shop_id, id=group_id).first()
def get_default_product_by_shop_id(shop_id: int):
    """Fetch a shop's default product group.

    :param shop_id: shop primary key
    :return: the default ProductGroup instance or None
    """
    query = ProductGroup.objects.filter(
        shop_id=shop_id, default=ProductGroupDefault.YES
    )
    return query.first()
def count_product_by_shop_ids(shop_ids: list):
    """Count non-deleted products per shop.

    :param shop_ids: shop primary keys; every id appears in the result,
        with 0 for shops that have no products
    :return: dict mapping shop id -> product count
    """
    counts = {shop_id: 0 for shop_id in shop_ids}
    # values_list("shop_id", flat=True) reads the FK column directly,
    # avoiding full Product instantiation and the per-row lazy fetch of
    # the related Shop that the original `product.shop.id` could trigger.
    shop_id_rows = (
        Product.objects.filter(shop_id__in=shop_ids)
        .exclude(status=ProductStatus.DELETED)
        .values_list("shop_id", flat=True)
    )
    for sid in shop_id_rows:
        counts[sid] += 1
    return counts
def list_product_group_by_shop_id(shop_id: int):
    """List every product group of a shop in display order.

    :param shop_id: shop primary key
    :return: ProductGroup records ordered by their `sort` field
    """
    return ProductGroup.objects.filter(shop_id=shop_id).order_by('sort').all()
def list_product_group_with_product_count(shop_id: int, status: list):
    """List a shop's groups, each annotated with its product count.

    :param shop_id: shop primary key
    :param status: product statuses that count toward each group's total
    :return: ProductGroup instances carrying a `product_count` attribute
    """
    groups = list_product_group_by_shop_id(shop_id)
    # Count matching products per group; groups without matches are
    # absent from this queryset and default to 0 below.
    counted = (
        ProductGroup.objects.filter(shop_id=shop_id, product__status__in=status)
        .order_by('sort')
        .annotate(count=Count("product"))
        .all()
    )
    count_by_id = {group.id: group.count for group in counted}
    for group in groups:
        group.product_count = count_by_id.get(group.id, 0)
    return groups
def list_product_group_with_product_list(
shop_id: int, status: int = ProductStatus.ON
):
"""
通过商店ID查询所有分组,并且将分组下的货品信息挂载
:param shop_id:
:param status:
:return:
"""
# 查询分组信息
product_group_list = list_product_group_by_shop_id(shop_id)
product_list = list_product_by_shop_id(shop_id, status)
product_group_index_dict = {}
for index, pgl in enumerate(product_group_list):
product_group_index_dict[pgl.id] = index
pgl.products = []
for pl in product_list:
product_group_list[product_group_index_dict[pl.group_id]].products.append(pl)
return product_group_list<file_sep>/wsc_django/wsc_django/apps/config/services.py
from config.constant import PrinterStatus, ShareSetUpTemplate
from config.models import Receipt, Printer, ShareSetup, SomeConfig, MsgNotify
from shop.models import Shop
def create_receipt_by_shop(shop_id: int):
    """Create the default receipt settings for a shop (pk == shop id).

    :param shop_id: shop primary key
    :return: the created Receipt instance
    """
    receipt = Receipt(id=shop_id)
    receipt.save()
    return receipt
def create_share_setup(shop_id: int, shop_name: str):
    """Create a shop's sharing configuration from the template constants.

    :param shop_id: shop primary key (used as the ShareSetup pk)
    :param shop_name: interpolated into the share title template
    :return: the created ShareSetup instance
    """
    share_setup = ShareSetup(
        id=shop_id,
        custom_title_name=ShareSetUpTemplate.CUSTOM_TITLE_NAME.format(shop_name=shop_name),
        custom_share_description=ShareSetUpTemplate.CUSTOM_SHARE_DESCRIPTION,
    )
    share_setup.save()
    return share_setup
def create_some_config_by_shop_id(shop_id: int):
    """Create a shop's miscellaneous configuration row (pk == shop id).

    :param shop_id: shop primary key
    :return: the created SomeConfig instance
    """
    some_config = SomeConfig(**{"id": shop_id})
    some_config.save()
    return some_config
def create_printer_by_shop_id(shop_id: int, printer_info: dict):
    """Create or update the (single) printer of a shop.

    :param shop_id: shop primary key
    :param printer_info: {
            "brand": 1,
            "code": "xxxxxxxxx",
            "key": "<KEY>",
            "auto_print": 1,
        }
    :return: the created or updated Printer instance
    """
    printer = get_printer_by_shop_id(shop_id)
    if not printer:
        printer_info["shop_id"] = shop_id
        printer = Printer(**printer_info)
    else:
        for k, v in printer_info.items():
            setattr(printer, k, v)
    # Bug fix: the original only saved newly-created printers; updates to
    # an existing printer were applied in memory but never persisted.
    printer.save()
    return printer
def create_receipt_by_shop_id(shop_id: int):
    """Create the default receipt settings for a shop (pk == shop id).

    NOTE(review): duplicates create_receipt_by_shop above — consider
    consolidating the two helpers.

    :param shop_id: shop primary key
    :return: the created Receipt instance
    """
    receipt = Receipt(id=shop_id)
    receipt.save()
    return receipt
def create_msg_notify_by_shop_id(shop_id: int):
    """Create a shop's message-notification settings row (pk == shop id).

    :param shop_id: shop primary key
    :return: the created MsgNotify instance
    """
    msg_notify = MsgNotify(id=shop_id)
    msg_notify.save()
    return msg_notify
def update_share_setup(shop_id: int, args: dict):
    """Update fields of a shop's sharing configuration.

    :param shop_id: shop primary key
    :param args: field name -> new value
    :return: the updated ShareSetup instance
    """
    share_setup = get_share_setup_by_id(shop_id)
    for field, value in args.items():
        setattr(share_setup, field, value)
    share_setup.save()
    return share_setup
def update_some_config_by_shop_id(shop_id: int, new_config: dict):
    """Update fields of a shop's miscellaneous configuration.

    :param shop_id: shop primary key
    :param new_config: field name -> new value
    :return: the updated SomeConfig instance
    """
    some_config = get_some_config_by_shop_id(shop_id)
    for field, value in new_config.items():
        setattr(some_config, field, value)
    some_config.save()
    return some_config
def update_receipt_by_shop_id(shop_id: int, receipt_info):
    """Update a shop's receipt settings, creating them if absent.

    :param shop_id: shop primary key
    :param receipt_info: field name -> new value
    :return: the updated Receipt instance
    """
    receipt = get_receipt_by_shop_id(shop_id) or create_receipt_by_shop_id(shop_id)
    for field, value in receipt_info.items():
        setattr(receipt, field, value)
    receipt.save()
    return receipt
def update_msg_notify_by_shop_id(shop_id: int, msg_notify_info: dict):
    """Update a shop's message-notification settings.

    :param shop_id: shop primary key
    :param msg_notify_info: field name -> new value
    :return: the updated MsgNotify instance
    """
    msg_notify = get_msg_notify_by_shop_id(shop_id)
    for field, value in msg_notify_info.items():
        setattr(msg_notify, field, value)
    msg_notify.save()
    return msg_notify
def list_msg_notify_fields():
    """List the configurable attribute names of MsgNotify.

    Skips private/dunder names and the bookkeeping columns
    (id/create_at/update_at).

    :return: list of field names
    """
    skipped = ["id", "create_at", "update_at"]
    return [
        name
        for name in vars(MsgNotify).keys()
        if not name.startswith("_") and name not in skipped
    ]
def get_some_config_by_shop_id(shop_id: int):
    """Fetch a shop's miscellaneous configuration, creating it if missing.

    :param shop_id: shop primary key
    :return: SomeConfig instance (never None)
    """
    config = SomeConfig.objects.filter(id=shop_id).first()
    if config is None:
        config = create_some_config_by_shop_id(shop_id)
    return config
def get_printer_by_shop_id(shop_id: int, filter_delete: bool = True):
    """Fetch a shop's printer.

    :param shop_id: shop primary key
    :param filter_delete: when True, ignore printers flagged as deleted
    :return: Printer instance or None
    """
    query = Printer.objects.filter(shop_id=shop_id)
    if filter_delete:
        query = query.exclude(status=PrinterStatus.DELETE)
    return query.first()
def get_receipt_by_shop_id(shop_id: int):
    """Fetch a shop's receipt settings (pk == shop id).

    :param shop_id: shop primary key
    :return: Receipt instance or None
    """
    return Receipt.objects.filter(id=shop_id).first()
def get_share_setup_by_id(shop_id: int):
    """Fetch a shop's sharing configuration (pk == shop id).

    :param shop_id: shop primary key
    :return: ShareSetup instance or None
    """
    return ShareSetup.objects.filter(id=shop_id).first()
def get_msg_notify_by_shop_id(shop_id: int):
"""
获取一个店铺的消息通知设置
:param shop_id:
:return:
"""
msg_notify = MsgNotify.objects.filter(id=shop_id).first()
if not msg_notify:
msg_notify = create_msg_notify_by_shop_id(shop_id)
return msg_notify<file_sep>/wsc_django/wsc_django/apps/order/models.py
import datetime
from django.db import models
from customer.models import Customer
from delivery.models import Delivery
from groupon.models import GrouponAttend
from product.models import Product
from shop.models import Shop
from order.constant import (
OrderDeliveryMethod,
OrderStatus,
OrderPayType,
OrderType,
OrderRefundType,
)
from user.constant import Sex
from wsc_django.utils.core import FormatAddress
from wsc_django.utils.models import TimeBaseModel
class Order(TimeBaseModel):
"""订单模型类"""
shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="订单对应的店铺对象")
customer = models.ForeignKey(Customer, on_delete=models.CASCADE, null=False, verbose_name="订单对应客户对象")
groupon_attend = models.ForeignKey(GrouponAttend, on_delete=models.CASCADE, null=True, verbose_name="订单对应拼团参与对象")
create_date = models.DateField(null=False, auto_now_add=True, verbose_name="下单日期")
create_time = models.DateTimeField(null=False, auto_now_add=True, verbose_name="下单时间")
delivery = models.ForeignKey(Delivery, null=True, on_delete=models.CASCADE, verbose_name="订单对应配送记录对象")
delivery_method = models.SmallIntegerField(
null=False,
default=OrderDeliveryMethod.HOME_DELIVERY,
verbose_name="配送方式,1:送货上门,2:客户自提",
)
delivery_period = models.CharField(max_length=32, null=False, verbose_name="自提处理时段")
order_num = models.CharField(max_length=20, null=False, unique=True, verbose_name="订单号")
order_status = models.SmallIntegerField(
null=False,
default=OrderStatus.UNPAID,
verbose_name="订单状态,具体见constant",
)
remark = models.CharField(max_length=64, default="", verbose_name="订单备注")
pay_type = models.SmallIntegerField(
null=False,
default=OrderPayType.ON_DELIVERY,
verbose_name="订单支付方式",
)
order_type = models.SmallIntegerField(
null=False,
default=OrderType.NORMAL,
verbose_name="订单类型,1:普通订单,2:拼团订单",
)
amount_gross = models.DecimalField(
max_digits=13,
decimal_places=4,
null=False,
verbose_name="货款金额(优惠前)"
)
amount_net = models.DecimalField(
max_digits=13,
decimal_places=4,
null=False,
verbose_name="货款金额(优惠后)"
)
delivery_amount_gross = models.DecimalField(
max_digits=13,
decimal_places=4,
null=False,
verbose_name="货款金额运费(优惠前)",
)
delivery_amount_net = models.DecimalField(
max_digits=13,
decimal_places=4,
null=False,
verbose_name="货款金额运费(优惠后)",
)
total_amount_gross = models.DecimalField(
max_digits=13,
decimal_places=4,
null=False,
verbose_name="订单金额(优惠前)"
)
total_amount_net = models.DecimalField(
max_digits=13,
decimal_places=4,
null=False,
verbose_name="订单金额(优惠后)"
)
refund_type = models.SmallIntegerField(
null=False,
default=OrderRefundType.UNDERLINE_REFUND,
verbose_name="订单退款方式",
)
class Meta:
db_table = "order"
verbose_name = "订单"
verbose_name_plural = verbose_name
def set_num(self, order_num: str):
"""设置订单号"""
self.order_num = order_num
@property
def delivery_period_text(self):
"""返回一个配送详情"""
if self.delivery_method == OrderDeliveryMethod.CUSTOMER_PICK:
day, period = self.delivery_period.split(" ")
if day == datetime.date.today().strftime("%Y-%m-%d"):
result = "今天 {}".format(period)
elif day == (datetime.date.today() + datetime.timedelta(1)).strftime(
"%Y-%m-%d"
):
result = "明天 {}".format(period)
else:
result = self.delivery_period
else:
result = self.delivery_period
return result
@property
def delivery_amount_text(self):
    """Label of the delivery fee: 配送费 for home delivery, 服务费 otherwise."""
    is_home_delivery = self.delivery_method == OrderDeliveryMethod.HOME_DELIVERY
    return "配送费" if is_home_delivery else "服务费"
@property
def pay_type_text(self):
    """Label of the payment method: 微信支付 for weixin JSAPI, else 货到付款."""
    return "微信支付" if self.pay_type == OrderPayType.WEIXIN_JSAPI else "货到付款"
class OrderDetail(TimeBaseModel):
    """订单详情模型类 -- one row per product line of an Order.

    Mirrors the parent order's status/pay_type/refund_type so the detail
    rows can be queried without a join.
    """

    order = models.ForeignKey(Order, related_name="order_detail", on_delete=models.CASCADE, null=False, verbose_name="对应的订单对象")
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="对应的店铺对象")
    product = models.ForeignKey(Product, on_delete=models.CASCADE, null=False, verbose_name="对应的货品对象")
    customer = models.ForeignKey(Customer, on_delete=models.CASCADE, null=False, verbose_name="订单对应客户对象")
    create_date = models.DateField(null=False, auto_now_add=True, verbose_name="下单日期")
    # *_gross = before promotions, *_net = after promotions.
    quantity_gross = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="量(优惠前)")
    quantity_net = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="量(优惠后)")
    price_gross = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="单价(优惠前)")
    price_net = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="单价(优惠后)")
    amount_gross = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="金额(优惠前)")
    amount_net = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="金额(优惠后)")
    status = models.SmallIntegerField(null=False, verbose_name="订单状态,同order")
    pay_type = models.SmallIntegerField(null=False, verbose_name="支付方式,同order")
    refund_type = models.SmallIntegerField(null=True, verbose_name="退款方式,同order")
    promotion_type = models.SmallIntegerField(
        null=False,
        # BUGFIX: default was "" (a str) on an integer column; the detail
        # builders in order/services set 0 for normal lines, so 0 is the
        # correct "no promotion" default.
        default=0,
        verbose_name="活动类型(预留)",
    )

    class Meta:
        db_table = "order_detail"
        verbose_name = "订单详情"
        verbose_name_plural = verbose_name
class OrderAddress(TimeBaseModel):
    """订单地址模型类 -- the delivery address snapshot of one order."""

    order = models.ForeignKey(Order, on_delete=models.CASCADE, null=False, verbose_name="对应的订单对象")
    province = models.IntegerField(verbose_name="省份编码")
    city = models.IntegerField(verbose_name="城市编码")
    county = models.IntegerField(verbose_name="区编码")
    address = models.CharField(max_length=64, null=False, verbose_name="详细地址")
    added = models.CharField(max_length=50, null=True, verbose_name="补充说明")
    name = models.CharField(max_length=32, null=False, verbose_name="收件人姓名")
    sex = models.SmallIntegerField(null=False, default=Sex.UNKNOWN, verbose_name="收件人性别,0:未知1:男2:女")
    phone = models.CharField(max_length=32, default="", verbose_name="收件人手机号")

    class Meta:
        db_table = "order_address"
        verbose_name = "订单地址"
        verbose_name_plural = verbose_name

    @property
    def full_address(self):
        """Full formatted address (province/city/county codes resolved)."""
        return FormatAddress.get_format_address(
            self.province, self.city, self.county, self.address
        )

    @property
    def sex_text(self):
        """Salutation for the recipient: 先生 / 女士 / 未知."""
        salutations = {Sex.MALE: "先生", Sex.FEMALE: "女士"}
        return salutations.get(self.sex, "未知")
from django.db import models
from order.models import Order
from wsc_django.utils.models import TimeBaseModel
class OrderTransaction(TimeBaseModel):
    """订单在线支付信息模型类 -- online payment record attached to an order."""
    order = models.ForeignKey(Order, on_delete=models.CASCADE, null=False, verbose_name="对应订单对象")
    # Amount actually received; unit not shown here -- presumably cents,
    # matching the payment-gateway convention used in order/services; verify.
    receipt_fee = models.IntegerField(null=False, verbose_name="实际支付金额")
    transaction_id = models.CharField(max_length=64, null=False, verbose_name="支付交易单号")
    channel_trade_no = models.CharField(max_length=64, null=False, verbose_name="支付通道的支付单号")

    class Meta:
        db_table = "order_transaction"
        verbose_name = "订单在线支付信息"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/shop/interface.py
from config.services import get_some_config_by_shop_id, get_share_setup_by_id
from delivery.services import get_delivery_config_by_shop_id
from product.services import count_product_by_shop_ids
from staff.services import list_staff_by_user_id
from user.services import get_user_by_id, list_user_by_ids
from customer.services import get_customer_by_user_id_and_shop_id
def get_user_by_id_interface(user_id: int) -> object:
    """Cross-app facade over user.services.get_user_by_id."""
    return get_user_by_id(user_id)
def get_customer_by_user_id_and_shop_id_interface(user_id: int, shop_id: int) -> object:
    """Cross-app facade: fetch the shop-scoped customer for a user.

    :param user_id: user primary key
    :param shop_id: shop primary key
    :return: customer object or None
    """
    return get_customer_by_user_id_and_shop_id(user_id, shop_id)
def get_some_config_by_shop_id_interface(shop_id: int):
    """Cross-app facade: fetch a shop's misc config record."""
    return get_some_config_by_shop_id(shop_id)
def get_delivery_config_by_shop_id_interface(shop_id: int):
    """Cross-app facade: fetch a shop's delivery configuration."""
    return get_delivery_config_by_shop_id(shop_id)
def get_share_setup_by_id_interface(shop_id: int):
    """Cross-app facade: fetch a shop's share settings."""
    return get_share_setup_by_id(shop_id)
def count_product_by_shop_ids_interface(shop_ids: list):
    """Cross-app facade: map shop_id -> product count for several shops."""
    return count_product_by_shop_ids(shop_ids)
def list_staff_by_user_id_interface(user_id: int, roles: int) -> list:
    """Cross-app facade: list all staff records of a user.

    :param user_id: user primary key
    :param roles: role filter passed straight through
    :return: list of staff objects
    """
    return list_staff_by_user_id(user_id, roles)
def list_user_by_ids_interface(user_ids: list) -> list:
    """Cross-app facade: list users by a list of ids.

    :param user_ids: list of user primary keys
    :return: list of user objects
    """
    return list_user_by_ids(user_ids)
"""
websocket相关的路由
"""
from django.urls import path
urlpatterns = [
    # No websocket routes registered yet.
]
<file_sep>/wsc_django/wsc_django/apps/my_celery/celery_sms_task.py
""" 短信异步任务 """
<file_sep>/wsc_django/wsc_django/apps/promotion/services.py
from django_redis import get_redis_connection
from product.models import Product
from promotion.abstract import PromotionEventTemplate
from promotion.events import GrouponEvent
# Redis key template for a product's currently-running promotion event.
PRODUCT_PROMOTION_KEY = "promotion:shop:{shop_id}:product:{product_id}"
def publish_product_promotion(
    shop_id: int, product_id: int, event: PromotionEventTemplate, ttl: int
):
    """
    发布一条营销活动: store ``event.get_event()`` as a redis hash under the
    per-product promotion key with an expiry.

    :param shop_id: shop the promotion belongs to
    :param product_id: product the promotion applies to
    :param event: promotion event; get_event() must return a flat dict
    :param ttl: hash lifetime in seconds
    """
    key = PRODUCT_PROMOTION_KEY.format(shop_id=shop_id, product_id=product_id)
    redis_conn = get_redis_connection("subscribe")
    # hmset() is deprecated since redis-py 3.x; hset(name, mapping=...) is
    # the supported equivalent and still writes all fields in one command.
    redis_conn.hset(key, mapping=event.get_event())
    redis_conn.expire(key, ttl)
def stop_product_promotion(shop_id: int, product_id: int) -> None:
    """
    停用一个正在进行中的营销活动: drop the redis hash holding the running
    promotion of one product.

    :param shop_id: 店铺id
    :param product_id: 商品id
    """
    redis_conn = get_redis_connection("subscribe")
    redis_conn.delete(
        PRODUCT_PROMOTION_KEY.format(shop_id=shop_id, product_id=product_id)
    )
def set_product_promotion(product: Product, promotion: PromotionEventTemplate):
    """
    校验营销活动,并给商品添加营销活动信息: validate the promotion and, when it
    is still open, attach it as ``product.groupon``.

    :param product: product the promotion is checked against
    :param promotion: promotion event (only GrouponEvent is supported)
    :raises ValueError: on an unknown promotion type
    """
    if not promotion:
        return
    if not isinstance(promotion, GrouponEvent):
        raise ValueError("Unknown promotion type")
    # A groupon whose success quota is set (non-zero) and already reached
    # is treated as closed and not attached.
    quota_reached = (
        hasattr(promotion, "success_limit")
        and int(promotion.success_limit) != 0
        and int(promotion.success_limit) <= int(promotion.succeeded_count)
    )
    if not quota_reached:
        product.groupon = promotion
def get_product_promotion(shop_id: int, product_id: int):
    """
    获取单个店铺正在进行的营销活动: load one product's running promotion
    from redis, or None when there is none.

    :param shop_id: shop id
    :param product_id: product id
    :raises ValueError: when the stored event_type is unknown
    """
    key = PRODUCT_PROMOTION_KEY.format(shop_id=shop_id, product_id=product_id)
    redis_conn = get_redis_connection("subscribe")
    if not redis_conn.hlen(key):
        return None
    # redis returns bytes; decode both field names and values.
    event_dict = {
        field.decode("utf-8"): value.decode("utf-8")
        for field, value in redis_conn.hgetall(key).items()
    }
    if event_dict["event_type"] != GrouponEvent._event_type:
        raise ValueError("Unknown event type")
    return GrouponEvent(event_dict)
def list_product_promotions(shop_id: int):
    """
    获取店铺所有商品正在进行营销活动: map product_id -> running promotion
    event for every product of a shop.

    :param shop_id: shop id
    :raises ValueError: when a stored event_type is unknown
    """
    redis_conn = get_redis_connection("subscribe")
    pattern = PRODUCT_PROMOTION_KEY.format(shop_id=shop_id, product_id="*")
    result = {}
    for key in redis_conn.keys(pattern):
        # The product id is the last colon-separated segment of the key.
        product_id = key.decode("utf-8").rsplit(":", 1)[-1]
        event_dict = {
            field.decode("utf-8"): value.decode("utf-8")
            for field, value in redis_conn.hgetall(key).items()
        }
        # Dispatch on the stored event type (only groupon exists today).
        if event_dict["event_type"] != GrouponEvent._event_type:
            raise ValueError("Unknown event type")
        result[int(product_id)] = GrouponEvent(event_dict)
    return result
<file_sep>/wsc_django/wsc_django/apps/config/models.py
from django.db import models
# Create your models here.
from shop.models import Shop
from wsc_django.utils.models import TimeBaseModel
from config.constant import (
PrinterType,
PrinterTemp,
PrinterAutoPrint,
PrinterStatus,
ReceiptBrcodeActive,
)
class Printer(TimeBaseModel):
    """打印机模型类 -- per-shop receipt printer configuration."""
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, verbose_name="打印机对应店铺")
    # 1 = local, 2 = cloud (reserved, per verbose_name).
    type = models.SmallIntegerField(
        null=False,
        default=PrinterType.LOCAL,
        verbose_name="打印机类型1:本地2:云, 预留",
    )
    # Printer vendor id; the mapping is spelled out in the verbose_name.
    brand = models.SmallIntegerField(
        null=False,
        verbose_name="打印机品牌 1:易联云, 2:飞印, 3:佛山喜讯, 4:365 S1, 5:365 S2, 6:森果"
    )
    code = models.CharField(max_length=32, default="", verbose_name="打印机终端号")
    key = models.CharField(max_length=32, default="", verbose_name="打印机秘钥")
    # Print template selector (reserved).
    temp_id = models.SmallIntegerField(
        null=False,
        default=PrinterTemp.ONE,
        verbose_name="打印模板, 预留",
    )
    # Whether new orders are printed automatically (see direct_pay in
    # order/services, which checks this flag).
    auto_print = models.SmallIntegerField(
        null=False,
        default=PrinterAutoPrint.YES,
        verbose_name="订单自动打印",
    )
    status = models.SmallIntegerField(
        null=False,
        default=PrinterStatus.NORMAL,
        verbose_name="打印机状态,预留",
    )

    class Meta:
        db_table = "printer"
        verbose_name = "打印机"
        verbose_name_plural = verbose_name
class Receipt(TimeBaseModel):
    """小票模型类 -- one receipt configuration per shop (pk == shop pk)."""

    # BUGFIX: the original read
    #     id = models.OneToOneField(...).primary_key
    # The trailing ``.primary_key`` fetches the *boolean attribute* (True),
    # so the class attribute became ``id = True``: no relation field was
    # ever registered and Django silently added an unrelated auto pk.
    # Keeping the field name ``id`` and forcing db_column="id" preserves
    # both the lookup interface (filter(id=shop_id) works via the pk) and
    # the column name.
    id = models.OneToOneField(
        Shop,
        primary_key=True,
        null=False,
        on_delete=models.CASCADE,
        db_column="id",
        verbose_name="一个店铺对应一个小票,就直接绑定"
    )
    bottom_msg = models.CharField(max_length=128, null=False, default="", verbose_name="小票底部信息")
    bottom_qrcode = models.CharField(max_length=128, null=False, default="", verbose_name="小票底部二维码")
    bottom_image = models.CharField(max_length=512, null=False, default="", verbose_name="小票底部图片,预留")
    brcode_active = models.SmallIntegerField(
        null=False,
        default=ReceiptBrcodeActive.NO,
        verbose_name="打印订单号条码",
    )
    copies = models.SmallIntegerField(null=False, default=1, verbose_name="小票打印份数")

    class Meta:
        db_table = "receipt"
        verbose_name = "小票"
        verbose_name_plural = verbose_name
class MsgNotify(TimeBaseModel):
    """消息通知模型类 -- per-shop notification switches (pk == shop pk)."""

    # BUGFIX: same ``.primary_key`` mistake as Receipt -- the attribute was
    # the boolean True, not a field; see Receipt for details.
    id = models.OneToOneField(
        Shop, primary_key=True, null=False, on_delete=models.CASCADE, db_column="id"
    )
    # Each pair of flags enables a weixin (_wx) / SMS (_msg) notification.
    order_confirm_wx = models.BooleanField(null=False, default=False, verbose_name="开始配送/等待自提-微信")
    order_confirm_msg = models.BooleanField(null=False, default=False, verbose_name="开始配送/等待自提-短信")
    order_finish_wx = models.BooleanField(null=False, default=False, verbose_name="订单完成-微信")
    order_finish_msg = models.BooleanField(null=False, default=False, verbose_name="订单完成-短信")
    order_refund_wx = models.BooleanField(null=False, default=False, verbose_name="订单退款-微信")
    order_refund_msg = models.BooleanField(null=False, default=False, verbose_name="订单退款-短信")
    group_success_wx = models.BooleanField(null=False, default=False, verbose_name="成团提醒-微信")
    group_success_msg = models.BooleanField(null=False, default=False, verbose_name="成团提醒-短信")
    group_failed_wx = models.BooleanField(null=False, default=False, verbose_name="拼团失败-微信")
    group_failed_msg = models.BooleanField(null=False, default=False, verbose_name="拼团失败-短信")

    class Meta:
        # NOTE(review): table name looks like a typo of "msgnotify"; kept as
        # is because renaming requires a migration.
        db_table = "msgnotfiy"
        verbose_name = "消息通知"
        verbose_name_plural = verbose_name
class ShareSetup(TimeBaseModel):
    """分享设置模型类 -- per-shop sharing texts (pk == shop pk)."""

    # BUGFIX: same ``.primary_key`` mistake as Receipt -- the attribute was
    # the boolean True, not a field; see Receipt for details.
    id = models.OneToOneField(
        Shop, primary_key=True, null=False, on_delete=models.CASCADE, db_column="id"
    )
    custom_title_name = models.CharField(
        max_length=64, null=False, default="", verbose_name="自定义分享标题名称"
    )
    custom_share_description = models.CharField(
        max_length=64, null=False, default="", verbose_name="自定义分享描述"
    )

    class Meta:
        db_table = "share_setup"
        verbose_name = "分享设置"
        verbose_name_plural = verbose_name
class SomeConfig(TimeBaseModel):
    """一些杂乱的配置项,和店铺绑定的 -- misc per-shop switches (pk == shop pk)."""

    # BUGFIX: same ``.primary_key`` mistake as Receipt -- the attribute was
    # the boolean True, not a field; see Receipt for details.
    id = models.OneToOneField(
        Shop, primary_key=True, null=False, on_delete=models.CASCADE, db_column="id"
    )
    show_off_product = models.BooleanField(null=False, default=True, verbose_name="货品板块显示已下架货品")
    new_order_voice = models.BooleanField(null=False, default=True, verbose_name="新订单语音提醒")
    weixin_jsapi = models.BooleanField(null=False, default=False, verbose_name="是否开启微信支付")
    on_delivery = models.BooleanField(null=False, default=True, verbose_name="是否开启货到付款")

    class Meta:
        db_table = "some_config"
        verbose_name = "一些杂乱的配置项"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/storage/services.py
from storage.constant import PRODUCT_STORAGE_RECORD_TYPE
from storage.models import ProductStorageRecord
def create_product_storage_record(record_info: dict):
    """
    Persist a single storage-change record.

    :param record_info: ProductStorageRecord constructor kwargs, e.g.::

        {"shop_id": 1, "product_id": 1, "operator_type": 1, "user_id": 1,
         "type": 1, "change_storage": 1, "current_storage": 2,
         "order_num": "1xxxxx"}

    :return: the saved ProductStorageRecord
    """
    new_record = ProductStorageRecord(**record_info)
    new_record.save()
    return new_record
def create_product_storage_records(storage_record_list: list):
    """
    Bulk-insert several storage-change records.

    :param storage_record_list: list of kwarg dicts, one per record
    :return: the list of created ProductStorageRecord objects
    """
    pending = [ProductStorageRecord(**info) for info in storage_record_list]
    return ProductStorageRecord.objects.bulk_create(pending)
def list_product_storage_record_by_product_id(
    shop_id: int, product_id: int
):
    """
    List one product's storage-change records, newest first, attaching a
    human-readable ``type_text`` to each record.

    :param shop_id: shop id
    :param product_id: product id
    :return: queryset of annotated ProductStorageRecord objects
    """
    records = (
        ProductStorageRecord.objects
        .filter(shop_id=shop_id, product_id=product_id)
        .order_by("-create_time")
        .all()
    )
    for record in records:
        record.type_text = PRODUCT_STORAGE_RECORD_TYPE.get(record.type, "")
    return records
import decimal
import hashlib
import json
import uuid
import requests
from config.services import get_printer_by_shop_id
from customer.models import Customer
from delivery.services import _convert_delivery_period, apply_promotion
from groupon.models import GrouponAttend
from groupon.services import get_shop_groupon_attend_by_id
from logs.constant import OrderLogType
from logs.services import create_order_log
from order.constant import OrderStatus, OrderType, OrderPayType, OrderDeliveryMethod, OrderRefundType, \
MAP_EVENT_ORDER_TYPE
from order.models import Order, OrderDetail, OrderAddress
from payment.service import payment_query, create_order_transaction, get_order_transaction_by_order_id
from printer.services import print_order
from product.constant import ProductStatus
from product.services import update_product_storage_and_no_record, list_product_by_ids
from promotion.constant import PromotionType
from settings import LCSW_HANDLE_HOST
from shop.services import get_shop_by_shop_id
from storage.constant import ProductStorageRecordType, ProductStorageRecordOperatorType
from storage.services import create_product_storage_records
from user.services import get_pay_channel_by_shop_id
from ws.services import publish_admin
from wsc_django.utils.lcsw import LcswFunds, LcswPay
from customer.services import (
get_customer_by_customer_id_and_shop_id,
update_customer_consume_amount_and_point_by_refund,
update_customer_consume_amount_and_count_and_point_by_consume,
get_customer_by_user_id_and_shop_id, create_customer
)
from order.selectors import (
list_order_details_by_order_id,
get_order_by_shop_id_and_id,
get_shop_order_by_shop_id_and_id,
count_abnormal_order,
get_order_by_customer_id_and_groupon_attend_id
)
######### Order promotion checks #########
# Registry: promotion type -> check function (filled by the decorator below).
_MAP_CHECK_PROMOTION_FUNC = {}
def register_check_promotion(type_):
    """Decorator factory: register a promotion-check function under ``type_``."""
    def decorator(func):
        _MAP_CHECK_PROMOTION_FUNC[type_] = func
        return func
    return decorator
@register_check_promotion(PromotionType.NORMAL)
def _check_normal(**__):
    """No-op check for plain (non-promotion) orders.

    Returns (True, event, {}) where event.event_type == 0 so downstream
    code can dispatch on it like a real promotion event.
    """

    class NormalEvent:
        event_type = 0

    return True, NormalEvent(), {}
@register_check_promotion(PromotionType.GROUPON)
def _check_groupon_attend(
    shop_id: int, customer: Customer, promotion_attend_id: int, **__
):
    """Validate a groupon attend: it must exist (row-locked) and the
    customer must not have joined it already.

    Returns (ok, attend-or-error-message, extra order_info fields).
    """
    success, groupon_attend = get_shop_groupon_attend_by_id(
        shop_id, promotion_attend_id, for_update=True
    )
    if not success:
        return False, groupon_attend, {}
    already_joined = get_order_by_customer_id_and_groupon_attend_id(
        customer.id, promotion_attend_id
    )
    if already_joined:
        return False, "您已经参加过该团", {}
    groupon_attend.event_type = 1
    return True, groupon_attend, {"groupon_attend_id": groupon_attend.id}
######### Order-detail builders per promotion type #########
# Registry: promotion type -> detail-payload builder (filled below).
_MAP_GEN_ORDER_DETAIL_FUNC = {}
def register_gen_order_detail_info(type_):
    """Decorator factory: register a detail-payload builder under ``type_``."""
    def decorator(func):
        _MAP_GEN_ORDER_DETAIL_FUNC[type_] = func
        return func
    return decorator
@register_gen_order_detail_info(PromotionType.NORMAL)
def _gen_normal_order_detail_info(item: dict, **__):
    """
    Build the detail-row payload for a normal (non-promotion) cart item.

    :param item: {"product": Product, "quantity": ..., "price": ...,
        "amount": ...}
    :return: (True, payload dict) or (False, error message)
    """
    product = item["product"]
    # Reject stale carts: client-side price must match the current price.
    if abs(item["price"] - product.price) > 0.01:
        return False, "商品信息变化,请刷新"
    quantity = item["quantity"]
    price = item["price"]
    amount = item["amount"]
    # Without a promotion, gross and net values are identical.
    payload = {
        "product_id": product.id,
        "quantity_gross": quantity,
        "quantity_net": quantity,
        "price_gross": price,
        "price_net": price,
        "amount_gross": amount,
        "amount_net": amount,
        "promotion_type": 0,
    }
    return True, payload
@register_gen_order_detail_info(PromotionType.GROUPON)
def _gen_groupon_order_detail_info(
    customer: object, item: dict, promotion_attend: GrouponAttend, **__
):
    """
    Build the detail-row payload for a groupon cart item.

    :param customer: customer placing the order
    :param item: {"product": Product, "quantity": ..., "price": ...,
        "amount": ...}
    :param promotion_attend: locked GrouponAttend being joined
    :return: (True, payload dict) or (False, error message)
    """
    product = item["product"]
    if promotion_attend.groupon.product.id != product.id:
        return False, "拼团商品不匹配,请重新下单"
    # Per-customer quantity limits etc. are enforced by the attend object.
    success, msg = promotion_attend.limit(customer, item["quantity"])
    if not success:
        return False, msg
    # The client must be showing the current groupon price.
    if abs(item["price"] - promotion_attend.calculate()) > 0.01:
        return False, "拼团商品信息有变化,请刷新"
    # Gross values use the undiscounted product price; net uses the
    # groupon price the customer actually pays.
    payload = {
        "product_id": product.id,
        "quantity_gross": item["quantity"],
        "quantity_net": item["quantity"],
        "price_gross": product.price,
        "price_net": item["price"],
        "amount_gross": product.price * item["quantity"],
        "amount_net": item["amount"],
        "promotion_type": 1,
        "promotion_attend_id": promotion_attend.id,
    }
    return True, payload
######### 订单相关 #########
def create_order(order_info: dict):
    """
    Insert and return a new Order built from ``order_info`` kwargs.

    :param order_info: Order constructor kwargs
    :return: the saved Order
    """
    new_order = Order(**order_info)
    new_order.save()
    return new_order
def _create_order_details(
    order: Order,
    cart_items: list,
    promotion_attend: object = None,
):
    """
    Create one OrderDetail per cart item, accumulate the gross totals on
    ``order`` and deduct product stock (recording every change).

    :param order: parent order (its amount_gross/total_amount_gross are
        incremented in place; caller must save it)
    :param cart_items: validated cart items (each carries a "product")
    :param promotion_attend: promotion event/attend object, or None
    :return: (True, [storage records]) or (False, error message)
    """
    # BUGFIX: the default promotion_attend=None used to crash on
    # ``promotion_attend.event_type``; fall back to the normal builder
    # (event_type 0 is what _check_normal's NormalEvent carries).
    event_type = getattr(promotion_attend, "event_type", 0)
    gen_order_detail_info_func = _MAP_GEN_ORDER_DETAIL_FUNC.get(
        event_type, _gen_normal_order_detail_info
    )
    storage_record_list = []
    for item in cart_items:
        # Validate the promotion for this line and build the detail payload.
        success, order_detail_info = gen_order_detail_info_func(
            customer=order.customer,
            item=item,
            promotion_attend=promotion_attend,
        )
        if not success:
            return False, order_detail_info
        order_detail = create_order_detail(order, order_detail_info)
        # Gross totals are accumulated while the details are created.
        order.amount_gross += order_detail.amount_gross
        order.total_amount_gross += order_detail.amount_gross
        # Deduct stock now; the change records are bulk-inserted below.
        storage_record = update_product_storage_and_no_record(
            item["product"],
            order.customer.user.id,
            -item["quantity"],
            ProductStorageRecordType.MALL_SALE,
            ProductStorageRecordOperatorType.CUSTOMER,
            order.order_num,
        )
        storage_record_list.append(storage_record)
    storage_record_list = create_product_storage_records(storage_record_list)
    return True, storage_record_list
def create_order_detail(order: Order, order_detail_info: dict):
    """
    Persist one OrderDetail row, copying the order-level fields (shop,
    customer, date, status, pay type) into the payload first.

    :param order: parent order
    :param order_detail_info: payload from a _gen_*_order_detail_info
        builder, e.g. product_id / quantity_* / price_* / amount_* keys
    :return: the saved OrderDetail
    """
    order_detail_info.update(
        order_id=order.id,
        shop_id=order.shop.id,
        customer_id=order.customer.id,
        create_date=order.create_date,
        status=order.order_status,
        pay_type=order.pay_type,
    )
    detail = OrderDetail(**order_detail_info)
    detail.save()
    return detail
def _create_order_address(
    address_info: dict, shop_id: int, delivery_method: int
):
    """
    Persist the order address; self-pickup orders get the shop's own
    address instead of the customer-supplied one.

    :param address_info: OrderAddress constructor kwargs
    :param shop_id: shop id (used only for self-pickup)
    :param delivery_method: OrderDeliveryMethod value
    :return: the saved OrderAddress
    """
    order_address = OrderAddress(**address_info)
    if delivery_method == OrderDeliveryMethod.CUSTOMER_PICK:
        shop = get_shop_by_shop_id(shop_id)
        for addr_field, shop_field in (
            ("province", "shop_province"),
            ("city", "shop_city"),
            ("county", "shop_county"),
            ("address", "shop_address"),
        ):
            setattr(order_address, addr_field, getattr(shop, shop_field))
    order_address.save()
    return order_address
def order_data_check(shop_id: int, user_id, args: dict):
    """
    Validate everything needed to place an order and assemble order_info.

    :param shop_id: shop id
    :param user_id: buying user's id
    :param args: request payload (cart_items, delivery_*, pay_type, ...)
    :return: (True, order_info dict) or (False, error message)
    """
    # Basic checks: each line's amount and the order total must match
    # quantity * price within one cent.
    order_amount = decimal.Decimal(0)
    for item in args["cart_items"]:
        if abs(item["quantity"] * item["price"] - item["amount"]) > 0.01:
            return False, "货品id:{product_id},价格计算有误".format(product_id=item["product_id"])
        order_amount += item["amount"]
    if abs(order_amount + args["delivery_amount"] - args["total_amount"]) > 0.01:
        return False, "订单金额计算有误"
    # Delivery-method dependent parameters: pickup requires a time slot.
    if args[
        "delivery_method"
    ] == OrderDeliveryMethod.CUSTOMER_PICK and not args.get("delivery_period"):
        return False, "客户自提订单自提时间段必传"
    # Pay-type dependent parameters: weixin pay requires wx_openid.
    if args["pay_type"] == OrderPayType.WEIXIN_JSAPI and not args.get("wx_openid"):
        return False, "微信支付订单wx_openid必传"
    # Look up (or lazily create) the shop-scoped customer.
    customer = get_customer_by_user_id_and_shop_id(user_id, shop_id)
    if not customer:
        customer = create_customer(user_id, shop_id)
    # Delivery-fee validation (may apply promotions to the fee).
    success, delivery_amount_gross = apply_promotion(
        shop_id, args["delivery_method"], order_amount, args["delivery_amount"]
    )
    if not success:
        return False, delivery_amount_gross
    # Normalise the requested delivery period.
    success, delivery_period = _convert_delivery_period(args)
    if not success:
        return False, delivery_period
    # Cart validation: product must exist, be on sale and have stock.
    cart_items = args.get("cart_items")
    product_ids = [item["product_id"] for item in cart_items]
    products = list_product_by_ids(shop_id, product_ids)
    map_products = {product.id: product for product in products}
    for item in cart_items:
        product = map_products.get(item.pop("product_id"))
        if not product:
            return False, "商品已下架, 看看别的商品吧"
        if product.status != ProductStatus.ON:
            return False, "商品已下架, 看看别的商品吧"
        if product.storage < item["quantity"]:
            return (
                False,
                "商品:「{product_name}」库存仅剩 {storage} !".format(product_name=product.name, storage=product.storage)
            )
        item["product"] = product
    # Promotion validation, dispatched on promotion_type.
    promotion_type = args["promotion_type"]
    assert promotion_type in _MAP_CHECK_PROMOTION_FUNC.keys()
    check_promotion_func = _MAP_CHECK_PROMOTION_FUNC[promotion_type]
    success, promotion_attend, promotion_attend_field= check_promotion_func(
        shop_id=shop_id,
        customer=customer,
        promotion_attend_id=args["promotion_attend_id"],
    )
    if not success:
        return False, promotion_attend
    order_info = {
        "delivery_method": args["delivery_method"],
        "delivery_period": delivery_period,
        # Temporary number; the real one is issued later (avoids skipping
        # the redis auto-increment).
        "order_num": str(uuid.uuid1())[:8],
        "pay_type": args["pay_type"],
        "amount_gross": 0,
        "amount_net": order_amount,
        "delivery_amount_gross": delivery_amount_gross,
        "delivery_amount_net": args["delivery_amount"],
        "total_amount_gross": delivery_amount_gross,
        "total_amount_net": order_amount + args["delivery_amount"],
        "order_type": MAP_EVENT_ORDER_TYPE.get(promotion_type, OrderType.NORMAL),
        "address": args.get("address"),
        "customer_id": customer.id,
        "shop_id": shop_id,
        "promotion_attend": promotion_attend,
    }
    order_info.update(promotion_attend_field)
    if args.get("remark"):
        order_info["remark"] = args.get("remark")
    return True, order_info
def count_paid_order(shop_id: int):
    """
    Number of paid-but-unhandled orders for a shop.

    :param shop_id: shop id
    :return: int count
    """
    return Order.objects.filter(
        shop_id=shop_id, order_status=OrderStatus.PAID
    ).count()
def direct_pay(order: Order):
    """
    Complete the direct-payment flow: mark the order paid, credit the
    customer's consumption/points, push an admin notification and print
    the receipt when auto-print is on.

    :param order: order being paid
    :return: always (True, order); printing failures are best-effort
    """
    set_order_status_paid(order, OrderStatus.PAID)
    # Credit consumption amount, count and points.
    update_customer_consume_amount_and_count_and_point_by_consume(
        order.customer.id, order.total_amount_net
    )
    publish_admin(
        order.shop.id, "new_order", {"count": count_paid_order(order.shop.id)}
    )
    # Auto printing is best effort: any failure still leaves the order paid.
    printer = get_printer_by_shop_id(order.shop.id)
    if printer and printer.auto_print:
        success, order_for_print = get_shop_order_by_shop_id_and_id(
            order.shop.id, order.id
        )
        if success:
            print_order(order_for_print, 0)
    return True, order
def cancel_order(shop_id: int, order_id: int):
    """
    Cancel an unpaid order.

    For weixin orders the payment gateway is queried first; when the money
    actually arrived the order is flipped to paid instead of cancelled.

    :param shop_id: shop id
    :param order_id: order id
    :return: (success, error message or None)
    """
    order = get_order_by_shop_id_and_id(shop_id, order_id)
    if not order:
        return False, "订单不存在"
    elif order.order_status != OrderStatus.UNPAID:
        return False, "订单状态已改变"
    elif order.pay_type == OrderPayType.WEIXIN_JSAPI:
        # tag semantics come from payment_query; 2 appears to mean "paid on
        # the gateway side" -- TODO confirm against payment.service.
        tag, ret_dict = payment_query(order)
        if tag == 2:
            # Record the discovered transaction, mark paid and refuse.
            create_order_transaction(
                order.id,
                ret_dict["out_trade_no"],
                ret_dict["total_fee"],
                ret_dict["channel_trade_no"],
            )
            set_order_paid(order)
            return False, "订单已支付, 暂无法取消"
    success, error_obj = set_order_status_canceled(order)
    return success, error_obj
def payment_refund(order: Order):
    """
    Refund an order through the LCSW payment channel.

    Flow: check the merchant's withdrawable balance, build the signed
    refund request, post it, then verify the response signature and the
    business result.

    :param order: order to refund (full total_amount_net is refunded)
    :return: (True, None) on success, (False, error message) otherwise
    """
    success, pay_channel = get_pay_channel_by_shop_id(order.shop.id)
    if not success:
        return False, pay_channel
    # Balance pre-check: the not-yet-settled amount (grossed up by the
    # clearing rate, in per-mille) must cover the refund.
    query_dict = LcswFunds.queryWithdrawal(pay_channel.smerchant_no)
    # Refund the full order amount, expressed in cents.
    refund_fee = int(round(order.total_amount_net * 100))
    if query_dict["return_code"] == "01" and query_dict["result_code"] == "01":
        not_settle_amt = int(query_dict["not_settle_amt"])
        if (
            round(not_settle_amt * 1000 / (1000 - pay_channel.clearing_rate))
            < refund_fee
        ):
            return False, "退款失败,当前账户{text}".format(text="余额不足")
    else:
        return (
            False,
            "订单退款外部请求失败: {text}".format(text=query_dict["return_msg"]),
        )
    order_transaction = get_order_transaction_by_order_id(order.id)
    if not order_transaction:
        return False, "订单退款外部请求失败: {text}".format(text="找不到交易(SG:no-transaction)")
    pay_type = "010"
    parameters = LcswPay.getRefundParas(
        pay_type,
        order.order_num,
        order_transaction.transaction_id,
        str(refund_fee),
        pay_channel.smerchant_no,
        pay_channel.terminal_id1,
        pay_channel.access_token,
    )
    try:
        # SECURITY NOTE: verify=False disables TLS certificate checking on
        # a payment call -- kept for compatibility, should be revisited.
        r = requests.post(
            LCSW_HANDLE_HOST + "/pay/100/refund",
            data=json.dumps(parameters),
            verify=False,
            headers={"content-type": "application/json"},
            timeout=(1, 10),
        )
        res_dict = json.loads(r.text)
    # BUGFIX: was a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; the user-facing behavior is unchanged.
    except Exception:
        return False, "订单退款外部请求失败: {text}".format(text="退款接口超时或返回异常,请稍后再试(LC)")
    # return_code: 01 success / 02 failure -- transport level only, it does
    # not imply the business result.
    if res_dict["return_code"] == "02":
        return False, "订单退款外部请求失败: {text}".format(text=res_dict["return_msg"])
    # Verify the response signature before trusting the business result.
    key_sign = res_dict["key_sign"]
    str_sign = LcswPay.getStrForSignOfRefundRet(res_dict)
    if key_sign != hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower():
        return False, "订单退款外部请求失败: {text}".format(text="签名有误")
    # result_code: 01 success / 02 failure -- business level.
    if res_dict["result_code"] == "02":
        return False, "订单退款外部请求失败: {text}".format(text=res_dict["return_msg"])
    return True, None
def refund_order(
    shop_id: int, order: Order, refund_type: int, user_id: int = 0
):
    """
    Refund an order.  Weixin online refunds go through the payment channel
    first; offline refunds just flip the status.

    :param shop_id: shop id
    :param order: order to refund
    :param refund_type: OrderRefundType value
    :param user_id: operator id; 0 means system-triggered
    :return: (True, None) or (False, error message)
    """
    needs_online_refund = (
        order.pay_type == OrderPayType.WEIXIN_JSAPI
        and refund_type == OrderRefundType.WEIXIN_JSAPI_REFUND
    )
    if needs_online_refund:
        success, result = payment_refund(order)
        if not success:
            # System-triggered groupon refunds that fail become abnormal
            # orders and the admin channel is notified.
            if order.order_type == OrderType.GROUPON and user_id == 0:
                set_order_status_refund_failed(order, user_id, result)
                publish_admin(
                    shop_id,
                    "abnormal_order",
                    {"count": count_abnormal_order(shop_id)},
                )
            return False, result
    set_order_status_refunded(order, user_id, refund_type)
    return True, None
def set_order_status_refunded(order: Order, user_id: int, refund_type: int):
    """
    Mark the order and all its details REFUNDED, restore product stock
    (recording each change), claw back the customer's consumption amount
    and points, and write an operation log.

    :param order: order being refunded
    :param user_id: operator id (0 for system-triggered refunds)
    :param refund_type: OrderRefundType value
    :return: None (order is saved at the end)
    """
    order.order_status = OrderStatus.REFUNDED
    order.refund_type = refund_type
    order_detail_list = list_order_details_by_order_id(order.id)
    order_detail_list.update(status=OrderStatus.REFUNDED, refund_type=refund_type)
    storage_record_list = []
    for order_detail in order_detail_list:
        # Give the sold quantity back to stock.
        change_storage = order_detail.quantity_net
        storage_record = update_product_storage_and_no_record(
            order_detail.product,
            order_detail.customer.user.id,
            change_storage,
            ProductStorageRecordType.ORDER_CANCEL,
            ProductStorageRecordOperatorType.CUSTOMER,
            order.order_num,
        )
        storage_record_list.append(storage_record)
    create_product_storage_records(storage_record_list)
    # A refund removes the consumption amount / points credited on payment.
    update_customer_consume_amount_and_point_by_refund(
        order.customer.id, order.total_amount_net
    )
    # Operation log entry.
    log_info = {
        "order_id":order.id,
        "order_num": order.order_num,
        "shop_id": order.shop.id,
        "operator_id": user_id,
        "operate_type": OrderLogType.REFUND,
    }
    create_order_log(log_info)
    order.save()
def set_order_paid(order: Order):
    """
    Mark an unpaid order as paid; currently only normal orders are
    handled here (other types return None).

    :param order: order that must still be UNPAID
    :return: direct_pay's (True, order) for normal orders, else None
    """
    assert order.order_status == OrderStatus.UNPAID
    if order.order_type != OrderType.NORMAL:
        # Other order types (e.g. groupon) are handled elsewhere.
        return None
    return direct_pay(order)
def set_order_status_paid(order: Order, order_status: int):
    """
    Move the order and its details to a paid-ish status.

    :param order: order to update
    :param order_status: OrderStatus.PAID or OrderStatus.WAITTING only
    """
    assert order_status in (OrderStatus.PAID, OrderStatus.WAITTING)
    order.order_status = order_status
    list_order_details_by_order_id(order.id).update(status=order_status)
    order.save()
def set_order_status_canceled(order: Order):
    """
    Cancel the order and its details, give the stock back and record each
    storage change.

    :param order: order to cancel
    :return: (True, None)
    """
    customer = get_customer_by_customer_id_and_shop_id(order.customer.id, order.shop.id)
    order.order_status = OrderStatus.CANCELED
    order_detail_list = list_order_details_by_order_id(order.id)
    order_detail_list.update(status=OrderStatus.CANCELED)
    records = [
        update_product_storage_and_no_record(
            order_detail.product,
            customer.user.id,
            order_detail.quantity_net,  # return the full net quantity
            ProductStorageRecordType.ORDER_CANCEL,
            ProductStorageRecordOperatorType.CUSTOMER,
            order.order_num,
        )
        for order_detail in order_detail_list
    ]
    create_product_storage_records(records)
    order.save()
    return True, None
def set_order_status_confirmed_finish(
    order: Order, order_status: int, user_id: int, operate_type: int
):
    """
    Move the order and its details to CONFIRMED or FINISHED and log the
    operation.

    :param order: order to update
    :param order_status: OrderStatus.CONFIRMED or OrderStatus.FINISHED only
    :param user_id: operator id
    :param operate_type: OrderLogType value for the log entry
    """
    assert order_status in (OrderStatus.CONFIRMED, OrderStatus.FINISHED)
    order.order_status = order_status
    list_order_details_by_order_id(order.id).update(status=order_status)
    create_order_log({
        "order_id": order.id,
        "order_num": order.order_num,
        "shop_id": order.shop.id,
        "operator_id": user_id,
        "operate_type": operate_type,
    })
    order.save()
def set_order_status_refund_failed(
    order: Order, user_id: int, error_text: str
):
    """
    Mark the order and its details REFUND_FAIL and log a categorised
    failure reason.

    :param order: order whose refund failed
    :param user_id: operator id (0 for system-triggered refunds)
    :param error_text: error message returned by payment_refund
    """
    order.order_status = OrderStatus.REFUND_FAIL
    list_order_details_by_order_id(order.id).update(status=OrderStatus.REFUND_FAIL)
    # Bucket the raw error text into a coarse category for the log.
    if error_text in ("店铺未开通线上支付", "店铺支付渠道错误"):
        operate_content = "支付通道错误"
    elif error_text == "退款失败,当前账户余额不足":
        operate_content = "余额不足"
    else:
        operate_content = "其他"
    create_order_log({
        "order_id": order.id,
        "shop_id": order.shop.id,
        "operator_id": user_id,
        "order_num": order.order_num,
        "operate_type": OrderLogType.REFUND_FAIL,
        "operate_content": operate_content,
    })
    order.save()
<file_sep>/wsc_django/wsc_django/apps/config/urls.py
"""
Shop configuration related routes.
"""
from django.urls import path

from config import views


urlpatterns_admin = [
    path('api/admin/config/shop-info/', views.AdminConfigShopInfoView.as_view()),  # fetch shop info
    path('api/admin/config/print-info/', views.AdminConfigPrintInfoView.as_view()),  # fetch print settings
    path('api/admin/config/msg-notify/', views.AdminConfigMsgNotifyView.as_view()),  # get & set message-notification settings
    path('api/admin/config/shop/img/', views.AdminConfigShopImgView.as_view()),  # update shop logo
    path('api/admin/config/shop/name/', views.AdminConfigShopNameView.as_view()),  # update shop name
    path('api/admin/config/shop/phone/', views.AdminConfigShopPhoneView.as_view()),  # update shop contact phone
    path('api/admin/config/shop/address/', views.AdminConfigShopAddressView.as_view()),  # update shop address
    path('api/admin/config/printer/', views.AdminConfigPrinterView.as_view()),  # update printer settings
    path(
        'api/admin/config/receipt/bottom-msg/', views.AdminConfigReceiptBottomMsgView.as_view()
    ),  # receipt footer message settings
    path(
        'api/admin/config/receipt/bottom-qrcode/', views.AdminConfigReceiptBottomQrcodeView.as_view()
    ),  # receipt footer QR-code settings
    path(
        'api/admin/config/receipt/brcode-active/', views.AdminConfigReceiptBrcodeActiveView.as_view()
    ),  # toggle printing of the order-number barcode
    path(
        'api/admin/config/receipt/copies/', views.AdminConfigReceiptCopiesView.as_view()
    ),  # receipt copies setting (original comment duplicated the barcode one -- confirm)
    path(
        'api/admin/shop/pay-mode-config/', views.AdminPayModeConfigView.as_view()
    ),  # shop payment-mode settings toggle
    path('api/admin/shop/some-config/', views.AdminSomeConfigView.as_view()),  # misc shop settings toggles
    path('api/admin/config/shop-setup/', views.AdminConfigShopSetupView.as_view()),  # fetch general shop settings
    path(
        'api/admin/config/custom-title-name/', views.AdminConfigCustomTitleNameView.as_view()
    ),  # update custom title in the shop share info
    path(
        'api/admin/config/custom-share-description/', views.AdminConfigCustomShareDescriptionView.as_view()
    ),  # update custom share description in the shop share info
    path('api/config/wechat/jsapi-signature/', views.WechatJsapiSigntureView.as_view()),  # fetch WeChat JSAPI signature
    path('api/qiniu/img-token/', views.QiniuImgTokenView.as_view()),  # fetch Qiniu image-upload token
]

urlpatterns_mall = [
    path('api/mall/tencent/COS/credential/', views.TencentCOSCredential.as_view()),  # Tencent COS credential (original comment said "WeChat jsapi" -- copy/paste; confirm)
]

urlpatterns = urlpatterns_mall + urlpatterns_admin
<file_sep>/wsc_django/wsc_django/apps/staff/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial staff-app migration: creates the `staff` and `staff_apply` tables."""

    initial = True

    dependencies = [
        ('shop', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Staff',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('roles', models.SmallIntegerField(default=0, verbose_name='角色,二进制运算进行校验')),
                ('permissions', models.BigIntegerField(default=0, verbose_name='权限,二进制运算进行校验')),
                ('status', models.SmallIntegerField(default=1, verbose_name='员工状态,0:删除,1:正常')),
                ('position', models.CharField(default='无', max_length=16, verbose_name='员工职位')),
                ('entry_date', models.DateField(auto_now_add=True, verbose_name='员工入职时间')),
                ('remark', models.CharField(default='', max_length=32, verbose_name='备注')),
            ],
            options={
                'verbose_name': '员工',
                'verbose_name_plural': '员工',
                'db_table': 'staff',
            },
        ),
        migrations.CreateModel(
            name='StaffApply',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('status', models.SmallIntegerField(default=1, verbose_name='申请状态,0:未申请,1;申请中,2:已通过')),
                ('expired', models.SmallIntegerField(default=0, verbose_name='申请信息是否过期,0:未过期,1:已过期')),
                ('shop', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='对应的商铺对象')),
            ],
            options={
                'verbose_name': '员工申请表',
                'verbose_name_plural': '员工申请表',
                'db_table': 'staff_apply',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/product/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial product-app migration: creates `product`, `product_picture`
    and `product_group` tables."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('name', models.CharField(max_length=64, verbose_name='货品名称')),
                ('name_acronym', models.CharField(max_length=64, verbose_name='货品名称拼音')),
                ('price', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='货品单价')),
                ('storage', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='货品库存')),
                ('code', models.CharField(default='', max_length=32, verbose_name='货品编码')),
                ('summary', models.CharField(default='', max_length=128, verbose_name='货品简介')),
                ('cover_image_url', models.CharField(default='', max_length=512, verbose_name='货品封面图')),
                ('description', models.TextField(default='', verbose_name='货品详情描述')),
                ('status', models.SmallIntegerField(default=1, verbose_name='货品状态, 0:删除, 1:上架, 2:下架')),
            ],
            options={
                'verbose_name': '货品',
                'verbose_name_plural': '货品',
                'db_table': 'product',
            },
        ),
        migrations.CreateModel(
            name='ProductPicture',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('image_url', models.CharField(max_length=512, verbose_name='货品轮播图url')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.product', verbose_name='对应货品对象')),
            ],
            options={
                'verbose_name': '货品轮播图',
                'verbose_name_plural': '货品轮播图',
                'db_table': 'product_picture',
            },
        ),
        migrations.CreateModel(
            name='ProductGroup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('name', models.CharField(max_length=32, verbose_name='商品分组名称')),
                ('description', models.CharField(default='', max_length=128, verbose_name='商品分组描述')),
                ('sort', models.IntegerField(null=True, verbose_name='商品分组排序')),
                ('level', models.SmallIntegerField(null=True, verbose_name='商品分组级别')),
                ('default', models.SmallIntegerField(default=0, verbose_name='是否为默认分组, 0:否,1:是')),
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='product.productgroup', verbose_name='该商品分组的父级ID')),
            ],
            options={
                'verbose_name': '货品分组',
                'verbose_name_plural': '货品分组',
                'db_table': 'product_group',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/shop/constant.py
# 商铺支付渠道
class ShopPayChannelType:
    """Payment channel codes for a shop."""

    LCSW = 1  # LCSW channel
    CCB = 2   # CCB (China Construction Bank) channel
# 商铺状态
class ShopStatus:
    """Shop lifecycle status."""

    CLOSED = 0    # shop closed
    NORMAL = 1    # approved, operating normally
    CHECKING = 2  # under review
    REJECTED = 3  # rejected
# 商铺是否开通认证
class ShopVerifyActive:
    """Whether the shop has completed verification."""

    NO = 0        # not verified
    YES = 1       # verified
    CHECKING = 2  # under review
    REJECTED = 3  # rejected
# 商铺类型,个人or企业
class ShopVerifyType:
    """Shop subject type: enterprise or individual."""

    ENTERPRISE = 0  # enterprise
    INDIVIDUAL = 1  # individual
# 商铺是否开通支付
class ShopPayActive:
    """Whether the shop has online payment enabled."""

    NO = 0        # not enabled
    YES = 1       # enabled
    CHECKING = 2  # under review
    REJECTED = 3  # rejected
# 商铺公众号是否开启
class ShopMpMpActive:
    """Whether the shop's official account is active."""

    NO = 0
    YES = 1
# 商铺公众号服务类型
class ShopMpServiceType:
    """Service type of the shop's official account."""

    SUBSCRIBE = 1      # subscription-type account
    OLD_SUBSCRIBE = 2  # legacy subscription-type account
    SERVICE = 3        # service-type account
# 商铺授权森果是否开启
class ShopMpAuthorizeActive:
    """Whether the shop's official account is authorized to the platform."""

    NOT_AUTH = 0       # truly unbound (not authorized)
    AUTH = 1           # bound (authorized)
    AUTH_NOT_BIND = 2  # displayed as unbound, but not unbound on the WeChat side, so still usable
# 商铺微信认证类型
class ShopMpVerifyType:
    """WeChat verification type of the shop's official account."""

    NOT_BIND = -1                     # no official account bound
    NOT_VERIFY = 0                    # not verified
    MP_VERIFY = 1                     # WeChat verified
    SINA_VERIFY = 2                   # Sina Weibo verified
    T_QQ_VERIFY = 3                   # Tencent Weibo verified
    VERIFY_BUT_NOT_NAME = 4           # verified, but name verification failed
    VERIFY_BUT_NOT_NAME_BUT_SINA = 5  # name verification failed, but Sina Weibo verified
    VERIFY_BUT_NOT_NAME_BUT_T_QQ = 6  # name verification failed, but Tencent Weibo verified
<file_sep>/wsc_django/wsc_django/apps/ws/consumers.py
import asyncio
import aioredis
import json
from channels.generic.websocket import WebsocketConsumer
from django.core import signing
from asgiref.sync import async_to_sync
from settings import REDIS_SERVER, REDIS_PORT
from ws.constant import CHANNEL_ADMIN
# Sentinel meaning "raise instead of returning a default" when cookie
# signature validation fails.
RAISE_ERROR = object()


class AdminWebSocketConsumer(WebsocketConsumer):
    """Admin-side websocket consumer.

    Every connection joins the shared CHANNEL_ADMIN group so backend events
    can be broadcast to all connected admins; each connection then filters
    events by the shop id signed into its cookie.
    """

    def connect(self):
        # Register this connection in the admin broadcast group.
        async_to_sync(self.channel_layer.group_add)(
            CHANNEL_ADMIN, self.channel_name
        )
        self.accept()

    def disconnect(self, close_code):
        # Remove the connection from the broadcast group on close.
        async_to_sync(self.channel_layer.group_discard)(
            CHANNEL_ADMIN, self.channel_name
        )

    def send_message(self, event):
        # Channel-layer handler for {"type": "send.message"}: forwards the
        # broadcast payload (a dict) through receive() for shop filtering.
        message = event['message']
        self.receive(message)

    def receive(self, text_data=None, bytes_data=None):
        # NOTE(review): text_data is a str for real client frames ("ping") but
        # a dict when forwarded from send_message(); the subscripting in the
        # else-branch assumes the dict case -- confirm clients only send "ping".
        if text_data == "ping":
            self.send(json.dumps({"event": "pong", "data": "pong"}))
        else:
            # Verify the signed shop-id cookie and only forward events
            # belonging to the shop this admin connection is viewing.
            key = "wsc_shop_id"
            salt = "hzh_wsc_shop_id"
            default = RAISE_ERROR
            cookie_value = self.scope["cookies"].get(key)
            try:
                cookie_shop_id = signing.get_cookie_signer(salt=key + salt).unsign(
                    cookie_value, max_age=None)
            except signing.BadSignature:
                # `default` is always RAISE_ERROR here, so a bad signature
                # propagates; the branch mirrors Django's signed-cookie helper.
                if default is not RAISE_ERROR:
                    return default
                else:
                    raise
            if str(text_data["shop_id"]) == str(cookie_shop_id):
                self.send(json.dumps({"event": text_data["event"], "data": text_data["data"]}))
import datetime
import decimal
from django.db import models
from customer.models import Customer
from groupon.constant import GrouponStatus, GrouponAttendStatus, GrouponType, GrouponAttendLineStatus
from product.models import Product
from promotion.abstract import AbstractPromotionRule
from shop.models import Shop
from wsc_django.utils.models import TimeBaseModel
class Groupon(TimeBaseModel):
    """Group-buying (groupon) campaign for one product of one shop."""

    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="对应的店铺对象")
    product = models.ForeignKey(Product, on_delete=models.CASCADE, null=False, verbose_name="对应的货品对象")
    price = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="商品拼团价格")
    from_datetime = models.DateTimeField(null=False, verbose_name="拼团活动开始时间")
    to_datetime = models.DateTimeField(null=False, verbose_name="拼团活动结束时间")
    groupon_type = models.SmallIntegerField(null=False, verbose_name="拼团活动类型 1:普通 2:老带新")
    success_size = models.SmallIntegerField(null=False, default=1, verbose_name="成团人数")
    quantity_limit = models.IntegerField(
        null=False, default=0, verbose_name="购买数量上限(限制每个订单的购买数量)"
    )
    success_limit = models.IntegerField(
        null=False, default=0, verbose_name="成团数量上限(限制单次活动的最大成团数量)"
    )
    attend_limit = models.IntegerField(
        null=False, default=0, verbose_name="参团数量上限(每个用户能参加同一拼团的次数)"
    )
    success_valid_hour = models.IntegerField(
        null=False, default=24, verbose_name="开团有效时间(超过此时间未成团的活动将自动解散)"
    )
    status = models.SmallIntegerField(
        null=False, default=GrouponStatus.ON, verbose_name="拼团活动状态 1:启用 2:停用 3:过期"
    )
    succeeded_count = models.IntegerField(null=False, default=0, verbose_name="成团数")
    succeeded_quantity = models.DecimalField(
        max_digits=13, decimal_places=4, null=False, default=0, verbose_name="成团件数"
    )
    is_editable = models.BooleanField(null=False, default=True, verbose_name="是否可以编辑")

    class Meta:
        db_table = "groupon"
        verbose_name = "拼团活动"
        verbose_name_plural = verbose_name

    def set_expired(self):
        # Mutates the in-memory instance only; caller is responsible for save().
        self.status = GrouponStatus.EXPIRED

    def set_uneditable(self):
        # Mutates the in-memory instance only; caller is responsible for save().
        self.is_editable = False
class GrouponAttend(TimeBaseModel, AbstractPromotionRule):
    """One opened group ("attend") of a groupon campaign.

    Tracks how many customers joined, the per-group deadline, and the final
    success/failure state of this particular group.
    """

    groupon = models.ForeignKey(Groupon, null=False, on_delete=models.CASCADE, verbose_name="拼团活动")
    size = models.IntegerField(null=False, default=0, verbose_name="拼团当前参与人数")
    anonymous_size = models.IntegerField(null=False, default=0, verbose_name="强制成团添加的匿名用户数量")
    success_size = models.IntegerField(null=False, verbose_name="成团人数")
    to_datetime = models.DateTimeField(null=False, verbose_name="拼团参与结束时间")
    status = models.SmallIntegerField(
        null=False,
        default=GrouponAttendStatus.CREATED,
        verbose_name="拼团参与状态 -1: 超时未支付 0:已创建 1:拼团中 2:已成团 3:已失败"
    )
    failed_reason = models.CharField(max_length=64, null=False, default="", verbose_name="失败原因")

    class Meta:
        db_table = "groupon_attend"
        verbose_name = "拼团参与表"
        verbose_name_plural = verbose_name

    def is_sponsor(self, customer) -> bool:
        """Whether *customer* is the sponsor (team leader) of this group."""
        # NOTE(review): `sponsor_detail` is not defined on this model --
        # presumably attached elsewhere (property/annotation); confirm.
        return customer.id == self.sponsor_detail.customer_id

    def calculate(self) -> decimal.Decimal:
        """Promotion-rule hook: return the discounted (groupon) unit price."""
        return self.groupon.price

    def set_succeeded(self):
        """Mark the group as successfully formed (in memory only; caller saves)."""
        self.status = GrouponAttendStatus.SUCCEEDED

    def set_failed(self, reason: str):
        """Mark the group as failed with *reason* (in memory only; caller saves)."""
        self.status = GrouponAttendStatus.FAILED
        self.failed_reason = reason

    def limit(self, customer, quantity_net) -> tuple:
        """Validate whether *customer* may open/join this group.

        :param customer: candidate participant
        :param quantity_net: quantity the customer wants to buy
        :return: (ok, reason) -- reason is "" when ok is True
        """
        # Group already full.
        if self.size + 1 > self.success_size:
            return False, "本团已满员, 去看看其他团吧"
        # Campaign ended or disabled.
        if (
            self.groupon.to_datetime <= datetime.datetime.now()
            or self.groupon.status != GrouponStatus.ON
        ):
            return False, "活动已结束, 看看其他商品吧"
        # The per-group countdown starts after the sponsor pays; the sponsor is
        # exempt from the group-expiry check (campaign expiry is checked above).
        if (not self.is_sponsor(customer)) and (
            self.to_datetime <= datetime.datetime.now()
            or (self.status != GrouponAttendStatus.WAITTING)
        ):
            return False, "本团已过期,去看看其他团吧"
        # Campaign-wide cap on the number of successfully formed groups.
        if (
            self.groupon.success_limit
            and self.groupon.succeeded_count >= self.groupon.success_limit
        ):
            return False, "成团数已达到上限, 去参加其他团吧"
        # "Old brings new" campaigns only accept new customers (sponsor exempt).
        if (
            self.groupon.groupon_type == GrouponType.MENTOR
            and not customer.is_new_customer()
            and not self.is_sponsor(customer)
        ):
            return False, "邀新团仅允许新用户参与"
        # Per-order quantity cap.
        if self.groupon.quantity_limit and quantity_net > self.groupon.quantity_limit:
            return (
                False,
                "本团限购{quantity_limit}, 请减少购买数量后再试".format(
                    quantity_limit=self.groupon.quantity_limit
                ),
            )
        # Has the customer already joined this particular group?
        attend_detail = (
            GrouponAttendDetail.objects.filter(
                groupon_attend_id=self.id,
                customer_id=customer.id,
                status__in=[GrouponAttendLineStatus.PAID, GrouponAttendLineStatus.UNPAID],
            )
            .first()
        )
        if attend_detail and not self.is_sponsor(customer):
            return False, "您已经参加过该团"
        # Per-customer cap on attends across the whole campaign.
        if self.groupon.attend_limit:
            total_attend_count = (
                GrouponAttendDetail.objects.filter(
                    groupon_attend__groupon_id=self.groupon.id,
                    # BUG FIX: GrouponAttend's field is `status`; the original
                    # lookup `groupon_attend__groupon_status__in` referenced a
                    # non-existent field and raises FieldError at query time.
                    groupon_attend__status__in=[GrouponAttendStatus.WAITTING, GrouponAttendStatus.SUCCEEDED],
                    customer_id=customer.id,
                    status__in=[GrouponAttendLineStatus.PAID, GrouponAttendLineStatus.UNPAID]
                )
                .count()
            )
            if total_attend_count >= self.groupon.attend_limit:
                return (
                    False,
                    "最多参加{attend_limit}次, 您已达到上限".format(
                        attend_limit=self.groupon.attend_limit
                    ),
                )
        return True, ""
class GrouponAttendDetail(TimeBaseModel):
    """One customer's participation line inside a group attend."""

    groupon_attend = models.ForeignKey(
        GrouponAttend, on_delete=models.CASCADE, null=False, verbose_name="拼团参与"
    )
    customer = models.ForeignKey(Customer, on_delete=models.CASCADE, null=False, verbose_name="参与客户")
    is_sponsor = models.BooleanField(null=False, default=True, verbose_name="是否是团长")
    is_new_customer = models.BooleanField(null=False, default=False, verbose_name="是否是新用户")
    status = models.SmallIntegerField(
        null=False, default=GrouponAttendLineStatus.UNPAID, verbose_name="参团状态 -1:超时未支付 0:未支付 1:已支付"
    )

    class Meta:
        db_table = "groupon_attend_detail"
        verbose_name = "拼团参与详情表"
        verbose_name_plural = verbose_name
import json
from webargs.fields import Field
from webargs import ValidationError
from wsc_django.utils.authenticate import SimpleEncrypt
class StrToList(Field):
    """webargs field: deserialize a delimited string into list(int) (or list(str)).

    :param split_str: delimiter used to split the incoming string
    :param if_int: when True each item is cast to int, otherwise kept as str
    """

    def __init__(self, split_str: str = ",", if_int: bool = True, **kwargs):
        super().__init__(**kwargs)
        self.split_str = split_str
        self.trans_type = int if if_int else str

    def _deserialize(self, value, attr, data, **kwargs):
        if not isinstance(value, (str, bytes)):
            raise self.make_error("invalid")
        try:
            if isinstance(value, bytes):
                value = value.decode("utf-8")
            # Strip all spaces before splitting; empty input yields [].
            cleaned = value.replace(" ", "")
            parts = cleaned.split(self.split_str) if cleaned else []
            return [self.trans_type(part) for part in parts]
        except Exception as e:
            raise ValidationError(str(e))
class StrToDict(Field):
    """webargs field: accept a dict as-is, or parse a JSON string into a dict."""

    def _deserialize(self, value, attr, data, **kwargs):
        if isinstance(value, dict):
            return value
        if not isinstance(value, str):
            raise ValidationError("value type error")
        try:
            return json.loads(value)
        except Exception:
            raise ValidationError("value error must json str")
class DecryptPassword(Field):
    """webargs field: decrypt an encrypted password string via SimpleEncrypt."""

    def _deserialize(self, value, attr, data, **kwargs):
        if not isinstance(value, str):
            raise ValidationError("value type error")
        try:
            return SimpleEncrypt.decrypt(value)
        except Exception:
            raise ValidationError("decrypt password fail")
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Second half of the split initial staff migration: adds the FK columns."""

    # NOTE(review): marked initial despite having dependencies -- Django does
    # this for auto-split initial migrations; confirm it was auto-generated.
    initial = True

    dependencies = [
        ('shop', '0002_auto_20210606_2054'),
        ('staff', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='staffapply',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='对应的用户对象'),
        ),
        migrations.AddField(
            model_name='staff',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='员工对应的商铺对象'),
        ),
        migrations.AddField(
            model_name='staff',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='员工对应的用户对象'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/customer/migrations/0004_mineaddress_added.py
# Generated by Django 3.1.6 on 2021-06-08 03:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the optional `added` free-text note column to MineAddress."""

    dependencies = [
        ('customer', '0003_auto_20210606_2054'),
    ]

    operations = [
        migrations.AddField(
            model_name='mineaddress',
            name='added',
            field=models.CharField(max_length=50, null=True, verbose_name='补充说明,可以填写门牌号等信息'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/utils/authenticate.py
"""验证相关"""
import base64
from Crypto.Cipher import DES
from rest_framework.authentication import BaseAuthentication
from rest_framework import exceptions
class WSCIsLoginAuthenticate(BaseAuthentication):
    """DRF authentication class: rejects requests from users who are not logged in."""

    def authenticate_header(self, request):
        # Returning a header value makes DRF answer 401 instead of 403.
        """Without an auth header DRF would respond with status 403."""
        return "wsc_login_auth"

    def authenticate(self, request):
        # Relies on upstream middleware having set request.current_user.
        # NOTE(review): returns None implicitly on success (no (user, auth)
        # tuple) -- presumably the project identifies the user elsewhere; confirm.
        if not request.current_user:
            raise exceptions.AuthenticationFailed('用户未登录')
class WSCAuthenticate(BaseAuthentication):
    """DRF authentication class: verifies a signed mall request.

    The `sign` field in the request body is an encrypted string of the form
    "passport_id@timestamp"; each decrypted part must equal the corresponding
    plain field sent alongside it, otherwise authentication yields None.
    """

    def authenticate(self, request, username=None, password=None, **kwargs):
        sign = request.data.get("sign")
        key = SimpleEncrypt.decrypt(sign)
        key_list = key.split("@")
        params = ("passport_id", "timestamp")
        if len(key_list) != len(params):
            return None
        for index, v in enumerate(params):
            # BUG FIX: `request` has no `.get()` method (neither DRF Request
            # nor HttpRequest); the plain values travel in the request body
            # alongside `sign`, so they are read from request.data.
            if key_list[index] != str(request.data.get(v)):
                return None
class EncryptBase:
    """DES(ECB)-based reversible encryption helper.

    Example:
        SimpleEncrypt.encrypt('10930')           -> '41c836605df56a81'
        SimpleEncrypt.decrypt('41c836605df56a81') -> '10930'

    Subclasses must define `crypt_key` (bytes key) and `crypt_len`
    (block/key length used for padding).
    """
    # Padding character: ASCII control char 0x02, stripped again after decrypt.
    asciiCode = 2
    padding = chr(asciiCode)

    @classmethod
    def pad(cls, text):
        """
        Right-pad *text* with the padding char to a multiple of `crypt_len`
        (which must equal the key length), then encode to bytes.
        """
        while len(text) % cls.crypt_len != 0:
            text += cls.padding
        return text.encode()

    @classmethod
    def encrypt(cls, text):
        """
        DES-ECB-encrypt *text* and return the result base64-encoded.

        :param text: plaintext; coerced to str when not already a string
        """
        if not isinstance(text, str):
            text = str(text)
        des = DES.new(cls.crypt_key, DES.MODE_ECB)
        padded_text = cls.pad(text)
        encrypted_text = des.encrypt(padded_text)
        return base64.b64encode(encrypted_text).decode()

    @classmethod
    def decrypt(cls, text):
        """Reverse of encrypt(); returns "0" when *text* is not valid base64."""
        if not isinstance(text, str):
            text = str(text)
        try:
            encrypted_text = base64.b64decode(text)
        except:
            return "0"
        des = DES.new(cls.crypt_key, DES.MODE_ECB)
        # NOTE(review): the plaintext is truncated to 15 chars before the
        # padding is stripped -- presumably a max-payload assumption; confirm.
        return des.decrypt(encrypted_text).decode()[0:15].strip(cls.padding)

    @classmethod
    def decrypt_to_int(cls, text):
        """decrypt() then int(); returns 0 on any failure."""
        try:
            text = int(cls.decrypt(text))
        except:
            text = 0
        return text

    @classmethod
    def decrypt_to_list(cls, text):
        """decrypt() a comma-separated payload into a list of ints,
        silently skipping non-numeric items; returns [] on failure."""
        try:
            text = cls.decrypt(text)
            text_list = text.split(",")
            res_list = []
            for data in text_list:
                try:
                    data = int(data)
                except:
                    continue
                res_list.append(data)
        except:
            res_list = []
        return res_list
class SimpleEncrypt(EncryptBase):
    """
    Concrete encryptor used for micro-mall (WSC) data.

    NOTE(review): hard-coded DES key with ECB mode is cryptographically weak;
    acceptable for id obfuscation only, not for secrets.
    """
    crypt_key = 'MRhGeb5T'.encode()
    crypt_len = 8
from rest_framework import serializers
from customer.constant import CUSTOMER_POINT_TYPE
from customer.services import create_mine_address, check_default_address
from wsc_django.utils.constant import DateFormat
from wsc_django.utils.core import FuncField, FormatAddress
from user.serializers import UserSerializer
class AdminCustomerSerializer(UserSerializer):
    """Serializer for customers shown in the shop admin backend.

    Extends UserSerializer with consumption statistics, points and remarks.
    """
    customer_id = serializers.IntegerField(source="id", label="客户id")
    # Monetary/point values are rounded to 2 decimal places for display.
    consume_amount = FuncField(lambda value: round(float(value), 2), label="客户消费金额")
    consume_count = serializers.IntegerField(label="客户消费次数")
    point = FuncField(lambda value: round(float(value), 2), label="客户积分")
    remark = serializers.CharField(label="客户备注")
    realname = serializers.CharField(required=False, label="用户真实姓名")
    nickname = serializers.CharField(required=False, label="微信昵称")
    sex = serializers.IntegerField(required=False, label="性别")
    phone = serializers.CharField(required=False, label="手机号")
    birthday = serializers.DateField(required=False, format=DateFormat.DAY, default="", label="用户生日")
    head_image_url = serializers.CharField(required=False, label="头像")
class AdminCustomerPointsSerializer(serializers.Serializer):
    """Serializer for a customer's point-change history rows (admin backend)."""
    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="操作时间")
    # Maps the numeric change-type code to its display name.
    type = FuncField(lambda value: CUSTOMER_POINT_TYPE.get(value), label="操作类型")
    point_change = FuncField(lambda value: round(float(value), 2), label="积分变更值")
    current_point = FuncField(lambda value: round(float(value), 2), label="当前积分")
class MallMineAddressSerializer(serializers.Serializer):
    """Serializer for a customer's saved delivery address (mall side)."""
    address_id = serializers.IntegerField(read_only=True, source="id", label="地址id")
    name = serializers.CharField(label="收货人姓名")
    sex = serializers.IntegerField(label="收货人性别")
    phone = serializers.IntegerField(label="收货人电话")
    province = serializers.IntegerField(label="省份编号")
    city = serializers.IntegerField(label="城市编号")
    county = serializers.IntegerField(label="区编号")
    address = serializers.CharField(label="详细地址")
    default = serializers.IntegerField(label="是否为默认地址")
    added = serializers.CharField(required=False, allow_blank=True, label="补充说明")
    longitude = serializers.FloatField(required=False, label="经度")
    latitude = serializers.FloatField(required=False, label="纬度")

    def validate(self, attrs):
        """Validate that the province/city/county codes form a legal region."""
        province = attrs.get("province")
        city = attrs.get("city")
        county = attrs.get("county")
        res = FormatAddress.check_code([province, city, county])
        if not res:
            raise serializers.ValidationError("省市区编号不合法")
        return attrs

    def create(self, validated_data):
        # The view instance is passed via context; it carries user and shop.
        user = self.context["self"].current_user
        shop = self.context["self"].current_shop
        # When flagged default, presumably un-sets the previous default first
        # -- TODO confirm check_default_address semantics.
        if 'default' in validated_data.keys() and validated_data['default']:
            check_default_address(user.id, shop.id)
        mine_address = create_mine_address(validated_data, user.id, shop.id)
        return mine_address

    def update(self, instance, validated_data):
        user = self.context["self"].current_user
        shop = self.context["self"].current_shop
        if 'default' in validated_data.keys() and validated_data['default']:
            check_default_address(user.id, shop.id)
        # Apply partial updates field by field, then persist.
        for k, v in validated_data.items():
            setattr(instance, k, v)
        instance.save()
        return instance
<file_sep>/wsc_django/wsc_django/apps/demo/serializers.py
from rest_framework import serializers
class DemoSerializer(serializers.Serializer):
    """Minimal demo serializer: exposes `realname` and the read-only primary key."""
    realname = serializers.CharField()
    id = serializers.PrimaryKeyRelatedField(read_only=True)
<file_sep>/wsc_django/uwsgi.ini
[uwsgi]
#使用nginx连接时使用,Django程序所在服务器地址
socket=10.0.4.5:8001
#直接做web服务器使用,Django程序所在服务器地址
#http=10.211.55.2:8000
#项目目录
chdir=/root/wsc_django/wsc_django
#项目中wsgi.py文件的目录,相对于项目目录
wsgi-file=wsc_django/wsgi.py
# 进程数
processes=4
# 线程数
threads=2
# uwsgi服务器的角色
master=True
# 存放进程编号的文件
pidfile=uwsgi.pid
# 日志文件,因为uwsgi可以脱离终端在后台运行,日志看不见。我们以前的runserver是依赖终端的
daemonize=uwsgi.log
# 指定依赖的虚拟环境
virtualenv=/root/.virtualenvs/wsc_django
<file_sep>/wsc_django/wsc_django/apps/user/urls.py
"""
User-related routes.
"""
from django.urls import path, re_path

from user import views


urlpatterns_admin = [
    path('api/super/user/authorization/', views.AdminUserAuthorizationView.as_view()),  # super-admin: verify login state
    path('api/super/user/', views.SuperUserView.as_view()),  # super-admin: get user detail & update basic info
    path('api/super/user/phone/', views.SuperUserPhoneView.as_view()),  # super-admin: change phone number
    path('api/super/user/password/', views.SuperUserPasswordView.as_view()),  # super-admin: change password
    path('api/super/user/email/', views.SuperUserEmailView.as_view()),  # super-admin: verify, bind & activate email
    path('api/admin/user/', views.AdminUserView.as_view()),  # admin: login / register
    path('api/admin/user/logout/', views.AdminUserLogoutView.as_view()),  # admin: logout
    path('api/admin/user/sms_code/', views.SMSCodeView.as_view()),  # admin: send SMS verification code
]

urlpatterns_mall = [
    re_path(r'^api/mall/(?P<shop_code>\w+)/user/$', views.MallUserView.as_view()),  # mall: user login
    re_path(r'^api/mall/(?P<shop_code>\w+)/user/register/$', views.MallUserRegisterView.as_view()),  # mall: user registration
    re_path(r'^api/mall/(?P<shop_code>\w+)/user/authorization/$', views.MallUserAuthorizationView.as_view()),  # mall: verify login state
    path('api/mall/sms_code/', views.SMSCodeView.as_view()),  # mall: SMS verification code
]

urlpatterns = urlpatterns_admin + urlpatterns_mall
<file_sep>/wsc_django/wsc_django/apps/delivery/views.py
import datetime
from webargs import fields, validate
from webargs.djangoparser import use_args
from delivery.serializers import AdminDeliveryConfigSerializer
from delivery.services import get_delivery_config_by_shop_id, update_delivery_config
from wsc_django.utils.views import AdminBaseView, MallBaseView
class AdminDeliveryConfigView(AdminBaseView):
    """Admin - orders - fetch the current shop's delivery configuration."""

    def get(self, request):
        ok, result = get_delivery_config_by_shop_id(self.current_shop.id)
        if not ok:
            # `result` carries the error text on failure.
            return self.send_fail(error_text=result)
        return self.send_success(data=AdminDeliveryConfigSerializer(result).data)
class AdminDeliveryConfigHomeView(AdminBaseView):
    """Admin - orders - home-delivery settings.

    Updates the shop's delivery-fee thresholds for the home-delivery mode.
    """

    @use_args(
        {
            "home_minimum_order_amount": fields.Float(
                required=True, comment="配送模式起送金额"
            ),
            "home_delivery_amount": fields.Float(required=True, comment="配送模式配送费"),
            "home_minimum_free_amount": fields.Float(
                required=True, comment="配送模式免配送费最小金额"
            ),
        },
        location="json"
    )
    def put(self, request, args):
        # Leftover debug instrumentation (import time / t1 / print) removed.
        success, msg = update_delivery_config(
            self.current_shop.id, args, self.current_user.id
        )
        if not success:
            return self.send_fail(error_text=msg)
        return self.send_success()
class AdminDeliveryConfigPickView(AdminBaseView):
    """Admin - orders - self-pickup settings."""

    # Parameter validator for "HH:MM" time strings.
    # NOTE(review): declared with `self`, but used as a plain webargs validator,
    # so `self` actually receives the *value* being validated. This works
    # because it is referenced as a bare name inside the class body below --
    # confirm this is intentional.
    def validate_time(self):
        try:
            datetime.datetime.strptime(self, "%H:%M")
        except Exception:
            return False
        return True

    @use_args(
        {
            "pick_service_amount": fields.Float(required=True, comment="自提模式服务费"),
            "pick_minimum_free_amount": fields.Float(
                required=True, comment="自提模式免服务费最小金额"
            ),
            "pick_today_on": fields.Boolean(required=True, comment="今天自提是否开启"),
            "pick_tomorrow_on": fields.Boolean(required=True, comment="明天自提是否开启"),
            "pick_periods": fields.Nested(
                {
                    "from_time": fields.String(
                        required=True, comment="自提起始时间", validate=validate_time
                    ),
                    "to_time": fields.String(
                        required=True, comment="自提终止时间", validate=validate_time
                    ),
                },
                many=True,
                validate=[validate.Length(1)],
                unknown=True,
                comment="自提时段",
            ),
        },
        location="json"
    )
    def put(self, request, args):
        success, msg = update_delivery_config(
            self.current_shop.id, args, self.current_user.id
        )
        if not success:
            return self.send_fail(error_text=msg)
        return self.send_success()
class AdminDeliveryConfigMethodView(AdminBaseView):
    """Admin - orders - enable/disable home delivery or self-pickup."""

    @use_args(
        {
            "home_on": fields.Boolean(comment="配送模式是否开启"),
            "pick_on": fields.Boolean(comment="自提模式是否开启"),
        },
        location="json"
    )
    def put(self, request, args):
        # Both flags are optional, but at least one must be supplied.
        if not args:
            return self.send_fail(error_text="请选择配送设置项目")
        ok, msg = update_delivery_config(self.current_shop.id, args)
        if not ok:
            return self.send_fail(error_text=msg)
        return self.send_success()
import json
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from ws.constant import CHANNEL_ADMIN
def publish_admin(shop_id: int, event: str, data: dict):
    """
    Broadcast an event to the admin websocket group.

    :param shop_id: shop id; filtering happens on the receiving side
    :param event: event name (str)
    :param data: payload published with the event
    :return: None
    """
    layer = get_channel_layer()
    async_to_sync(layer.group_send)(
        CHANNEL_ADMIN,
        {
            "type": "send.message",
            "message": {"shop_id": shop_id, "event": event, "data": data},
        },
    )
from webargs import fields, validate
from webargs.djangoparser import use_args
from storage.serializers import AdminProductStorageRecordsSerializer
from storage.services import list_product_storage_record_by_product_id
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import AdminBaseView
class AdminProductStorageRecordsView(AdminBaseView):
    """Admin - products - list stock-change records of one product."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PRODUCT]
    )
    @use_args(
        {
            "product_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="货品ID"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        shop_id = self.current_shop.id
        # Records are scoped to the current shop; product_id comes from args.
        product_storage_record_list = list_product_storage_record_by_product_id(
            shop_id, **args
        )
        # Paginate and serialize via the AdminBaseView helper.
        product_storage_record_list = self._get_paginated_data(
            product_storage_record_list, AdminProductStorageRecordsSerializer
        )
        return self.send_success(data_list=product_storage_record_list)
<file_sep>/wsc_django/wsc_django/apps/storage/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial storage-app migration: creates the `product_storage_record` table."""

    initial = True

    dependencies = [
        ('product', '0002_auto_20210606_2054'),
        ('shop', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='ProductStorageRecord',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('operator_type', models.SmallIntegerField(default=1, verbose_name='操作人类型,1:员工,2:客户')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='货品库存变更记录创建时间')),
                ('type', models.SmallIntegerField(default=2, verbose_name='货品库存记录变更类型')),
                ('change_storage', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='本次操作变更量')),
                ('current_storage', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='历史时刻当前库存')),
                ('order_num', models.CharField(max_length=20, verbose_name='订单号')),
                ('status', models.SmallIntegerField(default=1, verbose_name='状态')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.product', verbose_name='对应货品对象')),
                ('shop', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='对应的店铺对象')),
            ],
            options={
                'verbose_name': '货品库存变更记录',
                'verbose_name_plural': '货品库存变更记录',
                'db_table': 'product_storage_record',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/utils/sms.py
"""短信第三方接口"""
import random
import requests
from qcloudsms_py import SmsSingleSender
from wsc_django.apps.settings import (
TENCENT_SMS_APPID,
TENCENT_SMS_APPKEY,
YUNPIAN_SYSTEM_APIKEY,
)
# SMS signatures prepended to message bodies.
yunpian_sms_common_sign = "【志浩web开发】"
tencent_sms_common_sign = "【志浩web开发】"


class YunPianSms:
    """Yunpian SMS gateway client (stateless, classmethod-based)."""

    # Service hosts.
    sms_host = "sms.yunpian.com"
    voice_host = "voice.yunpian.com"
    # API version.
    version = "v2"
    # URIs of the template-SMS endpoints.
    sms_tpl_send_uri = "/{}/sms/tpl_single_send.json".format(version)
    sms_text_send_uri = "/{}/sms/single_send.json".format(version)
    # URI of the URL-shortening endpoint.
    sms_short_url_uri = "/{}/short_url/shorten.json".format(version)
    # URI for bulk marketing SMS.
    sms_marketing_group_send_uri = "/{}/sms/tpl_batch_send.json".format(version)
    @classmethod
    def tpl_send_sms(cls, tpl_id, tpl_value, mobile, apikey=YUNPIAN_SYSTEM_APIKEY):
        """
        Send a template SMS identified by template id.

        :param tpl_id: Yunpian template id
        :param tpl_value: template substitution values
        :param mobile: recipient phone number
        :param apikey: Yunpian API key
        :return: (success, error_message) -- error_message is "" on success
        """
        # The message body must not contain 【 or 】 -- replace before sending.
        tpl_value = tpl_value.replace("【", "[")
        tpl_value = tpl_value.replace("】", "]")
        params = {
            "apikey": apikey,
            "tpl_id": tpl_id,
            "tpl_value": tpl_value,
            "mobile": mobile,
        }
        try:
            res = requests.post(
                "http://" + cls.sms_host + cls.sms_tpl_send_uri,
                data=params,
                timeout=(1, 5),
            )
        except:
            return False, "短信发送接口超时或异常, 请稍后重试"
        response = res.json()
        # Yunpian returns code 0 on success.
        if response.get("code", 1) == 0:
            return True, ""
        else:
            return False, response.get("detail", "验证码发送失败,请稍后再试")
@classmethod
def tpl_send_sms_with_text(
cls, tpl_value, mobile, sign_type=yunpian_sms_common_sign, apikey=YUNPIAN_SYSTEM_APIKEY
):
"""
模板接口发短信(文本传入)
"""
# 短信中不能包含【 和】,发送前进行替换
tpl_value = tpl_value.replace("【", "[")
tpl_value = tpl_value.replace("】", "]")
params = {
"apikey": apikey,
"mobile": mobile,
"text": "{}{}".format(sign_type, tpl_value),
}
try:
res = requests.post(
"http://" + cls.sms_host + cls.sms_text_send_uri,
data=params,
timeout=(1, 5),
)
except:
return False, "短信发送接口超时或异常, 请稍后重试"
response = res.json()
if response.get("code", 1) == 0:
return True, ""
else:
return False, response.get("detail", "验证码发送失败,请稍后再试")
@classmethod
def tpl_short_url(cls, long_url, apikey=YUNPIAN_SYSTEM_APIKEY):
"""获取短链接"""
params = {"apikey": apikey, "long_url": long_url}
try:
res = requests.post(
"http://" + cls.sms_host + cls.sms_short_url_uri,
data=params,
timeout=(1, 5),
)
except:
return False, "短信发送接口超时或异常, 请稍后重试"
response = res.json()
if response.get("code", 1) == 0:
return True, response["short_url"]["short_url"]
else:
return False, long_url
@classmethod
def tpl_send_sms_ret(cls, tpl_id, mobile, tpl_value, apikey=YUNPIAN_SYSTEM_APIKEY):
"""
单条发送接口,返回实际发送消耗的短信条数或发送失败原因
"""
params = {
"apikey": apikey,
"mobile": mobile,
"tpl_value": tpl_value,
"tpl_id": tpl_id,
}
try:
res = requests.post(
"https://" + cls.sms_host + cls.sms_tpl_send_uri,
data=params,
timeout=(1, 5),
)
except:
return False, "短信发送接口超时或返回异常,请稍后再试"
response = res.json()
if response.get("code", 1) == 0:
return True, response.get("count", 1)
else:
return False, response.get("detail") or response.get("msg", "短信发送失败,原因未知")
@classmethod
def send_sms_branch_ret(
cls, tpl_id, mobiles, tpl_value, callback_url=None, apikey=YUNPIAN_SYSTEM_APIKEY
):
"""
群发接口,返回所有结果
:param apikey: 用户唯一标识,在管理控制台获取
:param tpl_id: 模板id
:param mobiles: 单号码:15205201314 多号码:15205201314,15205201315
:param text: 已审核短信模板
:param callback_url: 短信发送后将向这个地址推送发送报告, 这个接口好像是同步接口,直接返回结果。。。异步回调还有必要吗?
:return: total_count, total_fee, unit, data
"""
params = {
"apikey": apikey,
"tpl_id": tpl_id,
"mobile": mobiles,
"tpl_value": tpl_value,
}
if callback_url:
params["callback_url"] = callback_url
headers = {
"Content-type": "application/x-www-form-urlencoded;charset=utf-8;",
"Accept": "application/json;charset=utf-8;",
"Connection": "keep-alive",
}
try:
res = requests.post(
"https://" + cls.sms_host + cls.sms_marketing_group_send_uri,
data=params,
headers=headers,
)
except:
return False, "短信发送接口超时或返回异常,请稍后再试"
response = res.json()
total_count = response.get("total_count", 0)
data = response.get("data", [])
return True, (total_count, data)
@classmethod
def send_yunpian_verify_code(cls, mobile, code, use, mode="text"):
"""发送短信验证码,模版内容:
【微商城助手】您的验证码是#code#。此验证码用于绑定手机号,5分钟内有效。
ps:这个单独用了一个不一样的签名,现在审核模版必须要加图形验证码,狗带 2018-07-19 by yy
"""
if mode == "text":
tpl_value = "您的验证码是{code}。此验证码用于绑定手机号".format(code=code)
return cls.tpl_send_sms_with_text(tpl_value, mobile)
else:
tpl_id = 4460930
tpl_value = "#code#={}&#use#={}".format(code, use)
return cls.tpl_send_sms(tpl_id, tpl_value, mobile)
class TencentSms:
    """Tencent Cloud SMS client.

    Fix over the previous revision: the bare ``except:`` was narrowed to
    ``except Exception:`` so ``SystemExit``/``KeyboardInterrupt`` are not
    swallowed.
    """

    # Single-send API wrapper, shared by every call.
    ssender = SmsSingleSender(TENCENT_SMS_APPID, TENCENT_SMS_APPKEY)

    @classmethod
    def tpl_send_sms(cls, sms_text, mobile, smsType=0, smsSign=tencent_sms_common_sign):
        """Send a single SMS.

        :param sms_text: message body.
        :param mobile: recipient phone number.
        :param smsType: signature type — 0: normal SMS, 1: marketing SMS.
        :param smsSign: signature text prepended to the body.
        :return: ``(True, "")`` on success, ``(False, errmsg)`` on failure.
        """
        # The body must not contain full-width brackets 【】 — replace them.
        sms_text = sms_text.replace("【", "[")
        sms_text = sms_text.replace("】", "]")
        # Prepend the signature to the body.
        sms_text = "{}{}".format(smsSign, sms_text)
        try:
            # Example failure payload:
            # {'result': 1014, 'errmsg': 'package format error, sdkappid not have this tpl_id', 'ext': ''}
            result = cls.ssender.send(
                smsType, 86, mobile, sms_text, extend="", ext=""
            )  # when the sign argument is missing/empty the default sign is used
        except Exception:
            return False, "短信发送接口超时或返回异常,请稍后再试"
        result_code = result["result"]
        if result_code == 0:
            return True, ""
        return False, result["errmsg"]

    @classmethod
    def send_tencent_verify_code(cls, mobile, code, use):
        """Send an SMS verification code.

        Template: 您的验证码是#code#。此验证码用于#use#,5分钟内有效。
        :param mobile: phone number
        :param code: verification code
        :param use: intended purpose of the code
        """
        # NOTE(review): `use` is accepted but the text hard-codes the purpose
        # ("绑定手机号") — confirm whether it should be interpolated instead.
        sms_text = "您的验证码是{code}。此验证码用于绑定手机号,5分钟内有效。".format(code=code)
        return cls.tpl_send_sms(sms_text, mobile)
def gen_sms_code(code_length=4):
    """Generate a random numeric SMS verification code.

    :param code_length: number of digits; defaults to 4, matching the
        previous hard-coded behavior (backward compatible).
    :return: a string of ``code_length`` random decimal digits.
    """
    population_seq = "0123456789"  # alphabet the code is drawn from
    return "".join(random.choice(population_seq) for _ in range(code_length))
<file_sep>/wsc_django/wsc_django/utils/lcsw.py
import datetime
import hashlib
import json
import urllib.parse
import uuid
import requests
from wsc_django.apps.settings import LCSW_HANDLE_HOST, LCSW_INST_NO, LCSW_INST_KEY
from wsc_django.utils.core import Emoji
def formatParaMap(paraMap, paraList, if_sort=False):
    """Join selected ``key=value`` pairs with ``&`` (used when signing requests).

    :param paraMap: mapping of parameter names to values.
    :param paraList: the keys to include, in signing order.
    :param if_sort: when true, join the keys in sorted order instead of
        list order.
    :return: the joined ``key=value&key=value`` string.
    """
    keys = sorted(paraList) if if_sort else paraList
    return "&".join("{0}={1}".format(key, paraMap[key]) for key in keys)
class LcswPay:
    """Lcsw (利楚扫呗) payment gateway helpers.

    Every helper either builds the parameter dict for an Lcsw HTTP call or
    rebuilds the string that the gateway signs. Signatures are lowercase
    MD5 over '&'-joined key=value pairs followed by '&access_token=...'.
    Parameter ORDER is part of the signing contract — do not reorder the
    para lists below.
    """
    @staticmethod
    def getStrForSignOfTradeNotice(ret_dict):
        """Rebuild the sign string for a payment-callback notification."""
        paraList = [
            "return_code",
            "return_msg",
            "result_code",
            "pay_type",
            "user_id",
            "merchant_name",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "total_fee",
            "end_time",
            "out_trade_no",
            "channel_trade_no",
            "attach",
        ]
        return formatParaMap(ret_dict, paraList)
    @staticmethod
    def getJspayParas(
        order_num,
        open_id,
        create_time,
        total_fee,
        order_body,
        notify_url,
        merchant_no,
        terminal_id,
        access_token,
    ):
        """Build the signed parameter dict for a WeChat JSAPI (公众号) payment."""
        parameters = {}
        parameters["pay_ver"] = "100"
        parameters["pay_type"] = "010"
        parameters["service_id"] = "012"
        parameters["merchant_no"] = merchant_no
        parameters["terminal_id"] = terminal_id
        parameters["terminal_trace"] = order_num  # terminal trace: the merchant system's order number
        parameters["terminal_time"] = create_time
        parameters["total_fee"] = total_fee
        parameters["open_id"] = open_id
        # Order body must be free of emoji, spaces and '&' (would corrupt
        # the form-encoded request / sign string).
        parameters["order_body"] = (
            Emoji.filter_emoji(order_body).replace(" ", "").replace("&", "")
        )
        parameters["notify_url"] = notify_url
        parameters["attach"] = "SENGUOPRODUCT"
        # Keys participating in the signature, in the order Lcsw requires.
        paraList = [
            "pay_ver",
            "pay_type",
            "service_id",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "total_fee",
        ]
        str_sign = (
            formatParaMap(parameters, paraList) + "&access_token=%s" % access_token
        )
        parameters["key_sign"] = (
            hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower()
        )
        return parameters
    @staticmethod
    def getAuthOpenidUrl(merchant_no, terminal_id, access_token, redirect_uri):
        """Build the redirect URL used to obtain the payer's WeChat open_id."""
        # Note the field-name asymmetry: the sign string uses terminal_no
        # while the function receives terminal_id — this matches the API.
        str_sign = "merchant_no=%s&redirect_uri=%s&terminal_no=%s&access_token=%s" % (
            merchant_no,
            redirect_uri,
            terminal_id,
            access_token,
        )
        key_sign = hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower()
        # NOTE(review): the literal '&&' below looks accidental, but it is
        # what is deployed — confirm against the Lcsw docs before changing.
        parameters_str = (
            "merchant_no=%s&terminal_no=%s&&redirect_uri=%s&key_sign=%s"
            % (
                merchant_no,
                terminal_id,
                urllib.parse.quote(redirect_uri, safe="?"),
                key_sign,
            )
        )
        url = "%s/wx/jsapi/authopenid?%s" % (LCSW_HANDLE_HOST, parameters_str)
        return url
    @staticmethod
    def getStrForSignOfJspayRet(ret_dict):
        """Rebuild the sign string for a JSAPI-payment response."""
        paraList = [
            "return_code",
            "return_msg",
            "result_code",
            "pay_type",
            "merchant_name",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "total_fee",
            "out_trade_no",
        ]
        buff = []
        for key in paraList:
            # Missing/None values are signed as empty strings.
            v = ret_dict[key] if ret_dict[key] else ""
            buff.append("{0}={1}".format(key, v))
        return "&".join(buff)
    @staticmethod
    def getRefundParas(
        pay_type,
        order_refund_num,
        out_trade_no,
        refund_fee,
        merchant_no,
        terminal_id,
        access_token,
    ):
        """Build the signed parameter dict for a refund request."""
        parameters = {}
        parameters["pay_ver"] = "100"
        parameters["pay_type"] = pay_type
        parameters["service_id"] = "030"
        parameters["merchant_no"] = merchant_no
        parameters["terminal_id"] = terminal_id
        parameters["terminal_trace"] = order_refund_num  # terminal refund trace: the merchant system's refund number
        terminal_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        parameters["terminal_time"] = terminal_time
        parameters["refund_fee"] = refund_fee
        parameters["out_trade_no"] = out_trade_no  # lookup key: any of the Lcsw / WeChat / Alipay order numbers
        paraList = [
            "pay_ver",
            "pay_type",
            "service_id",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "refund_fee",
            "out_trade_no",
        ]
        str_sign = (
            formatParaMap(parameters, paraList, False)
            + "&access_token=%s" % access_token
        )
        parameters["key_sign"] = (
            hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower()
        )
        return parameters
    @staticmethod
    def getStrForSignOfRefundRet(ret_dict):
        """Rebuild the sign string for a refund response."""
        paraList = [
            "return_code",
            "return_msg",
            "result_code",
            "pay_type",
            "merchant_name",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "refund_fee",
            "end_time",
            "out_trade_no",
            "out_refund_no",
        ]
        buff = []
        for key in paraList:
            # Missing/None values are signed as empty strings.
            v = ret_dict[key] if ret_dict[key] else ""
            buff.append("{0}={1}".format(key, v))
        return "&".join(buff)
    @staticmethod
    def getQueryParas(
        pay_type,
        order_query_num,
        out_trade_no,
        merchant_no,
        terminal_id,
        access_token,
        pay_trace="",
        pay_time="",
    ):
        """Build the signed parameter dict for an order-status query."""
        parameters = {}
        parameters["pay_ver"] = "100"
        parameters["pay_type"] = pay_type
        parameters["service_id"] = "020"
        parameters["merchant_no"] = merchant_no
        parameters["terminal_id"] = terminal_id
        parameters["terminal_trace"] = order_query_num  # terminal query trace: the merchant system's query number
        terminal_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        parameters["terminal_time"] = terminal_time
        parameters[
            "out_trade_no"
        ] = out_trade_no  # lookup key: any of the Lcsw / WeChat / Alipay / bank-card order numbers
        parameters["pay_trace"] = pay_trace  # current pay terminal trace; sent together with pay_time (32 chars accepted, docs are wrong)
        parameters[
            "pay_time"
        ] = pay_time  # current pay terminal time, yyyyMMddHHmmss; sent together with pay_trace
        paraList = [
            "pay_ver",
            "pay_type",
            "service_id",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "out_trade_no",
        ]
        str_sign = (
            formatParaMap(parameters, paraList, False)
            + "&access_token=%s" % access_token
        )
        parameters["key_sign"] = (
            hashlib.md5(str_sign.encode("utf-8")).hexdigest().lower()
        )
        return parameters
    @staticmethod
    def getStrForSignOfQueryRet(ret_dict):
        """Rebuild the sign string for an order-status-query response."""
        paraList = [
            "return_code",
            "return_msg",
            "result_code",
            "pay_type",
            "merchant_name",
            "merchant_no",
            "terminal_id",
            "terminal_trace",
            "terminal_time",
            "total_fee",
            "end_time",
            "out_trade_no",
        ]
        buff = []
        for key in paraList:
            # Missing/None values are signed as empty strings.
            v = ret_dict[key] if ret_dict[key] else ""
            buff.append("{0}={1}".format(key, v))
        return "&".join(buff)
class LcswFunds:
    """Lcsw (利楚扫呗) merchant funds API."""

    @staticmethod
    def queryWithdrawal(merchant_no):
        """Query the withdrawal status of a merchant.

        :param merchant_no: Lcsw merchant number.
        :return: the gateway's JSON response as a dict. A missing merchant
            number or a transport/parse failure returns a dict with the
            same ``return_code``/``return_msg``/``result_code`` envelope,
            so callers never see an exception.
        """
        if not merchant_no:
            return {"return_code": "02", "return_msg": "商户号不能为空", "result_code": "02"}
        url = LCSW_HANDLE_HOST.rstrip("/") + "/merchant/100/withdraw/query"
        # Keys participating in the signature (sorted before joining).
        paralist = ["inst_no", "trace_no", "merchant_no"]
        data = dict(
            inst_no=LCSW_INST_NO, trace_no=uuid.uuid4().hex, merchant_no=merchant_no
        )
        # Sign: sorted key=value pairs plus the institution key, MD5-hexed.
        str_sign = "{}&key={}".format(
            formatParaMap(data, paralist, True), LCSW_INST_KEY
        )
        data["key_sign"] = hashlib.md5(str_sign.encode("utf-8")).hexdigest()
        try:
            r = requests.post(url=url, json=data, timeout=(1, 10))
            result = r.json()
        except Exception:
            # Previously unguarded: a timeout or bad payload raised out of
            # this helper while every error path above returned an envelope
            # dict. Keep the same error shape on transport failures.
            return {"return_code": "02", "return_msg": "利楚接口请求超时或异常,请稍后重试", "result_code": "02"}
        return result
<file_sep>/wsc_django/wsc_django/apps/order/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the order app: Order, OrderAddress, OrderDetail.
    # Auto-generated by Django 3.1.6 — do not hand-edit field definitions
    # once this migration has been applied anywhere.
    initial = True
    dependencies = [
        ('customer', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('create_date', models.DateField(auto_now_add=True, verbose_name='下单日期')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='下单时间')),
                ('delivery_method', models.SmallIntegerField(default=1, verbose_name='配送方式,1:送货上门,2:客户自提')),
                ('delivery_period', models.CharField(max_length=32, verbose_name='自提处理时段')),
                ('order_num', models.CharField(max_length=20, unique=True, verbose_name='订单号')),
                ('order_status', models.SmallIntegerField(default=1, verbose_name='订单状态,具体见constant')),
                ('remark', models.CharField(default='', max_length=64, verbose_name='订单备注')),
                ('pay_type', models.SmallIntegerField(default=2, verbose_name='订单支付方式')),
                ('order_type', models.SmallIntegerField(default=1, verbose_name='订单类型,1:普通订单,2:拼团订单')),
                ('amount_gross', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='货款金额(优惠前)')),
                ('amount_net', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='货款金额(优惠后)')),
                ('delivery_amount_gross', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='货款金额运费(优惠前)')),
                ('delivery_amount_net', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='货款金额运费(优惠后)')),
                ('total_amount_gross', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='订单金额(优惠前)')),
                ('total_amount_net', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='订单金额(优惠后)')),
                ('refund_type', models.SmallIntegerField(default=2, verbose_name='订单退款方式')),
            ],
            options={
                'verbose_name': '订单',
                'verbose_name_plural': '订单',
                'db_table': 'order',
            },
        ),
        migrations.CreateModel(
            name='OrderAddress',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('province', models.IntegerField(verbose_name='省份编码')),
                ('city', models.IntegerField(verbose_name='城市编码')),
                ('county', models.IntegerField(verbose_name='区编码')),
                ('address', models.CharField(max_length=64, verbose_name='详细地址')),
                ('name', models.CharField(max_length=32, verbose_name='收件人姓名')),
                ('sex', models.SmallIntegerField(default=0, verbose_name='收件人性别,0:未知1:男2:女')),
                ('phone', models.CharField(default='', max_length=32, verbose_name='收件人手机号')),
            ],
            options={
                'verbose_name': '订单地址',
                'verbose_name_plural': '订单地址',
                'db_table': 'order_address',
            },
        ),
        migrations.CreateModel(
            name='OrderDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('create_date', models.DateField(auto_now_add=True, verbose_name='下单日期')),
                ('quantity_gross', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='量(优惠前)')),
                ('quantity_net', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='量(优惠后)')),
                ('price_gross', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='单价(优惠前)')),
                ('price_net', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='单价(优惠后)')),
                ('amount_gross', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='金额(优惠前)')),
                ('amount_net', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='金额(优惠后)')),
                ('status', models.SmallIntegerField(verbose_name='订单状态,同order')),
                ('pay_type', models.SmallIntegerField(verbose_name='支付方式,同order')),
                ('refund_type', models.SmallIntegerField(null=True, verbose_name='退款方式,同order')),
                # NOTE(review): default='' on a SmallIntegerField is a string
                # default on an integer column — looks wrong; confirm against
                # the OrderDetail model before a follow-up migration.
                ('promotion_type', models.SmallIntegerField(default='', verbose_name='活动类型(预留)')),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='customer.customer', verbose_name='订单对应客户对象')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_detail', to='order.order', verbose_name='对应的订单对象')),
            ],
            options={
                'verbose_name': '订单详情',
                'verbose_name_plural': '订单详情',
                'db_table': 'order_detail',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/demo/views.py
"""测试使用"""
from rest_framework.pagination import PageNumberPagination
from rest_framework_jwt.settings import api_settings
from demo.serializers import DemoSerializer
from user.models import User
from wsc_django.utils.views import GlobalBaseView
class StandardResultsSetPagination(PageNumberPagination):
    # DRF page-number pagination used by the demo view.
    page_size = 6  # default page size
    page_size_query_param = 'page_size'  # client may override via ?page_size=
    max_page_size = 20  # upper bound for a client-requested page size
class DemoView(GlobalBaseView):
    """Development-only view: mints a JWT for an arbitrary user (GET) and a
    sample encrypted token (POST). No authentication — do not expose in
    production."""
    # permission_classes = [WSCAdminPermission]
    # authentication_classes = (WSCIsLoginAuthenticate,)
    serializer_class = DemoSerializer
    pagination_class = StandardResultsSetPagination
    def get(self, request):
        # Issue a 1-day JWT for the user id given in ?user_id=...
        # NOTE(review): no check that the user exists — jwt_payload_handler
        # receives None for an unknown id; confirm intended for a demo.
        user_id = request.query_params.get("user_id")
        user = User.objects.filter(id=user_id).first()
        jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
        jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
        import datetime
        payload = jwt_payload_handler(user, datetime.timedelta(days=1))
        token = jwt_encode_handler(payload)
        return self.send_success(token=token)
    def post(self, request):
        # Produce a SimpleEncrypt token of the form "<timestamp>@<id>".
        from wsc_django.utils.authenticate import SimpleEncrypt
        import time
        timestamp = int(time.time())
        id = 1
        text = '%d@%d'%(timestamp, id)
        res = SimpleEncrypt.encrypt(text)
        return self.send_success(data=res)
<file_sep>/wsc_django/wsc_django/apps/product/models.py
from django.db import models
from shop.models import Shop
from product.constant import (
ProductStatus,
ProductGroupDefault
)
from wsc_django.utils.models import TimeBaseModel
class ProductGroup(TimeBaseModel):
    """Product group (category) model; supports a parent/child hierarchy."""
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="对应的店铺对象")
    name = models.CharField(max_length=32, null=False, verbose_name="商品分组名称")
    description = models.CharField(max_length=128, default="", verbose_name="商品分组描述")
    parent = models.ForeignKey('self', null=True, blank=True, on_delete=models.CASCADE,verbose_name="该商品分组的父级ID")
    sort = models.IntegerField(null=True, verbose_name="商品分组排序")
    level = models.SmallIntegerField(null=True, verbose_name="商品分组级别")
    default = models.SmallIntegerField(
        null=False,
        default=ProductGroupDefault.NO,
        verbose_name="是否为默认分组, 0:否,1:是",
    )
    class Meta:
        db_table = "product_group"
        verbose_name = "货品分组"
        verbose_name_plural = verbose_name
    def set_default_sort(self):
        # Default sort key is the row id (call after the instance has a pk).
        self.sort = self.id
class Product(TimeBaseModel):
    """Product (sellable item) model, owned by a shop and a product group."""
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="货品对应的商铺对象")
    name = models.CharField(max_length=64, null=False, verbose_name="货品名称")
    name_acronym = models.CharField(max_length=64, null=False, verbose_name="货品名称拼音")
    group = models.ForeignKey(ProductGroup, on_delete=models.CASCADE, verbose_name="货品分组ID")
    price = models.DecimalField(max_digits=13, decimal_places=4, null=False, verbose_name="货品单价")
    storage = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        default=0,
        verbose_name="货品库存"
    )
    code = models.CharField(max_length=32, default="", verbose_name="货品编码")
    summary = models.CharField(max_length=128, default="", verbose_name="货品简介")
    cover_image_url = models.CharField(max_length=512, default="", verbose_name="货品封面图")
    description = models.TextField(default="", verbose_name="货品详情描述")
    # Status values mirror product.constant.ProductStatus.
    status = models.SmallIntegerField(
        null=False,
        default=ProductStatus.ON,
        verbose_name="货品状态, 0:删除, 1:上架, 2:下架",
    )
    class Meta:
        db_table = "product"
        verbose_name = "货品"
        verbose_name_plural = verbose_name
class ProductPicture(TimeBaseModel):
    """Carousel image attached to a product."""
    product = models.ForeignKey(Product, on_delete=models.CASCADE, null=False, verbose_name="对应货品对象")
    image_url = models.CharField(max_length=512, verbose_name="货品轮播图url")
    class Meta:
        db_table = "product_picture"
        verbose_name = "货品轮播图"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/order/views.py
import decimal
from rest_framework import status
from webargs import fields, validate
from webargs.djangoparser import use_args
from delivery.constant import DeliveryType
from delivery.serializers import AdminDeliverySerializer
from logs.constant import OrderLogType
from order.constant import OrderDeliveryMethod, OrderPayType, OrderType, OrderStatus, OrderRefundType
from order.services import order_data_check, set_order_paid, cancel_order, count_paid_order, \
set_order_status_confirmed_finish, refund_order
from promotion.constant import PromotionType
from wsc_django.utils.arguments import StrToList
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import MallBaseView, AdminBaseView
from product.constant import ProductStatus
from user.constant import Sex
from order.selectors import (
get_customer_order_with_detail_by_id,
get_customer_order_by_id,
list_customer_order_by_customer_ids,
list_shop_orders, count_abnormal_order, get_shop_order_by_num,
get_shop_order_by_num_without_details,
get_shop_order_by_shop_id_and_id,
get_order_by_shop_id_and_id, list_shop_abnormal_orders)
from order.serializers import (
MallOrderCreateSerializer,
MallOrderSerializer,
MallOrdersSerializer,
AdminOrdersSerializer,
AdminOrderSerializer,
OrderLogSerializer
)
from order.interface import (
list_product_by_ids_interface,
get_product_by_id_interface,
jsapi_params_interface,
auto_cancel_order_interface,
# order_commit_tplmsg_interface,
list_customer_ids_by_user_id_interface,
get_customer_by_user_id_and_shop_id_interface,
list_order_log_by_shop_id_and_order_num_interface,
get_order_delivery_by_delivery_id_interface,
print_order_interface, create_order_delivery_interface, get_msg_notify_by_shop_id_interface,
order_delivery_tplmsg_interface, order_finish_tplmsg_interface, order_refund_tplmsg_interface,
auto_validate_groupon_attend_interface, get_product_promotion_interface, get_groupon_by_id_interface,
count_groupon_attend_by_groupon_id_and_customer_id_interface)
_MAP_VALIDATE_PRODUCT = {}
def register_order_product_verify(key):
def register(func):
_MAP_VALIDATE_PRODUCT[key] = func
return func
return register
class AdminOrdersView(AdminBaseView):
    """Admin - orders - paginated order listing with type/pay/delivery/status
    filters; an explicit order number search overrides the other filters
    (handled in list_shop_orders)."""
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_types": StrToList(
                required=False,
                missing=[OrderType.NORMAL, OrderType.GROUPON],
                validate=[validate.ContainsOnly([OrderType.NORMAL, OrderType.GROUPON])],
                comment="订单类型筛选 1: 普通订单, 5: 拼团订单",
            ),
            "order_pay_types": StrToList(
                required=False,
                missing=[OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY],
                validate=[
                    validate.ContainsOnly(
                        [OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY]
                    )
                ],
                comment="订单支付方式筛选 1: 微信支付, 2: 货到付款",
            ),
            "order_delivery_methods": StrToList(
                required=False,
                missing=[
                    OrderDeliveryMethod.HOME_DELIVERY,
                    OrderDeliveryMethod.CUSTOMER_PICK,
                ],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderDeliveryMethod.HOME_DELIVERY,
                            OrderDeliveryMethod.CUSTOMER_PICK,
                        ]
                    )
                ],
                comment="订单配送方式筛选 1: 送货上门, 2: 自提",
            ),
            "order_status": StrToList(
                required=False,
                missing=[
                    OrderStatus.PAID,
                    OrderStatus.CONFIRMED,
                    OrderStatus.FINISHED,
                    OrderStatus.REFUNDED,
                ],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderStatus.PAID,
                            OrderStatus.CONFIRMED,
                            OrderStatus.FINISHED,
                            OrderStatus.REFUNDED,
                        ]
                    )
                ],
                comment="订单状态筛选 2: 未处理 3: 处理中 4: 已完成 5: 已退款",
            ),
            "num": fields.String(
                required=False, data_key="order_num", comment="订单号搜索,与其他条件互斥"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        # Filter, then paginate/serialize with the standard page wrapper.
        shop_id = self.current_shop.id
        order_list = list_shop_orders(shop_id, **args)
        order_list = self._get_paginated_data(order_list, AdminOrdersSerializer)
        return self.send_success(data_list=order_list)
class AdminOrderView(AdminBaseView):
    """Admin - orders - single order detail, looked up by its 19-char
    order number within the current shop."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_num": fields.String(
                comment="订单id", validate=lambda num: len(num) == 19
            )
        },
        location="query"
    )
    def get(self, request, args):
        # get_shop_order_by_num returns (success, order_or_error_text).
        success, order = get_shop_order_by_num(
            self.current_shop.id, args["order_num"]
        )
        if not success:
            return self.send_fail(error_text=order)
        serializer = AdminOrderSerializer(order)
        return self.send_success(data=serializer.data)
class AdminOrderPrintView(AdminBaseView):
    """Admin - orders - send an order to the receipt printer."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args({"order_id": fields.Integer(required=True, comment="打印的订单id")}, location="json")
    def post(self, request, args):
        # Resolve the order inside the current shop, then hand it to the
        # print interface together with the acting user (for the audit log).
        success, order = get_shop_order_by_shop_id_and_id(
            self.current_shop.id, args["order_id"]
        )
        if not success:
            return self.send_fail(error_text=order)
        success, msg = print_order_interface(order, self.current_user.id)
        if not success:
            return self.send_fail(error_text=msg)
        return self.send_success()
class AdminOrderConfirmView(AdminBaseView):
    """Admin - orders - confirm (start processing) a PAID order; for
    home-delivery orders a delivery record is created first."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            ),
            "delivery_type": fields.Integer(
                required=False,
                validate=[
                    validate.OneOf(
                        [DeliveryType.ExpressDelivery, DeliveryType.StaffDelivery]
                    )
                ],
                comment="配送类型",
            ),
            "express": fields.Nested(
                {
                    "company": fields.String(required=True, comment="快递公司"),
                    "express_num": fields.String(required=True, comment="快递单号"),
                },
                required=False,
                unknown=True,
                comment="快递信息",
            ),
        },
        location="json",
    )
    def post(self, request, args):
        shop_id = self.current_shop.id
        order = get_order_by_shop_id_and_id(shop_id, args.get("order_id"))
        if not order:
            return self.send_fail(error_text="订单不存在")
        elif order.order_status != OrderStatus.PAID:
            # Only PAID orders can be confirmed.
            return self.send_fail(error_text="订单状态已改变")
        # Home delivery: a delivery type is mandatory, and express delivery
        # additionally requires company + tracking number.
        if order.delivery_method == OrderDeliveryMethod.HOME_DELIVERY:
            if not args.get("delivery_type"):
                return self.send_fail(error_text="送货上门必须选择配送方式")
            if args.get(
                "delivery_type"
            ) == DeliveryType.ExpressDelivery and not args.get("express"):
                return self.send_fail(error_text="快递配送必须填写快递公司与单号")
            # Create the delivery record and attach it to the order.
            delivery_info = {"delivery_type": args.get("delivery_type")}
            delivery_info.update(args.get("express", {}))
            delivery = create_order_delivery_interface(delivery_info)
            order.delivery_id = delivery.id
        set_order_status_confirmed_finish(
            order,
            OrderStatus.CONFIRMED,
            self.current_user.id,
            OrderLogType.CONFIRM,
        )
        # Shop notification settings; the WeChat template push is currently
        # disabled (commented out), so msg_notify is presently unused.
        msg_notify = get_msg_notify_by_shop_id_interface(shop_id)
        # if msg_notify.order_confirm_wx:
        #     order_delivery_tplmsg_interface(order.id)
        return self.send_success()
class AdminOrderDirectView(AdminBaseView):
    """Admin - orders - one-click finish: move a PAID order straight to
    FINISHED (staff delivery is assumed for home-delivery orders)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            )
        },
        location="json",
    )
    def post(self, request, args):
        shop_id = self.current_shop.id
        order = get_order_by_shop_id_and_id(shop_id, args.get("order_id"))
        if not order:
            return self.send_fail(error_text="订单不存在")
        elif order.order_status != OrderStatus.PAID:
            return self.send_fail(error_text="订单状态已改变")
        # Home delivery gets an implicit staff-delivery record.
        if order.delivery_method == OrderDeliveryMethod.HOME_DELIVERY:
            delivery_info = {"delivery_type": DeliveryType.StaffDelivery}
            delivery = create_order_delivery_interface(delivery_info)
            order.delivery_id = delivery.id
        set_order_status_confirmed_finish(
            order,
            OrderStatus.FINISHED,
            self.current_user.id,
            OrderLogType.DIRECT,
        )
        # WeChat push currently disabled; msg_notify presently unused.
        msg_notify = get_msg_notify_by_shop_id_interface(shop_id)
        # if msg_notify.order_finish_wx:
        #     order_finish_tplmsg_interface(order.id)
        return self.send_success()
class AdminOrderFinishView(AdminBaseView):
    """Admin - orders - finish a CONFIRMED order."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            )
        },
        location="json",
    )
    def post(self, request, args):
        shop_id = self.current_shop.id
        order = get_order_by_shop_id_and_id(shop_id, args.get("order_id"))
        if not order:
            return self.send_fail(error_text="订单不存在")
        elif order.order_status != OrderStatus.CONFIRMED:
            # Only CONFIRMED orders can be finished here (contrast with the
            # one-click view, which finishes PAID orders).
            return self.send_fail(error_text="订单状态已改变")
        set_order_status_confirmed_finish(
            order,
            OrderStatus.FINISHED,
            self.current_user.id,
            OrderLogType.FINISH,
        )
        # Push a WeChat template message if the shop enabled the notification.
        msg_notify = get_msg_notify_by_shop_id_interface(shop_id)
        if msg_notify.order_finish_wx:
            order_finish_tplmsg_interface(order.id)
        return self.send_success()
class AdminOrderRefundView(AdminBaseView):
    """Admin - orders - refund an order, either through WeChat JSAPI or
    offline ("underline") refund."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            ),
            "refund_type": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf(
                        [
                            OrderRefundType.WEIXIN_JSAPI_REFUND,
                            OrderRefundType.UNDERLINE_REFUND,
                        ]
                    )
                ],
            ),
        },
        location="json",
    )
    def post(self, request, args):
        shop_id = self.current_shop.id
        order = get_order_by_shop_id_and_id(shop_id, args.get("order_id"))
        if not order:
            return self.send_fail(error_text="订单不存在")
        elif order.order_status not in [
            OrderStatus.PAID,
            OrderStatus.CONFIRMED,
            OrderStatus.FINISHED,
            OrderStatus.REFUND_FAIL,
        ]:
            return self.send_fail(error_text="订单状态已改变")
        # Cash-on-delivery orders were never paid online, so they can only
        # be refunded offline.
        if (
            order.pay_type == OrderPayType.ON_DELIVERY
            and args["refund_type"] == OrderRefundType.WEIXIN_JSAPI_REFUND
        ):
            return self.send_fail(error_text="货到付款的订单只能进行线下退款")
        success, msg = refund_order(
            self.current_shop.id,
            order,
            args["refund_type"],
            self.current_user.id,
        )
        if not success:
            # NOTE(review): error_obj= differs from the error_text= used by
            # every sibling view — confirm send_fail supports it.
            return self.send_fail(error_obj=msg)
        # WeChat push currently disabled; msg_notify presently unused.
        msg_notify = get_msg_notify_by_shop_id_interface(shop_id)
        # if msg_notify.order_refund_wx and order.pay_type == OrderPayType.WEIXIN_JSAPI:
        #     order_refund_tplmsg_interface(order.id)
        return self.send_success()
class AdminOrderOperateLogView(AdminBaseView):
    """Admin - orders - operation log for one order: paginated log entries,
    plus its delivery record and the number of times it was printed."""
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_num": fields.String(
                comment="订单id", validate=lambda num: len(num) == 19
            )
        },
        location="query"
    )
    def get(self, request, args):
        shop_id = self.current_shop.id
        order = get_shop_order_by_num_without_details(shop_id, args["order_num"])
        if not order:
            return self.send_fail(error_text="订单不存在")
        # Operation log entries for this order.
        log_list = list_order_log_by_shop_id_and_order_num_interface(shop_id, order.order_num)
        # Delivery record (may describe express or staff delivery).
        delivery = get_order_delivery_by_delivery_id_interface(order.delivery_id)
        delivery_data = AdminDeliverySerializer(delivery).data
        # Print count: number of PRINT entries in the full (unpaginated) log.
        print_count = 0
        for ll in log_list:
            if ll.operate_type == OrderLogType.PRINT:
                print_count += 1
        # Paginate the log entries for the response.
        log_list = self._get_paginated_data(log_list, OrderLogSerializer)
        data = {
            "log_list": log_list,
            "delivery": delivery_data,
            "print_count": print_count,
        }
        return self.send_success(data=data)
class AdminOrderPaidCountView(AdminBaseView):
    """Admin - orders - count of unprocessed (PAID) orders for the badge."""
    def get(self, request):
        count = count_paid_order(self.current_shop.id)
        return self.send_success(count=count)
class AdminAbnormalOrderCountView(AdminBaseView):
    """Admin - orders - count of abnormal (e.g. refund-failed) orders."""
    def get(self, request):
        count = count_abnormal_order(self.current_shop.id)
        return self.send_success(count=count)
class AdminAbnormalOrdersView(AdminBaseView):
    """Admin - orders - paginated listing of abnormal orders (defaults to
    refund-failed groupon orders)."""
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            "order_types": StrToList(
                required=False,
                missing=[OrderType.GROUPON],
                validate=[validate.ContainsOnly([OrderType.GROUPON])],
                comment="订单类型筛选 1: 普通订单, 5: 拼团订单",
            ),
            "order_pay_types": StrToList(
                required=False,
                missing=[OrderPayType.ON_DELIVERY, OrderPayType.WEIXIN_JSAPI],
                validate=[
                    validate.ContainsOnly(
                        [OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY]
                    )
                ],
                comment="订单支付方式筛选 1: 微信支付, 2: 货到付款",
            ),
            # NOTE(review): unlike AdminOrdersView this filter uses
            # DeliveryType (delivery-record types), not OrderDeliveryMethod —
            # confirm against list_shop_abnormal_orders.
            "order_delivery_methods": StrToList(
                required=False,
                missing=[DeliveryType.StaffDelivery, DeliveryType.ExpressDelivery],
                validate=[
                    validate.ContainsOnly(
                        [DeliveryType.ExpressDelivery, DeliveryType.StaffDelivery]
                    )
                ],
                comment="订单配送方式筛选 1: 送货上门, 2: 自提",
            ),
            "order_status": StrToList(
                required=False,
                missing=[OrderStatus.REFUND_FAIL],
                validate=[validate.ContainsOnly([OrderStatus.REFUND_FAIL])],
                comment="订单状态筛选 6: 退款失败",
            ),
            "order_num": fields.String(comment="订单号搜索,与其他条件互斥"),
        },
        location="query"
    )
    def get(self, request, args):
        orders = list_shop_abnormal_orders(
            self.current_shop.id, **args
        )
        orders = self._get_paginated_data(orders, AdminOrderSerializer)
        return self.send_success(data_list=orders)
class MallOrderView(MallBaseView):
    """Mall - submit an order (POST) and fetch order detail (GET)."""
    @use_args(
        {
            "cart_items": fields.Nested(
                {
                    "product_id": fields.Integer(required=True, comment="货品ID"),
                    "quantity": fields.Decimal(required=True, comment="货品下单量"),
                    "price": fields.Decimal(required=True, comment="货品单价"),
                    "amount": fields.Decimal(required=True, comment="货品总金额"),
                },
                required=True,
                validate=[validate.Length(1)],
                many=True,
                unknown=True,
                comment="订单货品详情",
            ),
            "delivery_amount": fields.Decimal(required=True, comment="订单运费"),
            "total_amount": fields.Decimal(required=True, comment="订单总金额"),
            "address": fields.Nested(
                {
                    "name": fields.String(required=True, comment="收货人姓名"),
                    "phone": fields.String(required=True, comment="收货人手机号"),
                    "sex": fields.Integer(
                        required=True,
                        validate=[validate.OneOf([Sex.UNKNOWN, Sex.MALE, Sex.FEMALE])],
                        comment="性别",
                    ),
                    "address": fields.String(required=True, comment="详细地址"),
                    "province": fields.Integer(required=True, comment="省编码"),
                    "city": fields.Integer(required=True, comment="市编码"),
                    "county": fields.Integer(required=True, comment="区编码"),
                },
                required=True,
                unknown=True,
                comment="订单地址",
            ),
            "delivery_method": fields.Integer(
                required=True,
                validate=validate.OneOf(
                    [
                        OrderDeliveryMethod.HOME_DELIVERY,
                        OrderDeliveryMethod.CUSTOMER_PICK,
                    ]
                ),
                comment="配送方式:1:送货上门,2:客户自提",
            ),
            "delivery_period": fields.String(comment="自提时间段(仅自提必传),举例:今天 12:00~13:00"),
            "pay_type": fields.Integer(
                required=True,
                validate=validate.OneOf(
                    [OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY]
                ),
                comment="支付方式:1:微信支付,2:货到付款",
            ),
            "wx_openid": fields.String(comment="微信支付openid"),
            "remark": fields.String(validate=validate.Length(0, 30), comment="订单备注"),
            "groupon_attend_id": fields.Integer(comment="拼团活动参与id"),
        },
        location="json",
    )
    def post(self, request, args, shop_code):
        # Bind the shop addressed by the URL code to this request.
        self._set_current_shop(request, shop_code)
        shop_id = self.current_shop.id
        user_id = self.current_user.id
        # Map groupon_attend_id onto the generic promotion fields:
        # promotion_type 1 == groupon, 0 == no promotion.
        promotion_attend_id = args.pop("groupon_attend_id", 0)
        args["promotion_attend_id"] = promotion_attend_id
        args["promotion_type"] = 1 if promotion_attend_id else 0
        # Validate the submitted order against current server-side state
        # (prices, stock, promotion validity, ...).
        success, order_info = order_data_check(shop_id, user_id, args)
        if not success:
            return self.send_fail(error_text=order_info)
        promotion_attend = order_info.pop("promotion_attend")
        serializer = MallOrderCreateSerializer(
            data=order_info,
            context={"promotion_attend": promotion_attend, "cart_items": args.get("cart_items")}
        )
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        order = serializer.save()
        if order.pay_type == OrderPayType.WEIXIN_JSAPI:
            # NOTE(review): wx_openid is optional in the schema, so a JSAPI
            # order without it raises KeyError here — confirm the client
            # always sends it for online payment.
            success, params = jsapi_params_interface(
                order, args["wx_openid"]
            )
            if not success:
                return self.send_fail(error_obj=params)
            # Schedule automatic cancellation if the order stays unpaid.
            auto_cancel_order_interface(order.shop_id, order.id)
            return self.send_success(data=params, order_id=order.id)
        else:
            # Pay-on-delivery: mark the order paid immediately.
            set_order_paid(order)
            # WeChat notification on submission; currently only normal orders
            # would send it, and it is disabled (left as a stub) for tests.
            if order.order_type == OrderType.NORMAL:
                # Skipped under test.
                pass
                # order_commit_tplmsg_interface(order.id)
            # NOTE(review): runs for every pay-on-delivery order — presumably
            # order.groupon_attend is None for normal orders; confirm the
            # interface tolerates that.
            auto_validate_groupon_attend_interface(
                order.shop_id, order.groupon_attend
            )
            return self.send_success(order_id=order.id)
    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            )
        },
        location="query"
    )
    def get(self, request, args, shop_code):
        # The order must belong to one of this user's customer identities.
        customer_ids = list_customer_ids_by_user_id_interface(self.current_user.id)
        if not customer_ids:
            return self.send_fail(error_text="订单不存在")
        ret, info = get_customer_order_with_detail_by_id(customer_ids, args.get("order_id"))
        if not ret:
            return self.send_fail(error_text=info)
        # Strip the customer payload — not needed in the detail response.
        info.customer = None
        serializer = MallOrderSerializer(info)
        return self.send_success(data=serializer.data)
class MallCartVerifyView(MallBaseView):
    """Mall - verify the shopping basket on confirmation (all items still on sale)."""
    @use_args(
        {
            "product_ids": fields.List(
                fields.Integer(required=True),
                required=True,
                validate=[validate.Length(1)],
                comment="商品ID",
            )
        },
        location="json"
    )
    def post(self, request, args, shop_code):
        self._set_current_shop(request, shop_code)
        shop = self.current_shop
        product_list = list_product_by_ids_interface(shop.id, **args)
        # Fail on the first product that has been taken off the shelf.
        for product in product_list:
            if product.status == ProductStatus.OFF:
                return self.send_fail(
                    error_text="商品{product_name}已下架, 看看别的商品吧".format(product_name=product.name)
                )
        return self.send_success()
class MallProductVerifyView(MallBaseView):
    """Mall - verify a single product (and its promotion, if any) when confirming an order."""
    @use_args(
        {
            "product_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="货品ID"
            ),
            "promotion": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf([PromotionType.NORMAL, PromotionType.GROUPON])
                ],
                comment="货品所处的活动, 0:没活动,1:拼团",
            ),
        },
        location="json",
    )
    def post(self, request, args, shop_code):
        self._set_current_shop(request, shop_code)
        product_id = args.get("product_id")
        # Reject products that were taken off the shelf.
        product = get_product_by_id_interface(
            self.current_shop.id, product_id
        )
        if not product or product.status != ProductStatus.ON:
            return self.send_fail(error_text="商品已下架, 看看别的商品吧")
        # Dispatch to the validator registered for this promotion type.
        # The schema restricts `promotion` to registered keys, so the lookup
        # should not miss — presumably every key is registered; verify.
        promotion = args.get("promotion")
        validate_func = _MAP_VALIDATE_PRODUCT.get(promotion)
        success, error_text = validate_func(self, product)
        if not success:
            return self.send_fail(error_text=error_text)
        return self.send_success()
@register_order_product_verify(0)
def validate_normal_product(self, product):
    """Non-promotional products need no extra checks — always valid."""
    return True, None
@register_order_product_verify(1)
def validate_groupon_product(self, product):
    """Validate a groupon product: the event must be live and the customer under the attend limit."""
    event = get_product_promotion_interface(self.current_shop.id, product.id)
    # Event type "1" denotes a groupon event; anything else means no live groupon.
    if not event or event._event_type != "1":
        return False, "活动已结束, 看看其他商品吧"
    # NOTE(review): the function name suggests (user_id, shop_id) but the call
    # passes (shop_id, user_id) — elsewhere (MallOrdersView) the opposite order
    # is used; confirm the interface signature.
    customer = get_customer_by_user_id_and_shop_id_interface(
        self.current_shop.id, self.current_user.id
    )
    if customer:
        groupon_id = event.content.get("id")
        groupon = get_groupon_by_id_interface(
            self.current_shop.id, groupon_id
        )
        total_attend_count = count_groupon_attend_by_groupon_id_and_customer_id_interface(
            groupon_id, customer.id
        )
        # A zero attend_limit means unlimited participation.
        if groupon.attend_limit and total_attend_count >= groupon.attend_limit:
            return (
                False,
                "最多参加{attend_limit}次, 您已达到上限".format(
                    attend_limit=groupon.attend_limit
                ),
            )
    return True, None
class MallOrdersView(MallBaseView):
    """Mall - customer-facing order list, optionally filtered by order status."""

    pagination_class = StandardResultsSetPagination

    @use_args(
        {
            "order_status": StrToList(
                required=False,
                missing=[],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderStatus.CANCELED,
                            OrderStatus.UNPAID,
                            OrderStatus.PAID,
                            OrderStatus.CONFIRMED,
                            OrderStatus.FINISHED,
                            OrderStatus.REFUNDED,
                            OrderStatus.REFUND_FAIL,
                            OrderStatus.WAITTING,
                        ]
                    )
                ],
                comment="订单状态列表",
            ),
        },
        location="query",
    )
    def get(self, request, args, shop_code):
        # FIX: parameter was misspelled `reuqest`; Django passes it
        # positionally so the rename is safe for callers.
        self._set_current_shop(request, shop_code)
        user_id = self.current_user.id
        shop_id = self.current_shop.id
        customer = get_customer_by_user_id_and_shop_id_interface(user_id, shop_id)
        # A user who is not a customer of this shop cannot have orders here.
        if not customer:
            return self.send_success(data_list=[])
        order_list = list_customer_order_by_customer_ids([customer.id], args.get('order_status'))
        order_list = self._get_paginated_data(order_list, MallOrdersSerializer)
        return self.send_success(data_list=order_list)
class MallOrderCancellationView(MallBaseView):
    """Mall - cancel an unpaid order owned by the current user."""
    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            )
        },
        location="json",
    )
    def put(self, request, args):
        # The order must belong to one of this user's customer identities.
        customer_ids = list_customer_ids_by_user_id_interface(self.current_user.id)
        if not customer_ids:
            return self.send_fail(error_text="订单不存在")
        order = get_customer_order_by_id(customer_ids, args.get("order_id"))
        if not order:
            return self.send_fail(error_text="订单不存在")
        # Only unpaid orders may be cancelled by the customer.
        if order.order_status != OrderStatus.UNPAID:
            return self.send_fail(error_text="订单状态已改变")
        success, msg = cancel_order(order.shop_id, order.id)
        if not success:
            # On failure, re-validate the groupon attend for groupon orders so
            # its state stays consistent with the (uncancelled) order.
            if order.order_type == OrderType.GROUPON:
                auto_validate_groupon_attend_interface(
                    order.shop_id, order.groupon_attend
                )
            return self.send_fail(error_obj=msg)
        return self.send_success()
class MallOrderPaymentView(MallBaseView):
    """Mall - (re-)initiate WeChat JSAPI payment for an unpaid order."""

    @use_args(
        {
            "order_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="订单ID"
            ),
            "wx_openid": fields.String(required=True, comment="微信openID"),
        },
        location="json",
    )
    def put(self, request, args):
        # The order must belong to one of this user's customer identities.
        customer_ids = list_customer_ids_by_user_id_interface(self.current_user.id)
        if not customer_ids:
            return self.send_fail(error_text="订单不存在")
        order = get_customer_order_by_id(customer_ids, args.get("order_id"))
        # BUGFIX: the existence check must come before any attribute access;
        # the original asserted on order.pay_type first, raising
        # AttributeError when the order was missing.
        if not order:
            return self.send_fail(error_text="订单不存在")
        # Online payment is only defined for WeChat JSAPI orders.
        assert order.pay_type == OrderPayType.WEIXIN_JSAPI
        if order.order_status != OrderStatus.UNPAID:
            return self.send_fail(error_text="订单状态已改变")
        success, params = jsapi_params_interface(order, args["wx_openid"])
        if not success:
            return self.send_fail(error_obj=params)
        return self.send_success(data=params)
<file_sep>/wsc_django/wsc_django/apps/storage/models.py
from django.db import models
from product.models import Product
from shop.models import Shop
from user.models import User
from storage.constant import (
ProductStorageRecordOperatorType,
ProductStorageRecordType,
ProductStorageRecordStatus,
)
from wsc_django.utils.models import TimeBaseModel
class ProductStorageRecord(TimeBaseModel):
    """Audit record of a product stock change (who changed what, by how much, and why)."""
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="对应的店铺对象")
    product = models.ForeignKey(Product, on_delete=models.CASCADE, null=False, verbose_name="对应货品对象")
    # Who performed the change: 1 = staff, 2 = customer.
    operator_type = models.SmallIntegerField(
        null=False,
        default=ProductStorageRecordOperatorType.STAFF,
        verbose_name="操作人类型,1:员工,2:客户",
    )
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False, verbose_name="对应的用户对象")
    create_time = models.DateTimeField(null=False,auto_now_add=True, verbose_name="货品库存变更记录创建时间")
    # Why the stock changed (e.g. a mall sale); see ProductStorageRecordType.
    type = models.SmallIntegerField(
        null=False,
        default=ProductStorageRecordType.MALL_SALE,
        verbose_name="货品库存记录变更类型",
    )
    # Signed delta applied by this operation.
    change_storage = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        verbose_name="本次操作变更量",
    )
    # Stock level at the moment of the change (historical snapshot).
    current_storage = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        verbose_name="历史时刻当前库存",
    )
    order_num = models.CharField(max_length=20, verbose_name="订单号")
    status = models.SmallIntegerField(
        null=False,
        default=ProductStorageRecordStatus.NORMAL,
        verbose_name="状态"
    )
    class Meta:
        db_table = "product_storage_record"
        verbose_name = "货品库存变更记录"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/logs/services.py
import datetime
from collections import defaultdict
from logs.constant import OrderLogType, MAP_NO_OPERATOR_ORDER_TYPE, OperateLogModule
from logs.models import OrderLog, OperateLogUnify, ConfigLog, PromotionLog, ProductLog, LogBaseModel, StaffLog
def get_all_module_dict():
    """Return {name: value} for every public attribute of OperateLogModule."""
    return {
        name: value
        for name, value in vars(OperateLogModule).items()
        if not name.startswith("_")
    }
def _create_operate_log_unify(log: LogBaseModel):
    """
    Mirror a module-specific log entry into the unified operate-log table.

    :param log: an already-saved module log (order/config/promotion/...)
    """
    OperateLogUnify(
        shop_id=log.shop_id,
        operator_id=log.operator.id,
        operate_time=log.operate_time,
        operate_module=log.operate_module,
        log_id=log.id,
    ).save()
def create_order_log(log_info: dict):
    """
    Create an order operation log and mirror it into the unified log table.

    :param log_info: e.g. {
        "order_id": 1,
        "order_num": "xxxx",
        "shop_id": 1,
        "operator_id": 1,
        "operate_type": 1,
        "operate_content": ""
    }
    :return: the saved OrderLog
    """
    log = OrderLog(**log_info)
    log.save()
    _create_operate_log_unify(log)
    return log
def create_config_log(log_info: dict):
    """
    Create a settings-module operation log and mirror it into the unified table.

    :param log_info: e.g. {
        "shop_id": shop_id,
        "operator_id": user_id,
        "operate_type": ConfigLogType.SHOP_NAME,
        "operate_content": ""
    }
    :return: the saved ConfigLog
    """
    log = ConfigLog(**log_info)
    log.save()
    _create_operate_log_unify(log)
    return log
def create_promotion_log(log_info: dict):
    """
    Create a promotion-module operation log and mirror it into the unified table.

    :param log_info: e.g. {
        "shop_id": shop_id,
        "operator_id": user_id,
        "operate_type": PromotionLogType.ADD_GROUPON,
        "operate_content": groupon_name
    }
    :return: the saved PromotionLog
    """
    log = PromotionLog(**log_info)
    log.save()
    _create_operate_log_unify(log)
    return log
def create_product_log(log_info: dict):
    """
    Create a product-module operation log and mirror it into the unified table.

    :param log_info: e.g. {
        "shop_id": shop_id,
        "operator_id": user_id,
        "operate_type": ProductLogType.ADD_PRODUCT,
        "operate_content": ""
    }
    :return: the saved ProductLog
    """
    log = ProductLog(**log_info)
    log.save()
    _create_operate_log_unify(log)
    return log
def create_staff_log(log_info: dict):
    """
    Create a staff-module operation log and mirror it into the unified table.

    :param log_info: e.g. {
        "shop_id": shop_id,
        "operator_id": user_id,
        "operate_type": StaffLogType.ADD_STAFF,
        "staff_id": staff_id,
        "operate_content": ""
    }
    :return: the saved StaffLog
    """
    log = StaffLog(**log_info)
    log.save()
    _create_operate_log_unify(log)
    return log
def get_order_log_time_by_order_num(order_num: str):
    """
    Get the latest delivery-start (confirm) or delivery-finish time of an
    order from its operation log.

    :param order_num: order number to look up
    :return: the most recent matching operate_time, or None when the order
        has no DIRECT/CONFIRM/FINISH log entry
    """
    order_log = OrderLog.objects.filter(
        order_num=order_num,
        operate_type__in=[
            OrderLogType.DIRECT, OrderLogType.CONFIRM, OrderLogType.FINISH
        ]
    ).order_by("-operate_time").first()
    # BUGFIX: .first() returns None when nothing matches; the original raised
    # AttributeError on `.operate_time` in that case.
    return order_log.operate_time if order_log else None
def list_order_log_by_shop_id_and_order_num(shop_id: int, order_num: str):
    """
    List an order's operation logs (shop-scoped variant), newest first.

    Automatic (system) operations are stored with operator_id == 0; for those
    the operate_type is remapped in memory so the serializer can render a
    "system" label. The remapping is NOT persisted.

    :param shop_id: owning shop
    :param order_num: order number
    :return: queryset of OrderLog, newest first
    """
    log_list = (
        OrderLog.objects.filter(shop_id=shop_id, order_num=order_num)
        .order_by("-operate_time")
        .all()
    )
    for log in log_list:
        # operator_id == 0 marks an automatic operation.
        if not log.operator_id:
            operate_type = MAP_NO_OPERATOR_ORDER_TYPE[log.operate_type]
            log.operate_type = operate_type
    return log_list
def list_one_module_log_by_ids(module_id: int, log_ids: list):
    """
    Fetch one module's log rows by primary key, ordered by id.

    :param module_id: module discriminator used to resolve the log model
    :param log_ids: primary keys to fetch
    :return: queryset of the module's log model
    """
    Model = OperateLogUnify.get_operate_log_model(module_id)
    return Model.objects.filter(id__in=log_ids).order_by("id").all()
def list_one_module_log_by_filter(
    shop_id: int,
    module_id: int,
    operator_ids: list,
    from_date: datetime,
    end_date: datetime,
):
    """
    List one module's operation logs for a shop within a date range,
    optionally restricted to a set of operators; newest first.

    :param shop_id: owning shop
    :param module_id: module discriminator used to resolve the log model
    :param operator_ids: when non-empty, keep only these operators
    :param from_date: range start (inclusive)
    :param end_date: range end (inclusive)
    :return: queryset of the module's log model
    """
    Model = OperateLogUnify.get_operate_log_model(module_id)
    query = Model.objects.filter(
        shop_id=shop_id, operate_time__range=[from_date, end_date]
    )
    if operator_ids:
        query = query.filter(operator_id__in=operator_ids)
    return query.order_by("-operate_time").all()
def dict_log_ids_from_operate_log_unify_by_filter(
    shop_id: int,
    module_ids: list,
    operator_ids: list,
    from_date: datetime,
    end_date: datetime,
):
    """
    Query the unified log table and group the matching log IDs by module.

    Automatic operations (operator_id == 0) are excluded.

    :param shop_id: owning shop
    :param module_ids: when non-empty, restrict to these modules
    :param operator_ids: when non-empty, restrict to these operators
    :param from_date: range start (inclusive)
    :param end_date: range end (inclusive)
    :return: defaultdict {operate_module: [log_id, ...]} ordered newest first
    """
    unify_log_list_query = (
        OperateLogUnify.objects.filter(shop_id=shop_id, operate_time__range=[from_date, end_date])
        .exclude(operator_id=0)
    )
    if module_ids:
        unify_log_list_query = unify_log_list_query.filter(
            operate_module__in=module_ids
        )
    if operator_ids:
        unify_log_list_query = unify_log_list_query.filter(
            operator_id__in=operator_ids
        )
    unify_log_list_query = unify_log_list_query.order_by("-operate_time")
    unify_log_list = unify_log_list_query.all()
    unify_log_dict = defaultdict(list)
    for log in unify_log_list:
        unify_log_dict[log.operate_module].append(log.log_id)
    return unify_log_dict
def dict_more_modules_log_by_filter(
    shop_id: int,
    module_ids: list,
    operator_ids: list,
    from_date: datetime,
    end_date: datetime,
):
    """
    Query operation logs across several modules: first collect the matching
    log IDs per module from the unified table, then fetch each module's rows.

    :param shop_id: owning shop
    :param module_ids: when non-empty, restrict to these modules
    :param operator_ids: when non-empty, restrict to these operators
    :param from_date: range start (inclusive)
    :param end_date: range end (inclusive)
    :return: defaultdict {operate_module: [log rows]}
    """
    log_type_2_ids_dict = dict_log_ids_from_operate_log_unify_by_filter(
        shop_id, module_ids, operator_ids, from_date, end_date
    )
    log_type_2_log_list_dict = defaultdict(list)
    for k, v in log_type_2_ids_dict.items():
        log_list = list_one_module_log_by_ids(k, v)
        log_type_2_log_list_dict[k] = log_list
    return log_type_2_log_list_dict<file_sep>/wsc_django/wsc_django/apps/groupon/serializers.py
from rest_framework import serializers
from groupon.services import create_groupon, update_groupon
from wsc_django.utils.constant import DateFormat
from wsc_django.utils.core import FuncField
class AdminGrouponCreateSerializer(serializers.Serializer):
    """Admin serializer for creating/updating a groupon event."""
    price = FuncField(lambda value: round(float(value), 2), label="拼团价格")
    from_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动开始时间")
    to_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动结束时间")
    groupon_type = serializers.IntegerField(label="拼团活动类型 1:普通 2:老带新")
    success_size = serializers.IntegerField(label="成团人数")
    quantity_limit = serializers.IntegerField(label="成团数量上限")
    success_limit = serializers.IntegerField(label="成团上限")
    attend_limit = serializers.IntegerField(label="参团上限")
    success_valid_hour = serializers.IntegerField(label="开团有效时间")
    def create(self, validated_data):
        # Shop, operator and product come from the view via context.
        shop_id = self.context["self"].current_shop.id
        user_id = self.context["self"].current_user.id
        product = self.context["product"]
        groupon = create_groupon(
            shop_id, user_id, product, validated_data
        )
        return groupon
    def update(self, instance, validated_data):
        # Same context contract as create().
        shop_id = self.context["self"].current_shop.id
        user_id = self.context["self"].current_user.id
        product = self.context["product"]
        instance = update_groupon(
            shop_id, user_id, product, instance, validated_data
        )
        return instance
class SponsorSerializer(serializers.Serializer):
    """Groupon sponsor (group leader) — only basic profile fields are exposed."""
    nickname = serializers.CharField(required=False, label="微信昵称")
    sex = serializers.IntegerField(required=False, label="性别")
    head_image_url = serializers.CharField(required=False, label="头像")
class GrouponBasicSerializer(serializers.Serializer):
    """Basic groupon event info, shared by the list/detail serializers below."""
    groupon_type = serializers.IntegerField(label="拼团活动类型 1:普通 2:老带新")
    success_valid_hour = serializers.IntegerField(label="开团有效时间")
    succeeded_count = serializers.IntegerField(label="成团数")
    success_limit = serializers.IntegerField(label="成团上限")
class GrouponProductSerializer(serializers.Serializer):
    """Product summary embedded in groupon payloads."""
    product_id = serializers.IntegerField(source="id", label="货品ID")
    name = serializers.CharField(label="货品名称")
    price = FuncField(lambda value: round(float(value), 2), label="货品价格")
    status = serializers.IntegerField(read_only=True, label="货品状态")
    summary = serializers.CharField(label="货品简介")
    cover_image_url = serializers.CharField(label="货品封面图")
class AdminGrouponsSerializer(GrouponBasicSerializer):
    """Admin groupon LIST item — basic info plus product, price and schedule."""
    groupon_id = serializers.IntegerField(source="id", label="拼团id")
    product = GrouponProductSerializer(label="拼团商品信息")
    price = FuncField(lambda value: round(float(value), 2), label="拼团价格")
    attend_limit = serializers.IntegerField(label="参团上限")
    from_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动开始时间")
    to_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动结束时间")
    status = serializers.IntegerField(label="拼团活动状态 1:启用 2:停用 3:过期")
    is_editable = serializers.BooleanField(label="拼团是否可以编辑")
class AdminGrouponSerializer(GrouponBasicSerializer):
    """Admin groupon DETAIL — like the list item but with size/quantity limits."""
    groupon_id = serializers.IntegerField(source="id", label="拼团id")
    product = GrouponProductSerializer(label="拼团商品信息")
    price = FuncField(lambda value: round(float(value), 2), label="拼团价格")
    attend_limit = serializers.IntegerField(label="参团上限")
    from_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动开始时间")
    to_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团活动结束时间")
    status = serializers.IntegerField(label="拼团活动状态 1:启用 2:停用 3:过期")
    quantity_limit = serializers.IntegerField(label="成团数量上限")
    success_size = serializers.IntegerField(label="成团人数")
class GrouponAttendBasicSerializer(serializers.Serializer):
    """Basic info of one groupon attempt (a started group)."""
    groupon_attend_id = serializers.IntegerField(source="id", label="拼团参与id")
    size = serializers.IntegerField(label="拼团当前参与人数")
    success_size = serializers.IntegerField(label="成团人数")
    to_datetime = serializers.DateTimeField(format=DateFormat.TIME, label="拼团参与结束时间")
class AdminGrouponAttendSerializer(GrouponAttendBasicSerializer):
    """Admin view of a groupon attempt, including sponsor and outcome."""
    anonymous_size = serializers.IntegerField(label="匿名用户数量")
    sponsor = SponsorSerializer(label="团长信息")
    status = serializers.IntegerField(label="拼团参与状态 1:拼团中 2:已成团 3:已失败")
    failed_reason = serializers.CharField(label="失败原因")
    groupon = GrouponBasicSerializer(label="团基本信息")
    create_time = serializers.DateTimeField(source="create_at", format=DateFormat.TIME, label="开团时间")
    # NOTE(review): update_at is used as a proxy for the success time — it is
    # really "last modified", so any later write shifts it; confirm intent.
    success_time = serializers.DateTimeField(
        source="update_at", format=DateFormat.TIME, label="成团时间(数据改变时间)"
    )
<file_sep>/wsc_django/wsc_django/apps/customer/migrations/0003_auto_20210606_2054.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django (customer app, 0003) — do not hand-edit applied
    # migrations; create a new migration instead.
    # NOTE(review): `initial = True` on a third migration that depends on
    # '0002_mineaddress_shop' looks wrong — usually only 0001 is initial;
    # confirm this was intended (e.g. a squashed history).
    initial = True
    dependencies = [
        ('shop', '0001_initial'),
        ('customer', '0002_mineaddress_shop'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AddField(
            model_name='mineaddress',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='address', to=settings.AUTH_USER_MODEL, verbose_name='顾客ID'),
        ),
        migrations.AddField(
            model_name='customerpoint',
            name='customer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='customer.customer', verbose_name='对应客户对象'),
        ),
        migrations.AddField(
            model_name='customer',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='客户对应的店铺对象'),
        ),
        migrations.AddField(
            model_name='customer',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='客户对应的用户对象'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/shop/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (shop app, 0001_initial) — creates the
    # HistoryRealName, PayChannel, Shop and ShopRejectReason tables.
    # Do not hand-edit applied migrations; create a new migration instead.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='HistoryRealName',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('realname', models.CharField(max_length=32, verbose_name='历史真实姓名')),
            ],
            options={
                'verbose_name': '商铺创建者历史真实姓名',
                'verbose_name_plural': '商铺创建者历史真实姓名',
                'db_table': 'history_realname',
            },
        ),
        migrations.CreateModel(
            name='PayChannel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('smerchant_no', models.CharField(default='', max_length=15, verbose_name='商户号')),
                ('smerchant_name', models.CharField(default='', max_length=100, verbose_name='商户名')),
                ('smerchant_type_id', models.CharField(default='', max_length=15, verbose_name='商户类别id')),
                ('smerchant_type_name', models.CharField(default='', max_length=81, verbose_name='商户类别名')),
                ('pos_id', models.CharField(default='', max_length=9, verbose_name='柜台号')),
                ('terminal_id1', models.CharField(default='', max_length=50, verbose_name='终端号1')),
                ('terminal_id2', models.CharField(default='', max_length=50, verbose_name='终端号2')),
                ('access_token', models.CharField(default='', max_length=32, verbose_name='扫呗access_token')),
                ('clearing_rate', models.FloatField(default=2.8, verbose_name='商户的清算费率,利楚默认是千分之2.8,建行是0')),
                ('clearing_account_id', models.IntegerField(default=0, verbose_name='商户的清算账号ID')),
                ('channel_type', models.SmallIntegerField(default=0, verbose_name='支付渠道, 1:利楚, 2:建行')),
                ('pub_key', models.CharField(max_length=500, verbose_name='账户公匙')),
                ('province', models.CharField(default='Hubei', max_length=32, verbose_name='用户所在省份')),
            ],
            options={
                'verbose_name': '支付渠道',
                'verbose_name_plural': '支付渠道',
                'db_table': 'pay_channel',
            },
        ),
        migrations.CreateModel(
            name='Shop',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('status', models.SmallIntegerField(default=2, verbose_name='商铺状态 0: 已关闭 1: 正常,审核通过, 2: 审核中, 3: 已拒绝')),
                ('shop_name', models.CharField(max_length=128, verbose_name='商铺名称')),
                ('shop_code', models.CharField(default='', max_length=16, verbose_name='随机字符串,用于代替id')),
                ('shop_phone', models.CharField(default='', max_length=32, verbose_name='联系电话')),
                ('shop_img', models.CharField(default='', max_length=300, verbose_name='门头照片')),
                ('business_licence', models.CharField(default='', max_length=300, verbose_name='营业执照')),
                ('shop_address', models.CharField(default='', max_length=100, verbose_name='商铺地址')),
                ('shop_county', models.IntegerField(default=0, verbose_name='商铺所在国家编号')),
                ('shop_province', models.IntegerField(default=0, verbose_name='商铺所在省份编号')),
                ('shop_city', models.IntegerField(default=0, verbose_name='商铺所在城市编号')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='商铺创建时间')),
                ('description', models.CharField(default='', max_length=256, verbose_name='商铺描述')),
                ('inviter_phone', models.CharField(default='', max_length=32, verbose_name='推荐人手机号')),
                ('cerify_active', models.SmallIntegerField(default=1, verbose_name='是否认证,1:是,0:否')),
                ('shop_verify_type', models.SmallIntegerField(default=1, verbose_name='商铺类型,0:企业,1:个人')),
                ('shop_verify_content', models.CharField(max_length=200, verbose_name='认证内容(公司名称)')),
                ('pay_active', models.SmallIntegerField(default=1, verbose_name='是否开通线上支付,1:是,0:否')),
            ],
            options={
                'verbose_name': '商铺',
                'verbose_name_plural': '商铺',
                'db_table': 'shop',
            },
        ),
        migrations.CreateModel(
            name='ShopRejectReason',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('reject_reason', models.CharField(default='', max_length=256, verbose_name='拒绝理由')),
            ],
            options={
                'verbose_name': '商铺拒绝理由',
                'verbose_name_plural': '商铺拒绝理由',
                'db_table': 'shop_reject_reason',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/staff/services.py
from django.db.models import Q
from shop.models import Shop
from user.constant import USER_OUTPUT_CONSTANT
from user.models import User
from staff.models import Staff, StaffApply
from staff.constant import (
StaffPermission,
StaffRole,
StaffStatus,
StaffApplyExpired,
)
def cal_all_permission():
    """OR together every public permission bit declared on StaffPermission."""
    total = 0
    for name, value in vars(StaffPermission).items():
        if name.startswith("_"):
            continue
        total |= value
    return total
def cal_all_roles_without_super():
    """OR together every public role bit on StaffRole except the super admin."""
    total = 0
    for name, value in vars(StaffRole).items():
        if name.startswith("_") or value == StaffRole.SHOP_SUPER_ADMIN:
            continue
        total |= value
    return total
def create_staff(staff_info: dict):
    """
    Create and persist a staff record.

    :param staff_info: e.g. {
        "shop_id": 1,
        "user_id": 1,
        "roles": 255,
        "permissions": 63,
        "position": "",
        "entry_date": "2019-10-14",
        "remark": ""
    }
    :return: the saved Staff
    """
    record = Staff(**staff_info)
    record.save()
    return record
def create_super_admin_staff(shop: Shop, user: User):
    """
    Create the shop's super-admin staff record.

    :param shop: shop the admin belongs to
    :param user: user being promoted to super admin
    :return: the saved Staff
    """
    # Super admins hold every permission. Reuse cal_all_permission() as the
    # single source of truth — the original duplicated the loop inline with a
    # slightly different name filter ("__" vs "_"), inviting drift.
    staff = Staff(
        shop=shop,
        user=user,
        roles=StaffRole.SHOP_SUPER_ADMIN,
        permissions=cal_all_permission(),
    )
    staff.save()
    return staff
def create_staff_apply(staff_apply_info: dict):
    """Create and persist a staff application record."""
    record = StaffApply(**staff_apply_info)
    record.save()
    return record
def expire_staff_apply_by_staff(shop_id: int, user_id: int):
    """
    Mark all of a user's pending staff applications in a shop as expired
    (called when the staff member is removed).

    :param shop_id: owning shop
    :param user_id: user whose applications should expire
    """
    pending = list_staff_apply_by_shop_id_and_user_id(
        shop_id, user_id, filter_expired=True
    )
    for apply_record in pending:
        apply_record.expired = StaffApplyExpired.YES
        apply_record.save()
def get_staff_by_user_id_and_shop_id_with_user(user_id: int, shop_id: int):
    """
    Fetch a staff record by shop and user, copying the user's public fields
    onto the staff object for serialization.

    :param user_id: user to look up
    :param shop_id: owning shop
    :return: Staff with user fields attached, or None when no record exists
    """
    staff = Staff.objects.filter(shop_id=shop_id, user_id=user_id).first()
    # BUGFIX: .first() returns None when nothing matches; the original crashed
    # with AttributeError in the loop below in that case.
    if staff is None:
        return None
    for key in USER_OUTPUT_CONSTANT:
        setattr(staff, key, getattr(staff.user, key))
    return staff
def get_staff_by_user_id_and_shop_id(user_id: int, shop_id: int, filter_delete: bool = True):
    """
    Fetch a staff record by shop and user (without attaching user fields).

    :param user_id: user to look up
    :param shop_id: owning shop
    :param filter_delete: drop soft-deleted staff records
    :return: Staff or None
    """
    staff_query = Staff.objects.filter(shop_id=shop_id, user_id=user_id)
    # FIX: the original guarded with `if staff_query and filter_delete`, which
    # evaluates the queryset just to test truthiness (an extra DB round-trip);
    # filtering an empty queryset is harmless, so test only the flag — this
    # also matches get_staff_apply_by_shop_id_and_id.
    if filter_delete:
        staff_query = staff_query.filter(status=StaffStatus.NORMAL)
    return staff_query.first()
def get_staff_apply_by_user_id_and_shop_id(user_id: int, shop_id: int, filter_expired: bool = True):
    """
    Fetch a user's staff application in a shop.

    NOTE(review): no order_by is applied, so "latest" is not guaranteed by
    this query — presumably at most one non-expired record exists; confirm.

    :param user_id: applicant
    :param shop_id: target shop
    :param filter_expired: drop expired applications
    :return: StaffApply or None
    """
    staff_apply_query = StaffApply.objects.filter(shop_id=shop_id, user_id=user_id)
    # FIX: drop the queryset-truthiness guard (`if staff_apply_query and ...`)
    # that forced an extra DB evaluation; filtering an empty queryset is fine.
    if filter_expired:
        staff_apply_query = staff_apply_query.filter(expired=StaffApplyExpired.NO)
    return staff_apply_query.first()
def get_staff_apply_by_shop_id_and_id(
    shop_id: int, staff_apply_id: int, filter_expired: bool = True
):
    """
    Fetch one staff application by shop and primary key; at most one
    non-expired record exists per application.

    :param shop_id: owning shop
    :param staff_apply_id: application primary key
    :param filter_expired: drop expired applications
    :return: StaffApply or None
    """
    query = StaffApply.objects.filter(shop_id=shop_id, id=staff_apply_id)
    if filter_expired:
        query = query.filter(expired=StaffApplyExpired.NO)
    return query.first()
def get_staff_by_id_and_shop_id(
    staff_id: int, shop_id: int, filter_delete: bool = True
):
    """
    Fetch one staff record by primary key and shop, copying the user's public
    fields onto the staff object for serialization.

    :param staff_id: staff primary key
    :param shop_id: owning shop
    :param filter_delete: drop soft-deleted staff records
    :return: Staff with user fields attached, or None
    """
    staff_query = Staff.objects.filter(id=staff_id, shop_id=shop_id)
    if filter_delete:
        staff_query = staff_query.filter(status=StaffStatus.NORMAL)
    staff = staff_query.first()
    # BUGFIX: .first() returns None when nothing matches; the original crashed
    # with AttributeError in the attribute-copy loop in that case.
    if staff is None:
        return None
    for key in USER_OUTPUT_CONSTANT:
        setattr(staff, key, getattr(staff.user, key))
    return staff
def list_staff_by_user_id(user_id: int, roles: int = None, filter_delete: bool = True):
    """
    List this user's staff records across all shops.

    :param user_id: user to look up
    :param roles: optional role bitmask; keep only staff whose roles overlap it
    :param filter_delete: drop soft-deleted staff records
    :return: queryset of Staff
    """
    staff_list_query = Staff.objects.filter(user_id=user_id)
    if filter_delete:
        staff_list_query = staff_list_query.filter(status=StaffStatus.NORMAL)
    if roles:
        # BUGFIX: the bitmask test previously hard-coded `roles & 1`, silently
        # ignoring the caller-supplied mask; bind the parameter instead.
        staff_list_query = staff_list_query.extra(
            where=["roles & %s"], params=[roles]
        )
    return staff_list_query.all()
def list_staff_apply_by_shop_id(shop_id: int):
    """
    List every staff application for a shop, oldest first.

    :param shop_id: owning shop
    :return: queryset of StaffApply
    """
    return StaffApply.objects.filter(shop_id=shop_id).order_by('id').all()
def list_staff_apply_by_shop_id_and_user_id(
    shop_id: int, user_id: int, filter_expired: bool = False
):
    """
    List one user's staff applications in a shop; in theory at most one of
    them is non-expired at any time.

    :param shop_id: owning shop
    :param user_id: applicant
    :param filter_expired: drop expired applications
    :return: queryset of StaffApply
    """
    query = StaffApply.objects.filter(shop_id=shop_id, user_id=user_id)
    if filter_expired:
        query = query.filter(expired=StaffApplyExpired.NO)
    return query.all()
def list_staff_by_shop_id(shop_id: int, keyword: str = None):
    """
    List a shop's active (non-deleted) staff, each with the user's public
    fields copied onto it; optionally filter by a name/nickname/phone keyword.

    :param shop_id: owning shop
    :param keyword: optional substring matched against the user's realname,
        nickname or phone
    :return: queryset of Staff with user fields attached
    """
    query = Staff.objects.filter(shop_id=shop_id, status=StaffStatus.NORMAL)
    if keyword:
        query = query.filter(
            Q(user__realname__contains=keyword)
            | Q(user__nickname__contains=keyword)
            | Q(user__phone__contains=keyword)
        )
    staff_list = query.order_by("id").all()
    for staff in staff_list:
        for field in USER_OUTPUT_CONSTANT:
            setattr(staff, field, getattr(staff.user, field))
    return staff_list
def list_staff_by_shop_id_with_user(shop_id: int):
    """Return the User objects of every staff record in a shop.

    NOTE(review): unlike list_staff_by_shop_id this applies no status filter,
    so users of soft-deleted staff are included — confirm that is intended.
    """
    staff_user_list = []
    staff_list = Staff.objects.filter(shop_id=shop_id).all()
    for staff in staff_list:
        staff_user_list.append(staff.user)
    return staff_user_list<file_sep>/wsc_django/wsc_django/apps/user/models.py
from django.db import models
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from itsdangerous import TimedJSONWebSignatureSerializer as TJWSSerializer, BadData
from user.constant import Sex, VERIFY_EMAIL_TOKEN_EXPIRES
from wsc_django.utils.models import TimeBaseModel
class User(AbstractUser):
    """User model: extends Django's AbstractUser with phone/WeChat/profile fields."""
    phone = models.CharField(max_length=11, unique=True, verbose_name="手机号")
    email = models.EmailField(unique=True, max_length=254, null=True, verbose_name="邮箱")
    password = models.CharField(null=True, max_length=128, verbose_name="密码(密文)")
    wx_unionid = models.CharField(max_length=64, null=True, verbose_name="微信unionid")
    sex = models.SmallIntegerField(
        null=True,
        default=Sex.UNKNOWN,
        verbose_name="用户性别 0:未知 1:男 2:女",
    )
    nickname = models.CharField(max_length=64, default="", verbose_name="用户昵称")
    realname = models.CharField(max_length=64, default="",verbose_name="用户真姓名")
    birthday = models.DateField(null=True, verbose_name="用户生日")
    head_image_url = models.CharField(max_length=1024, verbose_name="用户头像URL(存完整的)")
    wx_openid = models.CharField(null=True, max_length=64, verbose_name="微信openid")
    wx_country = models.CharField(null=True, max_length=32, verbose_name="用户所在国家")
    wx_province = models.CharField(null=True, max_length=32, verbose_name="用户所在省份")
    wx_city = models.CharField(null=True, max_length=32, verbose_name="用户所在城市")
    email_active = models.BooleanField(default=False, verbose_name='邮箱验证状态')

    class Meta:
        db_table = "user"
        verbose_name = "用户"
        verbose_name_plural = verbose_name
        indexes = [
            models.Index(name="ux_phone", fields=["phone"]),
            models.Index(name="ux_wx_unionid", fields=["wx_unionid"])
        ]

    def generate_verify_email_url(self):
        """Generate the signed URL sent to the user to verify their email.

        The token embeds user_id + email and expires after
        VERIFY_EMAIL_TOKEN_EXPIRES seconds.
        """
        serializer = TJWSSerializer(settings.SECRET_KEY, expires_in=VERIFY_EMAIL_TOKEN_EXPIRES)
        data = {'user_id': self.id, 'email': self.email}
        token = serializer.dumps(data).decode()
        # NOTE(review): frontend hosts are hard-coded; consider moving them to settings.
        if settings.DEBUG:
            verify_url = 'http://127.0.0.1:3030/#/email-verify?token=' + token
        else:
            verify_url = 'http://hzhst1314.cn/#/email-verify?token=' + token
        return verify_url

    @staticmethod
    def check_verify_email_token(token):
        # Decode the verification token and return the matching user, or
        # None when the token is invalid/expired or no user matches.
        serializer = TJWSSerializer(settings.SECRET_KEY, expires_in=VERIFY_EMAIL_TOKEN_EXPIRES)
        try:
            data = serializer.loads(token)
        except BadData:
            return None
        else:
            user_id = data['user_id']
            email = data['email']
            try:
                user = User.objects.get(id=user_id, email=email)
            except User.DoesNotExist:
                return None
            else:
                return user
class UserOpenid(TimeBaseModel):
    """Mapping between a user's WeChat openid and a public-account appid."""
    user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="useropenid", null=False, verbose_name="对应的用户对象")
    wx_openid = models.CharField(max_length=64, null=False, verbose_name="用户在对应公众号的openid")
    mp_appid = models.CharField(max_length=64, null=False, verbose_name="公众号的appid(特殊的,对于利楚服务商支付,格式为lcwx-[shop_id])")

    class Meta:
        db_table = "user_openid"
        verbose_name = "用户openid"
        verbose_name_plural = verbose_name

    def set_wx_openid(self, wx_openid):
        # Mutates the instance only; caller is responsible for save().
        self.wx_openid = wx_openid
<file_sep>/wsc_django/wsc_django/apps/promotion/constant.py
class PromotionType:
    """Promotion type codes."""
    NORMAL = 0  # no promotion
    GROUPON = 1 # 拼团活动<file_sep>/wsc_django/wsc_django/apps/dashboard/constant.py
class StatisticType:
    """Dashboard statistics granularity.

    NOTE(review): value 2 is unused — possibly a removed WEEKLY option; confirm.
    """
    DAILY = 1  # daily statistics
    MONTHLY = 3  # monthly statistics
    YEARLY = 4  # yearly statistics
<file_sep>/wsc_django/wsc_django/apps/celery_tasks/main.py
import os

from celery import Celery
from django.conf import settings

# Point Celery's Django integration at the project settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

# Create the Celery application.
app = Celery('wsc_django', backend="")
app.config_from_object('celery_tasks.config')

# Auto-discover task modules (tasks.py) inside the listed packages so new
# tasks are picked up without manual registration.
app.autodiscover_tasks(['celery_tasks.celery_auto_word'])<file_sep>/wsc_django/wsc_django/apps/groupon/urls.py
"""
店铺设置相关路由
"""
from django.urls import path, re_path
from groupon import views
urlpatterns_admin = [
path('api/admin/groupon/', views.AdminGrouponView.as_view()), # 创建拼团&编辑拼团&拼团活动详情
path(
'api/admin/groupon/period/verification/', views.AdminGrouponPeriodVerificationView.as_view()
), # 验证拼团时间合法性
path('api/admin/groupon/off/', views.AdminGrouponOffView.as_view()), # 停用拼团
path('api/admin/groupons/', views.AdminGrouponsView.as_view()), # 拼团活动列表页
path('api/admin/groupon-attends/', views.AdminGrouponAttendsView.as_view()), # 拼团参与列表
path('api/admin/groupon-attend/', views.AdminGrouponAttendView.as_view()), # 拼团参与详情
path(
'api/admin/gruopon-attend/orders/', views.AdminGrouponAttendOrdersView.as_view()
), # 后台获取一个团的所有参团订单
path(
'api/admin/groupon-attend/success/force/', views.AdminGrouponAttendSuccessForceView.as_view()
), # 强制成团
]
urlpatterns_mall = [
re_path(
r'api/mall/(?P<shop_code>\w+)/groupon-attend/initation/', views.MallGrouponAttendInitationView.as_view()
), # 开团
]
urlpatterns = urlpatterns_mall + urlpatterns_admin<file_sep>/wsc_django/wsc_django/apps/dashboard/urls.py
"""
店铺设置相关路由
"""
from django.urls import path
from dashboard import views
urlpatterns = [
path("api/admin/dashboard/shop/data/", views.AdminDashboardShopDataView.as_view()), # 店铺数据概览
path("api/admin/dashboard/order/data/", views.AdminDashboardOrderDataView.as_view()), # 店铺订单数据
path("api/admin/dashboard/product/data/", views.AdminDashboardProductDataView.as_view()), # 店铺商品数据
]
<file_sep>/wsc_django/wsc_django/apps/user/constant.py
# User fields copied onto related objects (e.g. staff rows) for API output.
USER_OUTPUT_CONSTANT = (
    "head_image_url",
    "nickname",
    "realname",
    "sex",
    "phone",
    "birthday",
    # "passport_id",
)


class Sex:
    """User gender codes."""
    UNKNOWN = 0
    MALE = 1
    FEMALE = 2


class UserLoginType:
    """Login method codes: WeChat, password, phone."""
    WX = 0
    PWD = 1
    PHONE = 2


# Email-verification token lifetime in seconds (24 hours).
VERIFY_EMAIL_TOKEN_EXPIRES = 24 * 60 * 60
<file_sep>/wsc_django/wsc_django/apps/user/serializers.py
from rest_framework import serializers
from user.models import User
from user.services import create_user
from wsc_django.utils.constant import DateFormat
class UserCreateSerializer(serializers.Serializer):
    """Serializer for creating a user (delegates persistence to create_user)."""
    username = serializers.CharField(required=True, label="用户名,jwt必须")
    phone = serializers.CharField(required=True, label="手机号")
    password = serializers.CharField(required=False, label="密码")
    wx_unionid = serializers.CharField(required=False, max_length=64, label="微信unionid")
    sex = serializers.IntegerField(required=False, label="用户性别 0:未知 1:男 2:女")
    nickname = serializers.CharField(required=True, max_length=64, label="用户昵称")
    realname = serializers.CharField(required=False, max_length=64, label="用户真姓名")
    birthday = serializers.DateField(required=False, label="用户生日")
    head_image_url = serializers.CharField(required=True, max_length=1024, label="用户头像URL(存完整的)")
    wx_openid = serializers.CharField(required=False, max_length=64, label="微信openid")
    wx_country = serializers.CharField(required=False, max_length=32, label="用户所在国家")
    wx_province = serializers.CharField(required=False, max_length=32, label="用户所在省份")
    wx_city = serializers.CharField(required=False, max_length=32, label="用户所在城市")

    def create(self, validated_data):
        # Persistence lives in the service layer (user.services.create_user).
        user = create_user(validated_data)
        return user
class UserSerializer(serializers.Serializer):
    """Read serializer for basic user profile fields."""
    realname = serializers.CharField(required=False, label="用户真实姓名")
    nickname = serializers.CharField(required=False, label="微信昵称")
    sex = serializers.IntegerField(required=False, label="性别")
    phone = serializers.CharField(required=False, label="手机号")
    birthday = serializers.DateField(required=False, format=DateFormat.DAY, default="", label="用户生日")
    head_image_url = serializers.CharField(required=False, label="头像")
class operatorSerializer(serializers.Serializer):
    """Serializer for the operator (reviewer) of an audit action.

    NOTE(review): class name violates CapWords (should be OperatorSerializer);
    left unchanged because other modules import it by this name.
    """
    operate_id = serializers.IntegerField(label="操作人id")
    operate_name = serializers.CharField(label="操作人名称")
    operate_img = serializers.CharField(label="操作人头像")
class SuperUserSerializer(UserSerializer):
    """Super-admin user detail serializer (extends UserSerializer)."""
    user_id = serializers.IntegerField(source="id", label="用户id")
    username = serializers.CharField(label="用户名")
    email_active = serializers.BooleanField(label="是否激活邮箱")
    email = serializers.EmailField(label="邮箱")
class EmailSerializer(serializers.ModelSerializer):
    """Serializer used to set/update a user's email address."""

    class Meta:
        model = User
        fields = ('id', 'email')

    def update(self, instance, validated_data):
        # Persist the new email; email_active re-verification is handled elsewhere.
        email = validated_data['email']
        instance.email = email
        instance.save()
        return instance<file_sep>/wsc_django/wsc_django/apps/delivery/models.py
import decimal
from django.db import models
from shop.models import Shop
from order.constant import OrderDeliveryMethod
from delivery.constant import (
DeliveryType,
)
from wsc_django.utils.models import TimeBaseModel
class Delivery(TimeBaseModel):
    """Delivery record for a shipped order."""
    delivery_type = models.SmallIntegerField(
        null=False,
        default=DeliveryType.ExpressDelivery,
        verbose_name="配送方式",
    )
    # company/express_num are only meaningful for express delivery.
    company = models.CharField(max_length=32, verbose_name="快递公司,仅在配送方式为快递时才有")
    express_num = models.CharField(max_length=32, verbose_name="快递单号,仅在配送方式为快递时才有")

    class Meta:
        db_table = "delivery"
        verbose_name = "配送记录"
        verbose_name_plural = verbose_name
class DeliveryConfig(TimeBaseModel):
    """Per-shop order delivery configuration (home delivery + self pick-up)."""
    # NOTE(review): BUG — the trailing `.primary_key` assigns the boolean
    # attribute of the field instance (True) to `id` instead of declaring the
    # OneToOneField on the model, so Django adds its own auto pk and the Shop
    # link is never created.  Removing `.primary_key` changes the DB schema
    # and existing migrations, so it is flagged here rather than fixed.
    id = models.OneToOneField(
        Shop,
        primary_key=True,
        null=False,
        on_delete=models.CASCADE,
        verbose_name="店铺id"
    ).primary_key
    # Home-delivery mode settings.
    home_on = models.BooleanField(null=False, default=True, verbose_name="配送模式是否开启")
    home_minimum_order_amount = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        default=0,
        verbose_name="配送模式起送金额",
    )
    home_delivery_amount = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        default=0,
        verbose_name="配送模式配送费",
    )
    home_minimum_free_amount = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        default=0,
        verbose_name="配送模式免配送费最小金额",
    )
    # Self pick-up mode settings.
    pick_on = models.BooleanField(null=False, default=True, verbose_name="自提模式是否开启")
    pick_service_amount = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        default=0,
        verbose_name="自提模式服务费",
    )
    pick_minimum_free_amount = models.DecimalField(
        max_digits=13,
        decimal_places=4,
        null=False,
        default=0,
        verbose_name="自提模式免服务费最小金额",
    )
    pick_today_on = models.BooleanField(null=False, default=True, verbose_name="今天自提是否开启")
    pick_tomorrow_on = models.BooleanField(null=False, default=True, verbose_name="明天自提是否开启")

    class Meta:
        db_table = "delivery_config"
        verbose_name = "配送配置"
        verbose_name_plural = verbose_name

    def limit(self, delivery_method, order_amount):
        """Check the order against delivery limits.

        :param delivery_method: delivery method code
        :param order_amount: order total amount
        :return: False when home delivery is chosen and the order total is
                 below the minimum order amount; True otherwise
        """
        if (
            delivery_method == OrderDeliveryMethod.HOME_DELIVERY
            and self.home_minimum_order_amount > order_amount
        ):
            return False
        return True

    def calculate(self, delivery_method, order_amount):
        """Return the delivery-fee discount for this order.

        Orders at or above the mode's free-shipping threshold get the full
        fee (delivery or service) discounted; smaller orders get 0.

        :param delivery_method: delivery method code
        :param order_amount: order total amount
        :return: Decimal discount amount
        """
        if delivery_method == OrderDeliveryMethod.HOME_DELIVERY:
            if order_amount < self.home_minimum_free_amount:
                result = decimal.Decimal(0)
            else:
                result = self.home_delivery_amount
        else:
            if order_amount < self.pick_minimum_free_amount:
                result = decimal.Decimal(0)
            else:
                result = self.pick_service_amount
        return result

    def get_delivery_amount_gross(self, delivery_method):
        """Return the pre-discount fee for the given delivery method."""
        if delivery_method == OrderDeliveryMethod.HOME_DELIVERY:
            result = self.home_delivery_amount
        else:
            result = self.pick_service_amount
        return result

    def is_delivery_method_valid(self, delivery_method):
        """Return True when the given delivery method is enabled for this shop."""
        if (
            delivery_method == OrderDeliveryMethod.HOME_DELIVERY and not self.home_on
        ) or (
            delivery_method == OrderDeliveryMethod.CUSTOMER_PICK and not self.pick_on
        ):
            return False
        return True
class PickPeriodConfigLine(TimeBaseModel):
    """One self pick-up time window of a shop's delivery configuration."""
    delivery_config = models.ForeignKey(
        DeliveryConfig,
        null=False,
        on_delete=models.CASCADE,
        verbose_name="订单配送配置对象"
    )
    # Times are stored as strings (e.g. "09:00"); format is not enforced here.
    from_time = models.CharField(max_length=16, null=False, verbose_name="自提起始时间")
    to_time = models.CharField(max_length=16, null=False, verbose_name="自提终止时间")

    class Meta:
        db_table = "pick_period_config_line"
        verbose_name = "自提时间段"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/groupon/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
import promotion.abstract
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations` (Django 3.1.6); do not edit by hand.

    initial = True

    dependencies = [
        ('customer', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Groupon',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('price', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='商品拼团价格')),
                ('from_datetime', models.DateTimeField(verbose_name='拼团活动开始时间')),
                ('to_datetime', models.DateTimeField(verbose_name='拼团活动结束时间')),
                ('groupon_type', models.SmallIntegerField(verbose_name='拼团活动类型 1:普通 2:老带新')),
                ('success_size', models.SmallIntegerField(default=1, verbose_name='成团人数')),
                ('quantity_limit', models.IntegerField(default=0, verbose_name='购买数量上限(限制每个订单的购买数量)')),
                ('success_limit', models.IntegerField(default=0, verbose_name='成团数量上限(限制单次活动的最大成团数量)')),
                ('attend_limit', models.IntegerField(default=0, verbose_name='参团数量上限(每个用户能参加同一拼团的次数)')),
                ('success_valid_hour', models.IntegerField(default=24, verbose_name='开团有效时间(超过此时间未成团的活动将自动解散)')),
                ('status', models.SmallIntegerField(default=1, verbose_name='拼团活动状态 1:启用 2:停用 3:过期')),
                ('succeeded_count', models.IntegerField(default=0, verbose_name='成团数')),
                ('succeeded_quantity', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='成团件数')),
                ('is_editable', models.BooleanField(default=True, verbose_name='是否可以编辑')),
            ],
            options={
                'verbose_name': '拼团活动',
                'verbose_name_plural': '拼团活动',
                'db_table': 'groupon',
            },
        ),
        migrations.CreateModel(
            name='GrouponAttend',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('size', models.IntegerField(default=0, verbose_name='拼团当前参与人数')),
                ('anonymous_size', models.IntegerField(default=0, verbose_name='强制成团添加的匿名用户数量')),
                ('success_size', models.IntegerField(verbose_name='成团人数')),
                ('to_datetime', models.DateTimeField(verbose_name='拼团参与结束时间')),
                ('status', models.SmallIntegerField(default=0, verbose_name='拼团参与状态 -1: 超时未支付 0:已创建 1:拼团中 2:已成团 3:已失败')),
                ('failed_reason', models.CharField(default='', max_length=64, verbose_name='失败原因')),
                ('groupon', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='groupon.groupon', verbose_name='拼团活动')),
            ],
            options={
                'verbose_name': '拼团参与表',
                'verbose_name_plural': '拼团参与表',
                'db_table': 'groupon_attend',
            },
            bases=(models.Model, promotion.abstract.AbstractPromotionRule),
        ),
        migrations.CreateModel(
            name='GrouponAttendDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('is_sponsor', models.BooleanField(default=True, verbose_name='是否是团长')),
                ('is_new_customer', models.BooleanField(default=False, verbose_name='是否是新用户')),
                ('status', models.SmallIntegerField(default=0, verbose_name='参团状态 -1:超时未支付 0:未支付 1:已支付')),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='customer.customer', verbose_name='参与客户')),
                ('groupon_attend', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='groupon.grouponattend', verbose_name='拼团参与')),
            ],
            options={
                'verbose_name': '拼团参与详情表',
                'verbose_name_plural': '拼团参与详情表',
                'db_table': 'groupon_attend_detail',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/order/urls.py
"""
订单相关的路由
"""
from django.urls import path, re_path
from order import views
urlpatterns_admin = [
path('api/admin/orders/', views.AdminOrdersView.as_view()), # 后台获取订单列表
path('api/admin/order/', views.AdminOrderView.as_view()), # 后台获取订单详情
path('api/admin/order/print/', views.AdminOrderPrintView.as_view()), # 后台打印订单
path('api/admin/order/confirm/', views.AdminOrderConfirmView.as_view()), # 后台确认订单
path('api/admin/order/direct/', views.AdminOrderDirectView.as_view()), # 后台一键完成订单
path('api/admin/order/finish/', views.AdminOrderFinishView.as_view()), # 后台完成订单
path('api/admin/order/refund/', views.AdminOrderRefundView.as_view()), # 后台订单退款
path('api/admin/order/operate-log/', views.AdminOrderOperateLogView.as_view()), # 后台获取订单日志
path('api/admin/order/paid/count/', views.AdminOrderPaidCountView.as_view()), # 未处理订单数
path('api/admin/abnormal-order/count/', views.AdminAbnormalOrderCountView.as_view()), # 获取异常订单数量
path('api/admin/abnormal-orders/', views.AdminAbnormalOrdersView.as_view()), # 后台获取异常订单列表
]
urlpatterns_mall = [
re_path(r'^api/mall/(?P<shop_code>\w+)/order/$', views.MallOrderView.as_view()), # 提交订单&订单详情
re_path(r'^api/mall/(?P<shop_code>\w+)/order/cart/verify/$', views.MallCartVerifyView.as_view()), # 购物篮内验证
re_path(r'^api/mall/(?P<shop_code>\w+)/order/product/verify/$', views.MallProductVerifyView.as_view()), # 确认订单前验证
re_path(r'^api/mall/(?P<shop_code>\w+)/orders/', views.MallOrdersView.as_view()), # 商城获取客户订单列表
path('api/mall/order/cancellation/', views.MallOrderCancellationView.as_view()), # 商城订单取消
path('api/mall/order/payment/', views.MallOrderPaymentView.as_view()), # 商城订单支付
]
urlpatterns = urlpatterns_admin + urlpatterns_mall
<file_sep>/wsc_django/wsc_django/apps/pvuv/views.py
import datetime
from webargs import fields, validate
from webargs.djangoparser import use_args
from product.constant import ProductStatus
from pvuv.interface import list_product_ids_by_shop_id_interface
from pvuv.serializers import ProductBrowseRecordsSerializer
from pvuv.services import list_product_browse_record_by_id, create_product_browse_record
from wsc_django.utils.arguments import StrToDict
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import AdminBaseView, MallBaseView
# Registry mapping a page type (e.g. "product") to its record-builder function.
_MAP_BROWSE_RECORD = {}


def register_browse_record(type):
    """Decorator factory: register the decorated function under ``type``.

    The function is stored in _MAP_BROWSE_RECORD and returned unchanged,
    so it remains usable as a normal method.
    """
    def _decorator(func):
        _MAP_BROWSE_RECORD[type] = func
        return func

    return _decorator
class AdminProductBrowseRecordsView(AdminBaseView):
    """Admin - paginated browse (visit) records of one product, last 7 days."""
    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PRODUCT]
    )
    @use_args(
        {
            "product_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="货品ID"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        shop_id = self.current_shop.id
        # Fixed window: the 7 days up to and including today
        # (to_date is exclusive, hence tomorrow).
        args["to_date"] = datetime.date.today() + datetime.timedelta(1)
        args["from_date"] = datetime.date.today() - datetime.timedelta(7)
        record_list = list_product_browse_record_by_id(shop_id, **args)
        record_list = self._get_paginated_data(record_list, ProductBrowseRecordsSerializer)
        return self.send_success(data_list=record_list)
class MallBrowseRecord(MallBaseView):
    """Mall - record a page visit (currently only product pages are handled)."""

    @register_browse_record("product")
    def gen_product_browse_record(self, args: dict):
        # Build and persist one product browse record.
        # Returns (ok, error_text): error_text is None on success.
        product_id = int(args["spa_params"]["product_id"])
        product_ids = list_product_ids_by_shop_id_interface(
            self.current_shop.id, [ProductStatus.ON, ProductStatus.OFF]
        )
        if product_id not in product_ids:
            return False, "货品不存在"
        info = {
            "shop_id": self.current_shop.id,
            "user_id": self.current_user.id,
            "product_id": product_id,
            "start_time": args["start_time"],
            "duration": args["duration"],
            "pre_page_name": args["pre_page"].get("name"),
            "next_page_name": args["next_page"].get("name"),
        }
        create_product_browse_record(info)
        return True, None

    @use_args(
        {
            "fullpath": fields.String(
                required=True, validate=[validate.Length(1, 256)], comment="url全路径"
            ),
            "query": StrToDict(required=True, comment="路由里面的query参数"),
            "cur_page": StrToDict(required=True, comment="当前页面, 包含type, name2个值, str"),
            "pre_page": StrToDict(required=True, comment="上一个页面, 包含type, name2个值, str"),
            "next_page": StrToDict(required=True, comment="下一个页面, 包含type, name2个值, str"),
            "spa_query": StrToDict(required=True, comment="当前页面的一些参数"),
            "spa_params": StrToDict(required=True, comment="当前页面的一些参数"),
            "start_time": fields.DateTime(required=True, comment="进入当前页面的时间"),
            "duration": fields.Integer(
                required=True, validate=[validate.Range(0)], comment="在页面停留的时间"
            ),
        },
        location="json"
    )
    def post(self, request, args, shop_code):
        self._set_current_shop(request, shop_code)
        # Only product pages are recorded for now; extend via @register_browse_record.
        cur_page_type = args["cur_page"]["type"]
        # NOTE(review): an unregistered page type raises KeyError here — confirm intended.
        gen_browse_record_func = _MAP_BROWSE_RECORD[cur_page_type]
        success, info = gen_browse_record_func(self, args)
        if not success:
            return self.send_fail(error_text=info)
        return self.send_success()<file_sep>/wsc_django/wsc_django/apps/dashboard/views.py
from webargs import fields, validate
from webargs.djangoparser import use_args
from dashboard.constant import StatisticType
from dashboard.services import list_shop_dashboard_data, list_order_dashboard_data, list_product_dashboard_data
from wsc_django.utils.core import TimeFunc
from wsc_django.utils.views import AdminBaseView
class AdminDashboardShopDataView(AdminBaseView):
    """Admin - shop data overview, aggregated daily/monthly/yearly."""

    # NOTE(review): the permission constant is spelled "ADMIN_DASHBORD"
    # project-wide (missing 'A'); renaming would need a coordinated change.
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_DASHBORD]
    )
    @use_args(
        {
            "statistic_type": fields.Integer(
                required=True,
                validate=validate.OneOf(
                    [StatisticType.DAILY, StatisticType.MONTHLY, StatisticType.YEARLY]
                ),
            ),
            "from_date": fields.String(missing="", comment="筛选起始日期"),
            "to_date": fields.String(missing="", comment="筛选终止日期"),
        },
        location="query"
    )
    def get(self, request, args):
        # Normalize/derive the date range; invalid ranges surface as a fail response.
        try:
            from_date, to_date = TimeFunc.get_to_date_by_from_date(
                args["from_date"], args["to_date"], args["statistic_type"]
            )
        except ValueError as e:
            return self.send_fail(error_text=str(e))
        _, data_list = list_shop_dashboard_data(
            self.current_shop.id,
            from_date,
            to_date,
            args["statistic_type"],
        )
        return self.send_success(data_list=data_list)
class AdminDashboardOrderDataView(AdminBaseView):
    """Admin - shop order statistics, aggregated daily/monthly/yearly."""

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_DASHBORD]
    )
    @use_args(
        {
            "statistic_type": fields.Integer(
                required=True,
                validate=validate.OneOf(
                    [StatisticType.DAILY, StatisticType.MONTHLY, StatisticType.YEARLY]
                ),
            ),
            "from_date": fields.String(missing="", comment="筛选起始日期"),
            "to_date": fields.String(missing="", comment="筛选终止日期"),
        },
        location="query"
    )
    def get(self, request, args):
        # Normalize/derive the date range; invalid ranges surface as a fail response.
        try:
            from_date, to_date = TimeFunc.get_to_date_by_from_date(
                args["from_date"], args["to_date"], args["statistic_type"]
            )
        except ValueError as e:
            return self.send_fail(error_text=str(e))
        _, data_list = list_order_dashboard_data(
            self.current_shop.id,
            from_date,
            to_date,
            args["statistic_type"],
        )
        return self.send_success(data_list=data_list)
class AdminDashboardProductDataView(AdminBaseView):
    """Admin - shop product statistics for a date range."""

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_DASHBORD]
    )
    @use_args(
        {
            "statistic_type": fields.Integer(
                required=True,
                validate=validate.OneOf(
                    [StatisticType.DAILY, StatisticType.MONTHLY, StatisticType.YEARLY]
                ),
            ),
            "from_date": fields.String(missing="", comment="筛选起始日期"),
            "to_date": fields.String(missing="", comment="筛选终止日期"),
        },
        location="query"
    )
    def get(self, request, args):
        # Normalize/derive the date range; note statistic_type only shapes the
        # range here — it is not passed to the product query.
        try:
            from_date, to_date = TimeFunc.get_to_date_by_from_date(
                args["from_date"], args["to_date"], args["statistic_type"]
            )
        except ValueError as e:
            return self.send_fail(error_text=str(e))
        _, data_list = list_product_dashboard_data(
            self.current_shop.id, from_date, to_date
        )
        return self.send_success(data_list=data_list)
<file_sep>/wsc_django/wsc_django/apps/customer/models.py
from django.db import models
# Create your models here.
from customer.constant import MineAddressDefault, MineAddressStatus
from shop.models import Shop
from user.constant import Sex
from user.models import User
from wsc_django.utils.core import FormatAddress
from wsc_django.utils.models import TimeBaseModel
class Customer(TimeBaseModel):
    """Customer model: one user's consumption record within one shop."""
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="客户对应的店铺对象")
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False, verbose_name="客户对应的用户对象")
    create_date = models.DateField(null=False, auto_now_add=True, verbose_name="客户新增日期")
    consume_amount = models.DecimalField(max_digits=13, decimal_places=4, default=0, verbose_name="消费金额")
    consume_count = models.IntegerField(default=0, verbose_name="消费次数")
    point = models.DecimalField(max_digits=13, decimal_places=4, default=0, verbose_name="积分")
    remark = models.CharField(max_length=64, default="", verbose_name="备注")

    class Meta:
        db_table = "customer"
        verbose_name = "客户"
        verbose_name_plural = verbose_name

    def is_new_customer(self):
        # A customer with zero recorded purchases counts as new.
        return not bool(self.consume_count)
class CustomerPoint(TimeBaseModel):
    """History of a customer's point (loyalty) balance changes."""
    customer = models.ForeignKey(Customer, on_delete=models.CASCADE, null=False, verbose_name="对应客户对象")
    point_change = models.DecimalField(max_digits=13, decimal_places=4, verbose_name="积分变更量")
    create_time = models.DateTimeField(auto_now_add=True, null=False, verbose_name="创建时间")
    current_point = models.DecimalField(max_digits=13, decimal_places=4, verbose_name="历史时刻当前积分")
    type = models.SmallIntegerField(null=False, default=0, verbose_name="变更类型(预留)")

    class Meta:
        db_table = "customer_point"
        verbose_name = "客户历史积分"
        verbose_name_plural = verbose_name
class MineAddress(TimeBaseModel):
    """A customer's saved delivery address within one shop."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="address", null=False, verbose_name="顾客ID")
    # NOTE(review): verbose_name "顾客ID" on the shop FK looks copy-pasted from
    # the user field above — should probably say shop.
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, related_name="address", null=False, verbose_name="顾客ID")
    province = models.IntegerField(verbose_name="省份编号")
    city = models.IntegerField(verbose_name="城市编号")
    county = models.IntegerField(verbose_name="区编号")
    address = models.CharField(max_length=64, null=False, verbose_name="详细地址")
    longitude = models.DecimalField(null=True, max_digits=10, decimal_places=4, verbose_name="经度")
    latitude = models.DecimalField(null=True, max_digits=10, decimal_places=4, verbose_name="纬度")
    name = models.CharField(max_length=32, null=False, verbose_name="顾客姓名")
    sex = models.SmallIntegerField(null=False, default=Sex.UNKNOWN, verbose_name="顾客性别,0:未知1:男2:女")
    added = models.CharField(null=True, max_length=50, verbose_name="补充说明,可以填写门牌号等信息")
    phone = models.CharField(max_length=32, default="", verbose_name="顾客手机号")
    default = models.SmallIntegerField(default=MineAddressDefault.NO, verbose_name="是否为默认地址")
    status = models.SmallIntegerField(default=MineAddressStatus.NORMAL, verbose_name="状态,0:删除1:正常")

    class Meta:
        db_table = "mine_address"
        verbose_name = "我的地址"
        verbose_name_plural = verbose_name

    @property
    def full_address(self):
        # Human-readable address assembled from region codes + detail text.
        return FormatAddress.get_format_address(
            self.province, self.city, self.county, self.address
        )
<file_sep>/wsc_django/wsc_django/apps/customer/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations` (Django 3.1.6); do not edit by hand.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('create_date', models.DateField(auto_now_add=True, verbose_name='客户新增日期')),
                ('consume_amount', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='消费金额')),
                ('consume_count', models.IntegerField(default=0, verbose_name='消费次数')),
                ('point', models.DecimalField(decimal_places=4, default=0, max_digits=13, verbose_name='积分')),
                ('remark', models.CharField(default='', max_length=64, verbose_name='备注')),
            ],
            options={
                'verbose_name': '客户',
                'verbose_name_plural': '客户',
                'db_table': 'customer',
            },
        ),
        migrations.CreateModel(
            name='CustomerPoint',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('point_change', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='积分变更量')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('current_point', models.DecimalField(decimal_places=4, max_digits=13, verbose_name='历史时刻当前积分')),
                ('type', models.SmallIntegerField(default=0, verbose_name='变更类型(预留)')),
            ],
            options={
                'verbose_name': '客户历史积分',
                'verbose_name_plural': '客户历史积分',
                'db_table': 'customer_point',
            },
        ),
        migrations.CreateModel(
            name='MineAddress',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('province', models.IntegerField(verbose_name='省份编号')),
                ('city', models.IntegerField(verbose_name='城市编号')),
                ('county', models.IntegerField(verbose_name='区编号')),
                ('address', models.CharField(max_length=64, verbose_name='详细地址')),
                ('longitude', models.DecimalField(decimal_places=4, max_digits=10, null=True, verbose_name='经度')),
                ('latitude', models.DecimalField(decimal_places=4, max_digits=10, null=True, verbose_name='纬度')),
                ('name', models.CharField(max_length=32, verbose_name='顾客姓名')),
                ('sex', models.SmallIntegerField(default=0, verbose_name='顾客性别,0:未知1:男2:女')),
                ('phone', models.CharField(default='', max_length=32, verbose_name='顾客手机号')),
                ('default', models.SmallIntegerField(default=0, verbose_name='是否为默认地址')),
                ('status', models.SmallIntegerField(default=1, verbose_name='状态,0:删除1:正常')),
            ],
            options={
                'verbose_name': '我的地址',
                'verbose_name_plural': '我的地址',
                'db_table': 'mine_address',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/utils/pagination.py
from rest_framework.pagination import PageNumberPagination
class StandardResultsSetPagination(PageNumberPagination):
    """Default pagination: 20 items per page, client may request up to 20 via ?page_size=."""
    page_size = 20  # default items per page
    page_size_query_param = 'page_size'  # client query param overriding page size
    max_page_size = 20 # 前端最多能设置的每页数量<file_sep>/wsc_django/wsc_django/apps/ws/constant.py
# Websocket channel name used by the admin backend.
CHANNEL_ADMIN = "websocket_admin" # 后台websocket频道<file_sep>/wsc_django/wsc_django/apps/product/urls.py
"""
货品相关的路由
"""
from django.urls import path, re_path
from product import views
urlpatterns_admin = [
path('api/admin/product/', views.AdminProductView.as_view()), # 货品创建&货品详情&编辑货品
path('api/admin/products/', views.AdminProductsView.as_view()), # 获取货品列表&批量修改货品(上架,下架)&批量删除货品
path('api/admin/product/groups/', views.AdminProductGroupsView.as_view()), # 批量更新货品分组&获取货品分组列表
path('api/admin/product/group/', views.AdminProductGroupView.as_view()), # 添加货品分组&编辑货品分组&删除货品分组
path('api/admin/product/group/sort/', views.AdminProductGroupSortView.as_view()), # 货品分组排序
path('api/admin/product/sale_records/', views.AdminProductSaleRecordView.as_view()), # 获取一个货品的销售记录
path(
'api/admin/product/alive-groupon/', views.AdminProductAliveGrouponView.as_view()
), # 查询此刻或者未来有拼团活动的货品
]
urlpatterns_mall = [
re_path(r'^api/mall/product/(?P<shop_code>\w+)/$', views.MallProductView.as_view()), # 获取单个货品详情
re_path(r'^api/mall/products/(?P<shop_code>\w+)/$', views.MallProductsView.as_view()), # 获取所有货品列表,带上分组
]
urlpatterns = urlpatterns_admin + urlpatterns_mall
<file_sep>/wsc_django/wsc_django/apps/promotion/urls.py
"""
营销活动相关的路由
"""
from django.urls import path
urlpatterns = [
]
<file_sep>/wsc_django/wsc_django/apps/product/interface.py
from groupon.services import list_alive_groupon_by_product_ids
from logs.services import create_product_log
from order.selectors import list_order_with_order_details_by_product_id
def list_order_with_order_details_by_product_id_interface(shop_id: int, product_id: int):
    """Interface wrapper: list one product's orders together with their details."""
    return list_order_with_order_details_by_product_id(shop_id, product_id)
def list_alive_groupon_by_product_ids_interface(product_ids: list):
    """Interface wrapper: ids of products with a current or upcoming groupon."""
    return list_alive_groupon_by_product_ids(product_ids)
def create_product_log_interface(log_info: dict):
    """Interface wrapper: create one product-module operation log entry."""
    log = create_product_log(log_info)
    return log<file_sep>/wsc_django/wsc_django/apps/logs/urls.py
"""
操作日志相关的路由
"""
from django.urls import path
from logs import views
urlpatterns = [
path('api/admin/logs/', views.AdminLogsView.as_view()), # 操作记录列表获取
path('api/admin/log/operators/', views.AdminOperatorsView.as_view()), # 操作人员列表获取
]
<file_sep>/wsc_django/wsc_django/apps/user/utils.py
import jwt
import warnings
import uuid
import time
from calendar import timegm
from datetime import datetime
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext as _
from rest_framework import exceptions
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework_jwt.compat import get_username
from rest_framework_jwt.compat import get_username_field
from rest_framework_jwt.settings import api_settings
jwt_decode_handler = api_settings.JWT_DECODE_HANDLER
jwt_get_username_from_payload = api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER
def jwt_response_payload_handler(token, refresh_token, user=None, request=None):
    """Build the JSON body returned after a successful JWT authentication.

    Includes both tokens plus the user's id and nickname. ``user`` is
    dereferenced unconditionally, so callers must pass a real user object.
    """
    body = {'token': token, 'refresh_token': refresh_token}
    body['user_id'] = user.id
    body['nickname'] = user.nickname
    return body
def jwt_payload_handler(user, expiration_delta, token_type='access_token'):
    """
    Build the JWT payload (adapted from DRF-JWT's default handler).

    Customizations over the stock handler:
      1. caller-supplied ``expiration_delta`` instead of a fixed lifetime;
      2. the stored (hashed) password is embedded so previously issued
         tokens become invalid after a password change (checked in
         ``ZhiHaoJWTAuthentication.authenticate_credentials``);
      3. ``token_type`` distinguishes access_token from refresh_token.
    """
    username_field = get_username_field()
    username = get_username(user)
    # Kept from the upstream DRF-JWT handler: flags legacy payload fields.
    warnings.warn(
        'The following fields will be removed in the future: '
        '`email` and `user_id`. ',
        DeprecationWarning
    )
    payload = {
        'user_id': user.pk,
        'username': username,
        # BUG FIX: this value was a broken placeholder; embed the stored
        # password hash so a password change invalidates old tokens.
        'password': user.password,
        'token_type': token_type,
        'exp': datetime.utcnow() + expiration_delta
    }
    if hasattr(user, 'email'):
        payload['email'] = user.email
    if isinstance(user.pk, uuid.UUID):
        # UUID primary keys are not JSON-serializable; stringify them.
        payload['user_id'] = str(user.pk)
    payload[username_field] = username
    # Include original issued-at time for a brand new token,
    # to allow token refresh.
    if api_settings.JWT_ALLOW_REFRESH:
        payload['orig_iat'] = timegm(
            datetime.utcnow().utctimetuple()
        )
    if api_settings.JWT_AUDIENCE is not None:
        payload['aud'] = api_settings.JWT_AUDIENCE
    if api_settings.JWT_ISSUER is not None:
        payload['iss'] = api_settings.JWT_ISSUER
    return payload
class ZhiHaoJWTAuthentication(JSONWebTokenAuthentication):
    """Project-specific JWT authentication (rewritten from DRF-JWT).

    Adds a non-raising ``authenticate_token`` helper and a credentials
    check that also compares the password hash embedded in the payload,
    so tokens issued before a password change are rejected.
    """
    def authenticate_token(self, token):
        """Validate a raw token string.

        Returns ``(True, user)`` on success, ``(False, None)`` on any
        decode/expiry/credential failure — never raises, so views can
        fall back to the refresh token.
        """
        try:
            payload = jwt_decode_handler(token)
        except jwt.ExpiredSignature:
            print('Signature has expired.')
            return False, None
        except jwt.DecodeError:
            print('Error decoding signature.')
            return False, None
        except jwt.InvalidTokenError:
            return False, None
        try:
            user = self.authenticate_credentials(payload)
        except Exception as e:
            # Any credential failure (inactive user, stale password, ...)
            # is reported as an invalid token to the caller.
            print(e)
            return False, None
        return True, user
    def authenticate_credentials(self, payload):
        """
        Returns an active user that matches the payload's username and
        embedded password hash. Adapted from the same-named method of
        BaseJSONWebTokenAuthentication for the customized payload.
        """
        User = get_user_model()
        username = jwt_get_username_from_payload(payload)
        # BUG FIX: key was a broken placeholder; the payload stores the
        # password hash under 'password' (see jwt_payload_handler).
        password = payload.get('password')
        if not username:
            msg = _('Invalid payload.')
            raise exceptions.AuthenticationFailed(msg)
        try:
            user = User.objects.get_by_natural_key(username)
        except User.DoesNotExist:
            msg = _('Invalid signature.')
            raise exceptions.AuthenticationFailed(msg)
        if not user.is_active:
            msg = _('User account is disabled.')
            raise exceptions.AuthenticationFailed(msg)
        # Reject tokens minted before the most recent password change.
        if user.password != password:
            msg = _('User have update password')
            raise exceptions.AuthenticationFailed(msg)
        return user
from logs.services import create_config_log
from shop.models import Shop
from shop.services import update_shop_data
from staff.services import get_staff_by_user_id_and_shop_id_with_user
def create_config_log_interface(log_info: dict):
    """Cross-app facade: record one settings-module operation log entry."""
    return create_config_log(log_info)
def update_shop_data_interface(shop: Shop, args: dict):
    """Cross-app facade: apply the field updates in ``args`` to ``shop``."""
    return update_shop_data(shop, args)
def get_staff_by_user_id_and_shop_id_with_user_interface(
    user_id: int, shop_id: int
):
    """Cross-app facade: fetch the staff record (joined with user) for a user in a shop."""
    return get_staff_by_user_id_and_shop_id_with_user(user_id, shop_id)
from order.selectors import list_customer_orders
def list_customer_orders_interface(
    shop_id: int,
    customer_id: int,
    order_types: list,
    order_pay_types: list,
    order_delivery_methods: list,
    order_status: list,
):
    """Cross-app facade: fetch one customer's historical orders.

    All filter arguments (types, pay types, delivery methods, status)
    are forwarded unchanged to ``order.selectors.list_customer_orders``.
    """
    return list_customer_orders(
        shop_id,
        customer_id,
        order_types,
        order_pay_types,
        order_delivery_methods,
        order_status,
    )
from django.apps import AppConfig
class PvuvConfig(AppConfig):
    """Django AppConfig for the pvuv (page-view / unique-visitor) app."""
    name = 'pvuv'
from rest_framework import status
from django_redis import get_redis_connection
from webargs.djangoparser import use_args
from webargs import fields, validate
from wechatpy.oauth import WeChatOAuth
from customer.services import create_customer
from user.constant import UserLoginType, Sex
from user.models import User
from user.utils import jwt_response_payload_handler, ZhiHaoJWTAuthentication
from wsc_django.utils.arguments import DecryptPassword
from wsc_django.apps.settings import MP_APPSECRET, MP_APPID
from wsc_django.utils.constant import PHONE_RE, PASSWORD_RE, EMAIL_RE
from wsc_django.utils.sms import gen_sms_code, YunPianSms, TencentSms
from wsc_django.utils.views import UserBaseView, MallBaseView, AdminBaseView, GlobalBaseView, SuperBaseView
from user.serializers import UserCreateSerializer, SuperUserSerializer, EmailSerializer, UserSerializer
from user.interface import (
get_customer_by_user_id_and_shop_id_interface,
get_customer_by_user_id_and_shop_code_interface,
)
from user.services import (
get_user_by_wx_unionid,
get_openid_by_user_id_and_appid,
create_user_openid,
get_user_by_phone,
get_user_by_phone_and_password,
update_user_basic_data,
update_user_phone, validate_sms_code, update_user_password, send_email, get_user_by_email
)
class AdminUserAuthorizationView(SuperBaseView):
    """Super-admin backend: verify a user's login state from token / refresh_token.

    Response contract:
      - token valid            -> {'expire': False, 'user_data': ...}
      - token stale, refresh ok -> {'expire': True, 'token': <new>, 'user_data': ...}
      - both stale             -> HTTP 401
    """
    @use_args(
        {
            "token": fields.String(required=True, allow_none=True, comment="token值"),
            "refresh_token": fields.String(required=True, allow_none=True, comment="refresh_token值"),
            "shop_code": fields.String(required=False, comment="商铺编号,若从小程序端调用则必传")
        }
    )
    def post(self, request, args):
        jwt = ZhiHaoJWTAuthentication()
        res, user = jwt.authenticate_token(args.get("token"))
        if not res:
            # Access token failed: fall back to the refresh token.
            refresh_res, user = jwt.authenticate_token(args.get("refresh_token"))
            if refresh_res:
                # Token expired but refresh token still valid: issue a fresh
                # token and return it together with the user data.
                token = self._refresh_current_user(user)
                serializer = SuperUserSerializer(user)
                return self.send_success(data={'expire': True, 'token': token, 'user_data': serializer.data})
            else:
                # Both tokens expired: the client must log in again.
                return self.send_error(
                    status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
                )
        else:
            # Token still valid: return user data without a new token.
            serializer = SuperUserSerializer(user)
            shop_code = args.get("shop_code", None)
            user_data = dict(serializer.data)
            if shop_code:
                # Mini-program callers also need the user's loyalty points
                # and new-customer flag for this shop.
                customer = get_customer_by_user_id_and_shop_code_interface(user.id, shop_code)
                user_data["points"] = round(float(customer.point), 2) if customer else 0
                user_data["is_new_customer"] = (
                    customer.is_new_customer() if customer else True
                )
            return self.send_success(data={'expire': False, 'user_data': user_data})
class SuperUserView(SuperBaseView):
    """Super-admin backend: GET user detail, PUT basic profile updates.

    Both endpoints are signed requests: `sign` is validated against
    (`user_id`, `timestamp`) by ``validate_sign``.
    """
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
        },
        location="query"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def get(self, request, args):
        # Return the authenticated user's profile, or 401 when not logged in.
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        serializer = SuperUserSerializer(user)
        return self.send_success(data=serializer.data)
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "nickname": fields.String(required=False, validate=[validate.Length(1, 15)], comment="用户昵称"),
            "realname": fields.String(required=False, validate=[validate.Length(1, 15)], comment="用户真实姓名"),
            "sex": fields.Integer(
                required=False,
                validate=[validate.OneOf([Sex.UNKNOWN, Sex.FEMALE, Sex.MALE])],
            ),
            "birthday": fields.Date(required=False, comment="出生日期"),
            "head_image_url": fields.String(required=False, validate=[validate.Length(0,1024)], comment="用户头像")
        }
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def put(self, request, args):
        # Partial update of nickname / realname / sex / birthday / avatar.
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        if not args:
            return self.send_fail(error_text="参数有误")
        user = update_user_basic_data(user, args)
        serializer = UserSerializer(user)
        return self.send_success(data=serializer.data)
class SuperUserPhoneView(SuperBaseView):
    """Super-admin backend: change the current user's phone number.

    Requires a signed request plus an SMS verification code for the new phone.
    """
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "phone": fields.String(required=True, validate=[validate.Regexp(PHONE_RE)], comment="手机号"),
            "sms_code": fields.String(required=True, comment="短信验证码"),
        },
        location="json"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def put(self, request, args):
        user = self._get_current_user(request)
        phone = args["phone"]
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        # Verify the SMS code sent to the new phone number.
        success, info = validate_sms_code(phone, args["sms_code"])
        if not success:
            return self.send_fail(error_text=info)
        # Persist the new phone (may fail e.g. if the phone is taken).
        success, info = update_user_phone(user, phone)
        if not success:
            return self.send_fail(error_text=info)
        return self.send_success()
class SuperUserPasswordView(SuperBaseView):
    """Super-admin backend: change password (signed request + SMS verification).

    On success a fresh token pair is issued, since the old tokens embed
    the previous password hash and become invalid.
    """
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "phone": fields.String(required=True, validate=[validate.Regexp(PHONE_RE)], comment="手机号"),
            "sms_code": fields.String(required=True, comment="短信验证码"),
            "password1": DecryptPassword(required=True, validate=[validate.Regexp(PASSWORD_RE)], comment="密码"),
            "password2": DecryptPassword(required=True, validate=[validate.Regexp(PASSWORD_RE)], comment="重复密码"),
        }
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def put(self, request, args):
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        phone = args["phone"]
        # Verify the SMS code before allowing a password change.
        success, info = validate_sms_code(phone, args["sms_code"])
        if not success:
            return self.send_fail(error_text=info)
        # update_user_password also checks password1 == password2.
        success, info = update_user_password(user, args["password1"] ,args["password2"])
        if not success:
            return self.send_fail(error_text=info)
        # Re-issue tokens so the client stays logged in with the new password.
        token, refresh_token = self._set_current_user(user)
        response_data = jwt_response_payload_handler(token, refresh_token, user, request)
        return self.send_success(data=response_data)
class SuperUserEmailView(SuperBaseView):
    """Super-admin backend: email workflows.

    GET  - activate an email from the verification link's token;
    PUT  - bind an email address to the current user;
    POST - (re)send the verification email.
    """
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "token": fields.String(required=True, comment="验证token"),
        },
        location="query"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def get(self, request, args):
        token = args["token"]
        # Decode the signed verification token back into a user.
        user = User.check_verify_email_token(token)
        if user is None:
            return self.send_error(
                status_code=status.HTTP_400_BAD_REQUEST, error_message={"detail": "链接信息无效"}
            )
        else:
            # Mark the email as verified.
            user.email_active = True
            user.save()
            return self.send_success()
    @use_args(
        {
            "email": fields.String(required=True, validate=[validate.Regexp(EMAIL_RE)], comment="邮箱")
        }
        , location="json"
    )
    def put(self, request, args):
        # Bind an email to the current user; rejects emails already in use.
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        check_user = get_user_by_email(args["email"])
        if check_user:
            return self.send_fail(error_text="该邮箱已绑定其他用户")
        serializer = EmailSerializer(user, data=args)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return self.send_success()
    @use_args(
        {
            "email": fields.String(required=True, validate=[validate.Regexp(EMAIL_RE)], comment="邮箱")
        }
        , location="json"
    )
    def post(self, request, args):
        # Send the verification email to the given address.
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        success, info = send_email(user, args["email"])
        if not success:
            return self.send_fail(error_text=info)
        return self.send_success()
class AdminUserView(UserBaseView):
    """Admin backend: login / register endpoint.

    Supports three login types (``login_type``): WeChat code, phone+password,
    and phone+SMS code. Phone+SMS auto-registers unknown phone numbers.
    """
    # The base class enforces authentication; this endpoint must be open.
    authentication_classes = ()
    @use_args(
        {
            "code": fields.String(required=False, comment="微信code"),
            "phone": fields.String(required=False, validate=[validate.Regexp(PHONE_RE)], comment="手机号"),
            "sms_code": fields.String(required=False, comment="短信验证码"),
            # BUG FIX: the field type was a broken placeholder; use the
            # DecryptPassword field as the other password endpoints do.
            "password": DecryptPassword(required=False, validate=[validate.Regexp(PASSWORD_RE)], comment="密码"),
            "login_type": fields.Integer(
                required=True,
                validate=[validate.OneOf(
                    [UserLoginType.WX, UserLoginType.PWD, UserLoginType.PHONE]
                )],
                comment="登录方式,0:微信,1:密码,2:手机"
            )
        },
        location="json",
    )
    def post(self, request, args):
        login_type = args["login_type"]
        code = args.get("code", None)
        phone = args.get("phone", None)
        pwd = args.get("password", None)
        sms_code = args.get("sms_code", None)
        # WeChat login
        if login_type == UserLoginType.WX:
            if not code:
                return self.send_fail(error_text="微信登录缺少code")
            # NOTE(review): the WeChat branch only validates `code` and then
            # falls through to the bottom where `user` is unbound — the WX
            # flow appears unfinished here (see MallUserView for the full one).
        # Password login
        elif login_type == UserLoginType.PWD:
            if not phone and not pwd:
                return self.send_fail(error_text="密码登录缺手机号或密码")
            success, user = get_user_by_phone_and_password(phone, pwd, login_type)
            if not success:
                # On failure `user` carries the error message.
                return self.send_fail(error_text=user)
            token, refresh_token = self._set_current_user(user)
            response_data = jwt_response_payload_handler(token, refresh_token, user, request)
            return self.send_success(data=response_data)
        # Phone + SMS-code login (auto-registers new phones)
        else:
            if not phone and not sms_code:
                return self.send_fail(error_text="密码登录缺手机号或验证码")
            redis_conn = get_redis_connection("verify_codes")
            real_sms_code = redis_conn.get("sms_%s"%phone)
            if not real_sms_code:
                return self.send_fail(error_text="验证码已过期")
            if str(real_sms_code.decode()) != sms_code:
                return self.send_error(
                    status_code=status.HTTP_400_BAD_REQUEST, error_message={"detail":"短信验证码错误"}
                )
            success, user = get_user_by_phone(phone, login_type)
            if not success:
                return self.send_fail(error_text=user)
            # Unknown phone: register a new account with default profile.
            if not user:
                data = {
                    "phone": phone,
                    "username": phone,
                    "nickname": "用户{phone}".format(phone=phone),
                    "head_image_url": "http://img.senguo.cc/FlMKOOnlycuoZp1rR39LyCFUHUgl"
                }
                serializer = UserCreateSerializer(data=data)
                serializer.is_valid()
                user = serializer.save()
        token, refresh_token = self._set_current_user(user)
        response_data = jwt_response_payload_handler(token, refresh_token, user, request)
        return self.send_success(data=response_data)
class AdminUserLogoutView(SuperBaseView):
    """Admin backend: log out. The client discards the JWT; the server only
    verifies the caller is currently authenticated."""
    def post(self, request):
        # Token removal happens on the front end; nothing to invalidate here.
        current = self._get_current_user(request)
        if current:
            return self.send_success()
        return self.send_error(
            status_code=status.HTTP_401_UNAUTHORIZED, error_message={"detail": "用户未登录"}
        )
class MallUserView(MallBaseView):
    """Mall (storefront): login endpoint for a given shop.

    Supports WeChat OAuth, phone+password and phone+SMS logins. Unlike
    the admin endpoint, phone+SMS does NOT auto-register (use
    MallUserRegisterView); a customer record is created on first login.
    """
    # The base class enforces authentication; login must be open.
    authentication_classes = ()
    @use_args(
        {
            "code": fields.String(required=False, comment="微信code"),
            "phone": fields.String(required=False, validate=[validate.Regexp(PHONE_RE)], comment="手机号"),
            "sms_code": fields.String(required=False, comment="短信验证码"),
            # BUG FIX: the field type was a broken placeholder; use the
            # DecryptPassword field as the other password endpoints do.
            "password": DecryptPassword(required=False, validate=[validate.Regexp(PASSWORD_RE)], comment="密码"),
            "login_type": fields.Integer(
                required=True,
                validate=[validate.OneOf(
                    [UserLoginType.WX, UserLoginType.PWD, UserLoginType.PHONE]
                )],
                comment="登录方式,0:微信,1:密码,2:手机"
            )
        }, location="json",
    )
    def post(self, request, args, shop_code):
        login_type = args["login_type"]
        code = args.get("code", None)
        phone = args.get("phone", None)
        pwd = args.get("password", None)
        sms_code = args.get("sms_code", None)
        self._set_current_shop(request, shop_code)
        shop = self.current_shop
        # TODO: the WeChat login flow still needs rework.
        # WeChat OAuth login
        if login_type == UserLoginType.WX:
            if not code:
                return self.send_fail(error_text="微信登录缺少code")
            shop_appid = MP_APPID
            shop_appsecret = MP_APPSECRET
            wechat_oauth = WeChatOAuth(
                app_id=shop_appid,
                secret=shop_appsecret,
                redirect_uri="",
                scope="snsapi_userinfo",
            )
            try:
                # Exchange the code for an access token, then fetch profile.
                wechat_oauth.fetch_access_token(code)
                user_info = wechat_oauth.get_user_info()
            except:
                return self.send_fail(error_text='获取微信授权失败')
            # Example of the user_info payload returned by WeChat:
            # {
            #     "openid": "...", "nickname": "...", "sex": 1,
            #     "language": "zh_CN", "city": "...", "province": "...",
            #     "country": "...", "headimgurl": "http://...",
            #     "privilege": [], "unionid": "..."
            # }
            user_info["headimgurl"] = user_info["headimgurl"].replace("http://", "https://")
            user = get_user_by_wx_unionid(user_info.get("unionid"))
            if not user:
                # First WeChat login: create a user from the WeChat profile.
                new_user_info = {
                    "username":user_info.get('phone'),
                    "phone": user_info.get('phone'),
                    "sex": user_info.get('sex'),
                    "nickname": user_info.get("nickname"),
                    "realname": user_info.get("realname"),
                    "head_image_url": user_info.get("headimgurl"),
                    "wx_unionid": user_info.get("unionid"),
                    "wx_openid": user_info.get("openid"),
                    "wx_country": user_info.get("country"),
                    "wx_province": user_info.get("province"),
                    "wx_city": user_info.get("city"),
                }
                user_serializer = UserCreateSerializer(data=new_user_info)
                # BUG FIX: DRF requires is_valid() before save(); siblings
                # (AdminUserView, MallUserRegisterView) already do this.
                user_serializer.is_valid()
                user = user_serializer.save()
            ret, user_openid = get_openid_by_user_id_and_appid(user.id, shop_appid)
            # Record the user's openid for this app if not yet stored.
            if not ret:
                info = {
                    'user_id': user.id,
                    'mp_appid': shop_appid,
                    'wx_openid': user_info.get("openid"),
                }
                create_user_openid(**info)
        # Password login
        elif login_type == UserLoginType.PWD:
            if not phone and not pwd:
                return self.send_fail(error_text="密码登录缺手机号或密码")
            success, user = get_user_by_phone_and_password(phone, pwd, login_type)
            if not success:
                return self.send_fail(error_text=user)
        # Phone + SMS-code login
        else:
            if not phone and not sms_code:
                return self.send_fail(error_text="密码登录缺手机号或验证码")
            redis_conn = get_redis_connection("verify_codes")
            real_sms_code = redis_conn.get("sms_%s" % phone)
            if not real_sms_code:
                return self.send_fail(error_text="验证码已过期")
            if str(real_sms_code.decode()) != sms_code:
                return self.send_error(
                    status_code=status.HTTP_400_BAD_REQUEST, error_message={"detail": "短信验证码错误"}
                )
            success, user = get_user_by_phone(phone, login_type)
            if not success:
                return self.send_fail(error_text=user)
            # Mall login never auto-registers: unknown phone is an error.
            if not user:
                return self.send_fail(error_text="该用户不存在")
        customer = get_customer_by_user_id_and_shop_id_interface(user.id, shop.id)
        # First visit to this shop: create the customer record.
        if not customer:
            create_customer(user.id, shop.id)
        token, refresh_token = self._set_current_user(user)
        response_data = jwt_response_payload_handler(token, refresh_token, user, request)
        return self.send_success(data=response_data)
class MallUserRegisterView(MallBaseView):
    """Mall (storefront): register a new account with phone + SMS code + password."""
    # The base class enforces authentication; registration must be open.
    authentication_classes = ()
    @use_args(
        {
            "phone": fields.String(required=True, validate=[validate.Regexp(PHONE_RE)], comment="手机号"),
            "sms_code": fields.String(required=True, comment="短信验证码"),
            "password1": fields.String(required=True, comment="密码1"),
            "password2": fields.String(required=True, comment="密码2"),
        }, location="json",
    )
    def post(self, request, args, shop_code):
        self._set_current_shop(request, shop_code)
        shop = self.current_shop
        phone = args.get("phone")
        sms_code = args.get("sms_code")
        # The two password entries must match.
        if args.get("password1") != args.get("password2"):
            return self.send_fail(error_text="两次输入的密码不一致")
        # Check the SMS code stored in redis for this phone.
        redis_conn = get_redis_connection("verify_codes")
        real_sms_code = redis_conn.get("sms_%s" % phone)
        if not real_sms_code:
            return self.send_fail(error_text="验证码已过期")
        if str(real_sms_code.decode()) != sms_code:
            return self.send_error(
                status_code=status.HTTP_400_BAD_REQUEST, error_message={"detail": "短信验证码错误"}
            )
        data = {
            "phone": phone,
            "username": phone,
            "nickname": "用户{phone}".format(phone=phone),
            "head_image_url": "http://img.senguo.cc/FlMKOOnlycuoZp1rR39LyCFUHUgl",
            # BUG FIX: key was a broken placeholder; both passwords are
            # verified equal above, so use password1.
            "password": args.get("password1")
        }
        serializer = UserCreateSerializer(data=data)
        serializer.is_valid()
        user = serializer.save()
        customer = get_customer_by_user_id_and_shop_id_interface(user.id, shop.id)
        # First visit to this shop: create the customer record.
        if not customer:
            create_customer(user.id, shop.id)
        token, refresh_token = self._set_current_user(user)
        response_data = jwt_response_payload_handler(token, refresh_token, user, request)
        return self.send_success(data=response_data)
class MallUserAuthorizationView(MallBaseView):
    """Mall (storefront): return the logged-in user's profile for a shop,
    enriched with loyalty points and the new-customer flag."""
    def post(self, request, shop_code):
        self._set_current_shop(request, shop_code)
        shop = self.current_shop
        user = self.current_user
        serializer = UserSerializer(user)
        # Enrich the profile with the user's loyalty data for this shop.
        customer = get_customer_by_user_id_and_shop_id_interface(user.id, shop.id)
        customer_info = dict(serializer.data)
        customer_info["points"] = round(float(customer.point), 2) if customer else 0
        customer_info["is_new_customer"] = (
            customer.is_new_customer() if customer else True
        )
        return self.send_success(data=customer_info)
class SMSCodeView(GlobalBaseView):
    """Send an SMS verification code to a phone.

    Rate-limited to one send per (phone, caller IP) per minute; codes are
    stored in the ``verify_codes`` redis connection for 5 minutes. The
    actual third-party SMS dispatch is currently commented out (codes are
    only printed), presumably for development — verify before production.
    """
    @use_args(
        {
            "phone": fields.String(
                required=True, validate=[validate.Regexp(PHONE_RE)], comment="手机号"
            )
        },
        location="json"
    )
    def post(self, request, args):
        phone = args["phone"]
        # Prefer the proxy-forwarded client IP when present.
        if self.request.META.get('HTTP_X_FORWARDED_FOR'):
            remote_ip = request.META.get("HTTP_X_FORWARDED_FOR")
        else:
            remote_ip = self.request.META.get("REMOTE_ADDR")
        phone_ip = "bind_phone_ip:%s:%s" % (phone, remote_ip)
        redis_conn = get_redis_connection("verify_codes")
        # Rate-limit: key exists while the 60s window is open.
        if redis_conn.get(phone_ip):
            return self.send_fail(error_text="一分钟只能发生一次")
        sms_code = gen_sms_code()
        print("sms_code: ", sms_code)  # for testing only
        # Store the code BEFORE sending, so verification works even if
        # the send callback races with the user's submit.
        pl = redis_conn.pipeline()
        pl.setex("sms_%s" % (phone), 300, sms_code)  # code expires in 300s
        pl.setex(phone_ip, 60, 1)  # one send per 60s
        pl.execute()
        # try:
        #     # # 调用第三方接口发送短信
        #     use = "绑定手机号"
        #     # 先用腾讯发,失败就云片用发
        #     ret, info = TencentSms.send_tencent_verify_code(phone, sms_code, use)
        #     if not ret:
        #         ret, info = YunPianSms.send_yunpian_verify_code(phone, sms_code, use)
        #         if not ret:
        #             return self.send_fail(error_text=info)
        # except Exception as e:
        #     print(e)
        #     return self.send_fail(error_text="短信发送失败")
        return self.send_success()
<file_sep>/wsc_django/wsc_django/apps/logs/serializers.py
from rest_framework import serializers
from wsc_django.utils.constant import DateFormat
class OperatorSerializer(serializers.Serializer):
    """Serializer for the person who performed a logged operation."""
    # operator_id is sourced from the user's primary key.
    operator_id = serializers.IntegerField(source="id", label="操作人ID(User_id)")
    realname = serializers.CharField(required=False, label="用户真实姓名")
    nickname = serializers.CharField(required=False, label="微信昵称")
    sex = serializers.IntegerField(required=False, label="性别")
    head_image_url = serializers.CharField(required=False, label="头像")
class _LogSerializer(serializers.Serializer):
    """Shared base for log serializers — internal, not exposed directly."""
    operate_time = serializers.DateTimeField(format=DateFormat.TIME, label="操作时间")
    operate_module = serializers.IntegerField(label="所属板块ID")
    operate_type_text = serializers.CharField(label="操作类型文字版")
    operate_content = serializers.CharField(label="操作内容")
    operator = OperatorSerializer(label="操作人")
class OrderLogSerializer(_LogSerializer):
    """Order log serializer: shared log fields plus order number and the
    old/new values extracted from the log content."""
    order_num = serializers.CharField(label="订单号")
    old_value = serializers.CharField(required=False, label="旧值,从content取得")
    new_value = serializers.CharField(required=False, label="新值,从content取得")
class ConfigLogSerializer(_LogSerializer):
    """Settings (config) log serializer; inherits all shared log fields."""
class ProductLogSerializer(_LogSerializer):
    """Product log serializer; inherits all shared log fields."""
class PromotionLogSerializer(_LogSerializer):
    """Promotion log serializer; inherits all shared log fields.
    (Original docstring said "product log" — a copy-paste slip.)"""
<file_sep>/wsc_django/wsc_django/apps/product/views.py
from rest_framework import status
from webargs.djangoparser import use_args
from webargs import fields, validate
from logs.constant import ProductLogType
from product.constant import ProductStatus, ProductOperationType
from promotion.constant import PromotionType
from wsc_django.utils.arguments import StrToList
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import AdminBaseView, MallBaseView
from product.interface import list_order_with_order_details_by_product_id_interface, create_product_log_interface, \
list_alive_groupon_by_product_ids_interface
from product.serializers import (
MallProductSerializer,
ProductCreateSerializer,
AdminProductSerializer,
AdminProductsSerializer,
MallProductGroupSerializer,
AdminProductGroupSerializer,
AdminProductSaleRecordSerializer,
)
from product.services import (
get_product_by_id,
list_product_by_ids,
update_products_status,
list_product_by_filter,
sort_shop_product_group,
get_product_with_group_name,
delete_product_by_ids_and_shop_id,
update_product_product_group_by_ids,
get_product_group_by_shop_id_and_id,
list_product_group_with_product_list,
list_product_group_with_product_count,
delete_product_group_by_id_and_shop_id,
)
class AdminProductView(AdminBaseView):
    """Admin backend, single product: POST create, GET detail, PUT edit.

    All methods require the ADMIN_PRODUCT staff permission and operate
    within the current shop.
    """
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "name": fields.String(
                required=True, validate=[validate.Length(1, 15)], comment="货品名"
            ),
            "group_id": fields.Integer(required=True, comment="分组id"),
            "price": fields.Decimal(
                required=True,
                validate=[validate.Range(0, min_inclusive=False)],
                comment="货品单价",
            ),
            "storage": fields.Decimal(
                required=True, validate=[validate.Range(0)], comment="商品库存"
            ),
            "code": fields.String(required=False, comment="货品编码"),
            "summary": fields.String(
                required=False, validate=[validate.Length(0, 20)], comment="货品简介"
            ),
            "pictures": fields.List(
                fields.String(),
                required=False,
                validate=[validate.Length(1, 5)],
                comment="轮播图",
            ),
            "description": fields.String(required=False, comment="图文描述"),
            "cover_image_url": fields.String(required=True, comment="首页图片"),
        },
        location="json",
    )
    def post(self, request, args):
        # Create a product under an existing group of the current shop.
        group_id = args.get("group_id")
        args["shop_id"] = self.current_shop.id
        product_group = get_product_group_by_shop_id_and_id(self.current_shop.id, group_id)
        if not product_group:
            return self.send_fail(error_text="货品分组不存在")
        serializer = ProductCreateSerializer(data=args, context={'self':self})
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success(data=serializer.data)
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "product_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="货品ID"
            )
        },
        location="query"
    )
    def get(self, request, args):
        # Fetch one product (joined with its group name) of the current shop.
        current_shop = self.current_shop
        product_id = args.get("product_id")
        product = get_product_with_group_name(current_shop.id, product_id)
        if not product:
            return self.send_fail(error_text="货品不存在")
        serializer = AdminProductSerializer(product)
        return self.send_success(data=serializer.data)
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "name": fields.String(
                required=True, validate=[validate.Length(1, 15)], comment="货品名"
            ),
            "group_id": fields.Integer(required=True, comment="分组id"),
            "price": fields.Decimal(
                required=True,
                validate=[validate.Range(0, min_inclusive=False)],
                comment="货品单价",
            ),
            "code": fields.String(required=False, comment="货品编码"),
            "storage": fields.Decimal(
                required=True, validate=[validate.Range(0)], comment="库存"
            ),
            "summary": fields.String(
                required=False, validate=[validate.Length(0, 20)], comment="货品简介"
            ),
            "pictures": fields.List(
                fields.String(),
                required=False,
                validate=[validate.Length(1, 5)],
                comment="轮播图",
            ),
            "description": fields.String(required=False, comment="图文描述"),
            "cover_image_url": fields.String(required=False, comment="首页图片"),
            "product_id": fields.Integer(required=True, comment="货品ID"),
        },
        location="json",
    )
    def put(self, request, args):
        # Edit an existing product; both product and target group must exist.
        shop_id = self.current_shop.id
        product_id = args.pop("product_id")
        product = get_product_by_id(shop_id, product_id)
        if not product:
            return self.send_fail(error_text="货品不存在")
        group_id = args.pop("group_id")
        product_group = get_product_group_by_shop_id_and_id(shop_id, group_id)
        if not product_group:
            return self.send_fail(error_text="货品分组不存在")
        args["group_id"] = product_group.id
        serializer = AdminProductSerializer(product, data=args, context={"self":self})
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success(data=serializer.data)
class AdminProductsView(AdminBaseView):
    """Admin backend, product batches: GET list, PUT on/off-shelf, DELETE.

    Batch status changes and deletions skip products that currently have
    an alive groupon (checked via the groupon interface) and write an
    operation log listing the affected product names.
    """
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "keyword": fields.String(required=False, missing="", comment="货品关键字"),
            "group_id": fields.Integer(required=True, comment="分组ID"),
            "page": fields.Integer(required=False, missing=1, comment="页码"),
            "status": StrToList(
                required=False,
                missing=[ProductStatus.ON, ProductStatus.OFF],
                validate=[validate.ContainsOnly([ProductStatus.ON, ProductStatus.OFF])],
                comment="货品状态,上架、下架",
            ),
            "promotion_types": StrToList(
                required=False,
                missing=[],
                validate=[
                    validate.ContainsOnly({PromotionType.NORMAL, PromotionType.GROUPON})
                ],
                comment="货品营销类型",
            ),
        },
        location="query"
    )
    def get(self, request, args):
        page = args.pop("page")
        shop = self.current_shop
        product_list = list_product_by_filter(shop.id, **args)
        # page < 0 means "no pagination": return the full serialized list.
        if page < 0:
            product_list = {"results": AdminProductsSerializer(product_list, many=True).data}
        else:
            product_list = self._get_paginated_data(product_list, AdminProductsSerializer)
        return self.send_success(data_list=product_list)
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "product_ids": fields.List(
                fields.Integer(required=True),
                required=True,
                validate=[validate.Length(1)],
                # TYPO FIX: was "commet="; every other field uses "comment=".
                comment="货品ID列表",
            ),
            "operation_type": fields.Integer(
                required=False,
                missing=1,
                validate=[validate.OneOf(
                    [ProductOperationType.ON, ProductOperationType.OFF]
                )],
                comment="操作类型,1:上架,2:下架"
            )
        },
        location="json"
    )
    def put(self, request, args):
        # Batch on-shelf / off-shelf; products in an alive groupon are skipped.
        operation_type = args.get("operation_type")
        product_ids = args.get("product_ids")
        product_list = list_product_by_ids(self.current_shop.id, product_ids)
        product_ids = [pl.id for pl in product_list]
        product_ids_set = list_alive_groupon_by_product_ids_interface(product_ids)
        product_name_list = update_products_status(
            product_list, operation_type, product_ids_set
        )
        # Log only when at least one product was actually changed.
        if product_name_list:
            if operation_type == ProductOperationType.ON:
                log_operate_type = ProductLogType.ON_PRODUCT
            else:
                log_operate_type = ProductLogType.OFF_PRODUCT
            log_info = {
                "shop_id": self.current_shop.id,
                "operator_id": self.current_user.id,
                "operate_type": log_operate_type,
                "operate_content": "、".join(product_name_list),
            }
            create_product_log_interface(log_info)
        return self.send_success()
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "product_ids": fields.List(
                fields.Integer(required=True),
                required=True,
                validate=[validate.Length(1)],
                # TYPO FIX: was "commet="; every other field uses "comment=".
                comment="货品ID列表",
            )
        },
        location="json",
    )
    def delete(self, request, args):
        # Batch delete; products in an alive groupon are skipped.
        product_ids = args.get("product_ids")
        product_list = list_product_by_ids(self.current_shop.id, product_ids)
        product_ids = [pl.id for pl in product_list]
        product_ids_set = list_alive_groupon_by_product_ids_interface(product_ids)
        product_name_list = delete_product_by_ids_and_shop_id(product_list, product_ids_set)
        # Log the deletion with the affected product names.
        if product_name_list:
            log_info = {
                "shop_id": self.current_shop.id,
                "operator_id": self.current_user.id,
                "operate_type": ProductLogType.DELETE_PRODUCT,
                "operate_content": "、".join(product_name_list),
            }
            create_product_log_interface(log_info)
        return self.send_success()
class AdminProductGroupsView(AdminBaseView):
    """后台-货品-批量更新货品分组
    (Admin: batch-assign products to a group; list groups with product counts.)
    """

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "product_ids": fields.List(
                fields.Integer(required=True),
                required=True,
                validate=[validate.Length(1)],
                comment="货品ID列表",
            ),
            "group_id": fields.Integer(required=True, comment="货品分组ID"),
        },
        location="json"
    )
    def put(self, request, args):
        """Move the given products into the given product group."""
        shop = self.current_shop
        group_id = args.get("group_id")
        product_ids = args.pop("product_ids")
        # 校验分组是否存在 (verify the target group exists in this shop)
        product_group = get_product_group_by_shop_id_and_id(shop.id, group_id)
        if not product_group:
            return self.send_fail(error_text="货品分组不存在")
        # 获取货品,更新货品信息 (bulk-update the products' group field)
        update_product_product_group_by_ids(product_ids, group_id)
        return self.send_success()

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "status": StrToList(
                required=False,
                missing=[ProductStatus.ON, ProductStatus.OFF],
                validate=[
                    validate.ContainsOnly(
                        [ProductStatus.ON, ProductStatus.OFF]
                    )
                ],
                comment="货品状态,上架/下架",
            )
        },
        location="query"
    )
    def get(self, request, args):
        """List the shop's product groups, each with its product count
        (optionally filtered by product status)."""
        shop = self.current_shop
        product_group_with_count = list_product_group_with_product_count(shop.id, **args)
        serializer = AdminProductGroupSerializer(product_group_with_count, many=True)
        return self.send_success(data_list=serializer.data)
class AdminProductGroupView(AdminBaseView):
    """后台-货品-添加货品分组&编辑货品分组&删除货品分组
    (Admin: create / edit / delete a single product group.)
    """

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "name": fields.String(
                required=True, validate=[validate.Length(1, 10)], comment="分组名"
            ),
            "description": fields.String(
                required=False, validate=[validate.Length(0, 50)], comment="分组描述"
            ),
        },
        location="json",
    )
    def post(self, request, args):
        """Create a product group for the current shop."""
        serializer = AdminProductGroupSerializer(data=args, context={"self": self})
        if not serializer.is_valid():
            return self.send_error(status_code=status.HTTP_400_BAD_REQUEST, error_message=serializer.errors)
        serializer.save()
        return self.send_success(data=serializer.data)

    # fix: permission_required is now the outermost decorator (it used to sit
    # below use_args), so the permission check runs before the request body is
    # parsed — consistent with post/delete and every other admin view.
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "name": fields.String(
                required=True, validate=[validate.Length(1, 10)], comment="分组名"
            ),
            "description": fields.String(
                required=False, validate=[validate.Length(0, 50)], comment="分组描述"
            ),
            "group_id": fields.Integer(required=True, comment="分组ID"),
        },
        location="json"
    )
    def put(self, request, args):
        """Edit a product group's name/description."""
        group_id = args.pop("group_id")
        shop = self.current_shop
        product_group = get_product_group_by_shop_id_and_id(shop.id, group_id)
        if not product_group:
            return self.send_fail(error_text="货品分组不存在")
        # NOTE(review): validates against raw request.data rather than the
        # webargs-parsed `args`; the extra "group_id" key is ignored by the
        # serializer — confirm this is intended.
        serializer = AdminProductGroupSerializer(product_group, data=request.data, context={"self": self})
        if not serializer.is_valid():
            return self.send_error(error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return self.send_success()

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {"group_id": fields.Integer(required=True, comment="分组ID")}, location="json",
    )
    def delete(self, request, args):
        """Delete a product group; fails with a reason if the service refuses."""
        shop = self.current_shop
        user = self.current_user
        group_id = args.get("group_id")
        product_group = get_product_group_by_shop_id_and_id(shop.id, group_id)
        if not product_group:
            return self.send_fail(error_text="货品分组不存在")
        ret, info = delete_product_group_by_id_and_shop_id(
            product_group, group_id, shop.id, user.id
        )
        if not ret:
            return self.send_fail(error_text=info)
        else:
            return self.send_success()
class AdminProductGroupSortView(AdminBaseView):
    """后台-货品-分组排序
    (Admin: persist a new display order for the shop's product groups.)
    """

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "group_ids": fields.List(
                fields.Integer(required=True),
                required=True,
                validate=[validate.Length(1)],
                comment="货品分组ID列表",
            )
        },
        location="json"
    )
    def put(self, request, args):
        """Reorder the shop's groups to match the given id sequence."""
        group_ids = args.get("group_ids")
        shop = self.current_shop
        sort_shop_product_group(shop.id, group_ids)
        return self.send_success()
class AdminProductSaleRecordView(AdminBaseView):
    """后台-货品-货品详情-获取货品销售记录
    (Admin: paginated sale records of one product, from its order details.)
    """

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "product_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="货品ID"
            )
        },
        location="query"
    )
    def get(self, request, args):
        """Return the paginated sale records for one product of the shop."""
        product_id = args.get("product_id")
        shop = self.current_shop
        product_sale_record_list = list_order_with_order_details_by_product_id_interface(shop.id, product_id)
        # Paginate and serialize via the base view helper.
        product_sale_record_list = self._get_paginated_data(
            product_sale_record_list, AdminProductSaleRecordSerializer
        )
        return self.send_success(data_list=product_sale_record_list)
class AdminProductAliveGrouponView(AdminBaseView):
    """后台-货品-查询此刻或者未来有拼团活动的货品
    (Admin: of the given product ids, return those with a live or upcoming groupon.)
    """

    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_PRODUCT])
    @use_args(
        {
            "product_ids": fields.List(
                fields.Integer(required=True),
                required=True,
                validate=[validate.Length(1)],
                comment="货品ID列表",  # fix: was misspelled "commet", unlike sibling schemas
            )
        },
        location="query",
    )
    def get(self, request, args):
        """Return {"alive_grouon_product": [ids...]} for products with a groupon."""
        product_ids = args["product_ids"]
        groupon_product_set = list_alive_groupon_by_product_ids_interface(
            product_ids
        )
        # TODO(review): "alive_grouon_product" looks like a typo of
        # "alive_groupon_product", but it is part of the API contract with the
        # frontend, so it is kept as-is.
        product_ids_dict = {"alive_grouon_product": list(groupon_product_set)}
        return self.send_success(data=product_ids_dict)
class MallProductView(MallBaseView):
    """商城-货品-获取单个货品详情
    (Mall: detail of a single product, including its group name.)
    """

    @use_args({"product_id": fields.Integer(required=True, comment="货品ID")}, location="query")
    def get(self, request, args, shop_code):
        """Return one product's detail for the shop identified by shop_code."""
        # Resolve the shop from the URL's shop_code before anything else.
        self._set_current_shop(request, shop_code)
        shop = self.current_shop
        product_id = args.get("product_id")
        product = get_product_with_group_name(shop.id, product_id)
        serializer = MallProductSerializer(product)
        return self.send_success(data=serializer.data)
class MallProductsView(MallBaseView):
    """商城-货品-获取所有分组及旗下货品
    (Mall: every product group of the shop together with its products.)
    """

    def get(self, request, shop_code):
        """Serialize and return all groups (with nested products) of the shop."""
        self._set_current_shop(request, shop_code)
        groups = list_product_group_with_product_list(self.current_shop.id)
        serialized = MallProductGroupSerializer(groups, many=True)
        return self.send_success(data_list=serialized.data)
<file_sep>/wsc_django/wsc_django/apps/payment/urls.py
"""
支付相关的路由
"""
from django.urls import path, re_path
from payment import views
# URL table for the payment app.
urlpatterns = [
    re_path(r'^api/mall/(?P<shop_code>\w+)/payment/openid/$', views.MallPaymentOpenIdView.as_view()),  # 获取支付的openid (openid used for payment)
    re_path(r'^api/mall/(?P<shop_code>\w+)/openid/lcsw/$', views.MallOpenidLcswView.as_view()),  # 利楚openid接口 (Lcsw openid endpoint)
    path('api/payment/lcsw/callback/order/', views.LcswPaymentCallbackView.as_view()),  # 利楚商务回调 (Lcsw payment callback)
]
<file_sep>/wsc_django/wsc_django/apps/pvuv/services.py
import datetime
from pvuv.models import ProductBrowseRecord
def create_product_browse_record(info: dict):
    """
    创建一条货品浏览记录 (persist one product browse record).

    :param info: ProductBrowseRecord field values, e.g. {
        "shop_id": int,
        "user_id": int,
        "product_id": int,
        "start_time": datetime,
        "duration": int,
        "pre_page_name": str,
        "next_page_name": str
    }
    :return:
    """
    ProductBrowseRecord(**info).save()
def list_product_browse_record_by_id(
shop_id: int,
product_id: int,
from_date: datetime,
to_date: datetime,
):
"""
获取一个商品的访问记录列表
:param shop_id:
:param product_id:
:param from_date:
:param to_date:
:return:
"""
record_list = (
ProductBrowseRecord.objects.filter(
shop_id=shop_id, product_id=product_id, start_time__range=[from_date, to_date]
)
.order_by("-start_time", "id")
.all()
)
return record_list<file_sep>/wsc_django/wsc_django/apps/printer/urls.py
"""
打印机相关的路由
"""
from django.urls import path
# No printer routes are exposed yet; the empty list keeps this urls module importable.
urlpatterns = [
]
<file_sep>/wsc_django/wsc_django/apps/groupon/apps.py
from django.apps import AppConfig
class GrouponConfig(AppConfig):
    """Django app configuration for the groupon (拼团) app."""
    name = 'groupon'
<file_sep>/wsc_django/wsc_django/apps/config/serializers.py
from rest_framework import serializers
class ShareSetupSerializer(serializers.Serializer):
    """店铺分享序列化器类 (shop share settings: custom share title and description)."""
    custom_title_name = serializers.CharField(label="自定义分享标题名称")
    custom_share_description = serializers.CharField(label="自定义分享描述")
class SomeConfigSerializer(serializers.Serializer):
    """店铺的一些奇怪的开关设置 (misc boolean shop switches: voice alert, show
    off-shelf products, WeChat pay, cash-on-delivery)."""
    new_order_voice = serializers.BooleanField(label="新订单语音提醒")
    show_off_product = serializers.BooleanField(label="显示已下架货品")
    weixin_jsapi = serializers.BooleanField(label="是否开通微信支付")
    on_delivery = serializers.BooleanField(label="是否开通货到付款")
class PrinterSerializer(serializers.Serializer):
    """打印机序列化器类 (receipt printer settings: brand, terminal code/key, auto-print)."""
    brand = serializers.IntegerField(label="打印机品牌")
    code = serializers.CharField(label="打印机终端号")
    key = serializers.CharField(label="打印机秘钥")
    auto_print = serializers.BooleanField(label="是否订单自动打印")
class ReceiptSerializer(serializers.Serializer):
    """小票序列化器类 (receipt settings: footer text/QR code, barcode toggle, copies)."""
    bottom_msg = serializers.CharField(label="小票底部附加文字")
    bottom_qrcode = serializers.CharField(label="小票底部附加二维码")
    brcode_active = serializers.BooleanField(label="是否打印订单号条码")
    copies = serializers.IntegerField(label="默认打印份数")
class MsgNotifySerializer(serializers.Serializer):
    """消息通知序列化器类 (per-event WeChat/SMS notification switches)."""
    order_confirm_wx = serializers.BooleanField(label="开始配送/等待自提-微信")
    order_confirm_msg = serializers.BooleanField(label="开始配送/等待自提-短信")
    order_finish_wx = serializers.BooleanField(label="订单完成-微信")
    order_finish_msg = serializers.BooleanField(label="订单完成-短信")
    order_refund_wx = serializers.BooleanField(label="订单退款-微信")
    order_refund_msg = serializers.BooleanField(label="订单退款-短信")
    group_success_wx = serializers.BooleanField(label="成团提醒-微信")
    group_success_msg = serializers.BooleanField(label="成团提醒-短信")
    group_failed_wx = serializers.BooleanField(label="拼团失败-微信")
    group_failed_msg = serializers.BooleanField(label="拼团失败-短信")<file_sep>/wsc_django/wsc_django/utils/constant.py
class DateFormat:
    """strftime/strptime format strings shared across the project."""
    YEAR = "%Y"
    MONTH = "%Y-%m"
    DAY = "%Y-%m-%d"
    TIME = "%Y-%m-%d %H:%M:%S"
# Mainland-China mobile number: exactly 11 digits starting with 1.
PHONE_RE = r"^1[0-9]{10}$"
# Password: 8-16 chars of digits and lowercase letters, not all digits and
# not all letters (the two negative lookaheads).
PASSWORD_RE = r"^(?![0-9]+$)(?![a-z]+$)[0-9a-z]{8,16}$"
# Lightweight email shape check (not RFC 5322 complete).
EMAIL_RE = r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$"<file_sep>/wsc_django/wsc_django/apps/staff/serializers.py
from django_redis import get_redis_connection
from rest_framework import serializers
from staff.constant import StaffApplyStatus, StaffRole
from staff.services import create_staff_apply, create_staff
from user.serializers import UserSerializer
from wsc_django.utils.constant import DateFormat
class StaffSerializer(serializers.Serializer):
    """员工序列化器类 (staff record + read-only user profile; create/update staff)."""
    staff_id = serializers.IntegerField(read_only=True, source="id", label="员工id")
    roles = serializers.IntegerField(required=True, label="角色")
    permissions = serializers.IntegerField(required=True, label="权限")
    position = serializers.CharField(required=False, min_length=0, max_length=16, allow_blank=True, label="员工职位")
    entry_date = serializers.DateField(required=False, format=DateFormat.DAY, allow_null=True, label="入职日期")
    remark = serializers.CharField(required=False, min_length=0, max_length=20, allow_blank=True, label="备注")
    shop_id = serializers.IntegerField(write_only=True, required=False, label="商铺id,仅创建时使用")
    user_id = serializers.IntegerField(write_only=True, required=False, label="用户id,仅创建时使用")
    realname = serializers.CharField(read_only=True, label="用户真实姓名")
    nickname = serializers.CharField(read_only=True, label="微信昵称")
    sex = serializers.IntegerField(read_only=True, label="性别")
    phone = serializers.CharField(read_only=True, label="手机号")
    birthday = serializers.DateField(read_only=True, format=DateFormat.DAY, default="", label="用户生日")
    head_image_url = serializers.CharField(read_only=True, label="头像")

    def create(self, validated_data):
        """Delegate staff creation to the service layer."""
        staff = create_staff(validated_data)
        return staff

    def update(self, instance, validated_data):
        """Apply the editable fields to the staff row and save it."""
        # 超管不可编辑权限和角色 (a super admin's roles/permissions cannot be edited)
        if instance.roles == StaffRole.SHOP_SUPER_ADMIN:
            validated_data.pop("roles", 0)
            validated_data.pop("permissions", 0)
        for k, v in validated_data.items():
            setattr(instance, k, v)
        instance.save()
        return instance
class StaffApplySerializer(serializers.Serializer):
    """员工申请序列化器类 (staff application: read fields; update() approves it)."""
    staff_apply_id = serializers.IntegerField(source="id", read_only=True, label="员工申请id")
    status = serializers.IntegerField(read_only=True, label="申请状态")
    create_time = serializers.DateTimeField(
        read_only=True, required=False, source="create_at", format=DateFormat.TIME, label="员工申请创建时间"
    )
    user_info = UserSerializer(source="user", read_only=True, label="用户信息")

    def update(self, instance, validated_data):
        """Mark the application as PASS (approved) — the only supported update."""
        instance.status = StaffApplyStatus.PASS
        instance.save()
        return instance
class StaffApplyCreateSerializer(serializers.Serializer):
    """员工申请验证相关序列化器类 (validates SMS code, updates the user's profile,
    then creates a staff application)."""
    realname = serializers.CharField(write_only=True, max_length=64, min_length=1, required=True, label="真实姓名")
    phone = serializers.CharField(
        write_only=True, required=True, min_length=11, max_length=11, label="手机号"
    )
    sms_code = serializers.CharField(write_only=True, required=True, min_length=4, max_length=4, label="短信验证码")
    birthday = serializers.DateField(write_only=True, required=False, label="生日")

    def validate(self, attrs):
        """Check the submitted SMS code against the one cached in Redis."""
        user = self.context["self"].current_user
        sms_code = attrs.pop("sms_code")
        phone = attrs["phone"]
        # Code was stored under "sms_<user_id>_<phone>" when it was sent.
        redis_conn = get_redis_connection('verify_codes')
        real_sms_code = redis_conn.get("sms_%s_%s" % (user.id, phone))
        if not real_sms_code:
            raise serializers.ValidationError("无效的验证码")
        real_sms_code = real_sms_code.decode()
        if sms_code != real_sms_code:
            raise serializers.ValidationError("短信验证码不正确")
        return attrs

    def create(self, validated_data):
        """Persist the verified name/phone/birthday on the user, then create
        the staff application for the current shop."""
        user = self.context["self"].current_user
        shop = self.context["self"].current_shop
        user.realname = validated_data.pop("realname")
        user.phone = validated_data.pop("phone")
        if validated_data.get("birthday"):
            user.birthday = validated_data.pop("birthday")
        user.save()
        data = {"shop_id":shop.id, "user_id":user.id}
        staff = create_staff_apply(data)
        return staff<file_sep>/wsc_django/wsc_django/apps/order/serializers.py
from django.db import transaction
from rest_framework import serializers
from customer.serializers import AdminCustomerSerializer
from delivery.serializers import AdminDeliverySerializer
from groupon.serializers import GrouponAttendBasicSerializer
from order.services import create_order, _create_order_details, _create_order_address
from user.serializers import UserSerializer
from wsc_django.utils.constant import DateFormat
from wsc_django.utils.core import NumGenerator, FuncField
from wsc_django.utils.validators import delivery_method_validator, order_pay_type_validator
class OrderAddressSerializer(serializers.Serializer):
    """订单地址序列化器类 (order delivery address: recipient + region codes)."""
    name = serializers.CharField(label="收货人姓名")
    phone = serializers.CharField(label="收货人手机号")
    sex = serializers.IntegerField(label="收货人性别")
    address = serializers.CharField(label="详细地址")
    added = serializers.CharField(required=False, allow_blank=True, label="补充说明")
    province = serializers.IntegerField(label="省编码")
    city = serializers.IntegerField(label="市编码")
    county = serializers.IntegerField(label="区编码")
class AdminOrderDetailSerializer(serializers.Serializer):
    """后台订单详情序列化器类 (one order line: product, quantity, prices
    rounded to 2 decimals via FuncField)."""
    product_id = serializers.IntegerField(label="商品id")
    product_name = serializers.CharField(label="商品名")
    product_cover_picture = serializers.CharField(label="商品封面图")
    quantity_net = FuncField(lambda value: round(float(value), 2), label="购买量")
    price_net = FuncField(lambda value: round(float(value), 2), label="单价")
    amount_net = FuncField(lambda value: round(float(value), 2), label="总价")
class AdminOrderSerializer(serializers.Serializer):
    """后台订单序列化器类 (full admin order view: amounts before/after discount,
    delivery info, nested customer/delivery/detail serializers)."""
    id = serializers.IntegerField(required=False, label="订单id")
    delivery_method = serializers.IntegerField(
        required=True, validators=[delivery_method_validator], label="配送方式:1:送货上门,2:客户自提"
    )
    delivery_period = serializers.CharField(required=True, label="自提时间段(仅自提必传),举例:今天 12:00~13:00")
    order_num = serializers.CharField(required=True, label="订单号")
    create_time = serializers.DateTimeField(read_only=True, format=DateFormat.TIME, label="下单时间")
    pay_type = serializers.IntegerField(
        required=True, validators=[order_pay_type_validator], label="支付方式:1:微信支付,2:货到付款"
    )
    amount_gross = serializers.DecimalField(
        write_only=True, required=True, max_digits=13, decimal_places=4, label="货款金额(优惠前)"
    )
    amount_net = serializers.DecimalField(required=True, max_digits=13, decimal_places=4, label="货款金额(优惠后)")
    delivery_amount_gross = serializers.DecimalField(
        write_only=True, required=True, max_digits=13, decimal_places=4, label="货款金额运费(优惠前)",
    )
    delivery_amount_net = serializers.DecimalField(
        required=True, max_digits=13, decimal_places=4, label="货款金额运费(优惠后)"
    )
    total_amount_gross = serializers.DecimalField(
        write_only=True, required=True, max_digits=13, decimal_places=4, label="订单金额(优惠前)"
    )
    total_amount_net = serializers.DecimalField(
        required=True, max_digits=13, decimal_places=4, label="订单金额(优惠后)"
    )
    remark = serializers.CharField(required=False, default="", min_length=0, max_length=30, label="订单备注")
    order_type = serializers.IntegerField(
        required=True, label="订单类型,1:普通订单,2:拼团订单"
    )
    address = OrderAddressSerializer(required=False, label="订单地址")
    order_status = serializers.IntegerField(required=False, label="订单状态")
    refund_type = serializers.IntegerField(required=False, label="退款方式")
    customer = AdminCustomerSerializer(required=False, label="订单对应的客户对象")
    delivery = AdminDeliverySerializer(required=False, label="订单对应的配送记录对象")
    order_details = AdminOrderDetailSerializer(required=False, many=True, label="订单对应的订单详情对象")
class AdminOrdersSerializer(serializers.Serializer):
    """后台订单列表序列化器类,不需要那么多信息
    (slimmer variant of AdminOrderSerializer for list endpoints)."""
    id = serializers.IntegerField(label="订单id")
    delivery_method = serializers.IntegerField(
        required=True, validators=[delivery_method_validator], label="配送方式:1:送货上门,2:客户自提"
    )
    delivery_period = serializers.CharField(required=True, label="自提时间段(仅自提必传),举例:今天 12:00~13:00")
    order_num = serializers.CharField(required=True, label="订单号")
    order_status = serializers.IntegerField(required=False, label="订单状态")
    remark = serializers.CharField(required=False, default="", min_length=0, max_length=30, label="订单备注")
    pay_type = serializers.IntegerField(
        required=True, validators=[order_pay_type_validator], label="支付方式:1:微信支付,2:货到付款"
    )
    order_type = serializers.IntegerField(
        required=True, label="订单类型,1:普通订单,2:拼团订单"
    )
    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="下单时间")
    total_amount_net = serializers.DecimalField(
        required=True, max_digits=13, decimal_places=4, label="订单金额(优惠后)"
    )
    customer = AdminCustomerSerializer(required=False, label="订单对应的客户对象")
    order_details = AdminOrderDetailSerializer(required=False, many=True, label="订单对应的订单详情对象")
class MallOrderCreateSerializer(AdminOrderSerializer):
    """商城端订单创建序列化器类 (mall-side order creation; writes order,
    details, storage records and address inside one transaction)."""
    shop_id = serializers.IntegerField(write_only=True, label="商铺id")
    customer_id = serializers.IntegerField(write_only=True, label="客户id")
    groupon_attend_id = serializers.IntegerField(required=False, write_only=True, label="拼团参与id")

    def create(self, validated_data):
        # Cart items and the promotion the customer is attending are passed
        # in via the serializer context by the calling view.
        shop_id = validated_data["shop_id"]
        address_info = validated_data.pop("address")
        cart_items = self.context["cart_items"]
        promotion_attend = self.context["promotion_attend"]
        with transaction.atomic():
            # 创建一个保存点 (savepoint so partial writes can be undone)
            save_id = transaction.savepoint()
            try:
                # 创建订单 (create the order and assign its generated number)
                order = create_order(validated_data)
                order.set_num(NumGenerator.generate(shop_id, order.order_type))
                order.save()
                # 创建订单详情 (order details; also yields storage records)
                success, storage_records = _create_order_details(order, cart_items, promotion_attend)
                if not success:
                    raise serializers.ValidationError(storage_records)
                for storage_record in storage_records:
                    storage_record.order_num = order.order_num
                    storage_record.save()
                # 创建订单地址 (create the order's delivery address)
                address_info["order_id"] = order.id
                _create_order_address(address_info, shop_id, validated_data["delivery_method"])
            except Exception as e:
                # NOTE(review): bare print of the exception — consider a logger.
                print(e)
                # 回滚到保存点 (roll back to the savepoint, then re-raise)
                transaction.savepoint_rollback(save_id)
                raise e
            # 提交事务 (release the savepoint; atomic() commits on exit)
            transaction.savepoint_commit(save_id)
        return order
class MallOrderSerializer(AdminOrderSerializer):
    """商城端订单序列化器类 (mall order view: amounts rounded to 2 decimals,
    plus groupon attend info)."""
    amount_net = FuncField(lambda value: round(float(value), 2), label="货品总额")
    delivery_amount_net = FuncField(lambda value: round(float(value), 2), label="配送费/服务费")
    total_amount_net = FuncField(lambda value: round(float(value), 2), label="订单总额")
    groupon_attend = GrouponAttendBasicSerializer(required=False, label="拼团参与信息")
class MallOrdersSerializer(serializers.Serializer):
    """商城端订单列表序列化器类 (slim mall-side order list view)."""
    id = serializers.IntegerField(label="订单id")
    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="下单时间")
    order_type = serializers.IntegerField(label="订单类型,1:普通订单,2:拼团订单")
    order_status = serializers.IntegerField(required=False, label="订单状态")
    delivery_method = serializers.IntegerField(label="配送方式:1:送货上门,2:客户自提")
    delivery_type = serializers.IntegerField(required=False, label="订单配送类型:员工/快递")
    total_amount_gross = serializers.DecimalField(max_digits=13, decimal_places=2, label="订单金额(优惠前)")
    total_amount_net = serializers.DecimalField(max_digits=13, decimal_places=2, label="订单金额(优惠后)")
    order_num = serializers.CharField(label="订单编号")
    order_details = AdminOrderDetailSerializer(required=False, many=True, label="订单对应的订单详情对象")
    groupon_attend = GrouponAttendBasicSerializer(required=False, label="拼团参与信息")
class OrderLogSerializer(serializers.Serializer):
    """订单日志序列化器类 (order operation log entry with operator info)."""
    operate_type_text = serializers.CharField(label="操作类型文字版")
    operate_type = serializers.IntegerField(label="操作类型")
    operate_content = serializers.CharField(label="操作内容")
    operate_time = serializers.DateTimeField(format=DateFormat.TIME, label="操作时间")
    operator = UserSerializer(label="操作人信息")<file_sep>/wsc_django/wsc_django/apps/user/interface.py
from customer.models import Customer
from customer.services import get_customer_by_user_id_and_shop_id, get_customer_by_user_id_and_shop_code
def get_customer_by_user_id_and_shop_id_interface(user_id: int, shop_id: int) -> Customer:
    """
    通过user_id和shop_id获取客户 (interface wrapper around the customer service).

    :param user_id: id of the user
    :param shop_id: id of the shop
    :return: the matching Customer, if any
    """
    return get_customer_by_user_id_and_shop_id(user_id, shop_id)
def get_customer_by_user_id_and_shop_code_interface(user_id: int, shop_code: str) -> Customer:
"""
通过user_id和shop_code获取客户
:param user_id:
:param shop_code:
:return:
"""
customer = get_customer_by_user_id_and_shop_code(user_id, shop_code)
return customer<file_sep>/wsc_django/wsc_django/apps/shop/services.py
from uuid import uuid4
from django.db.models import Count
from product.constant import ProductStatus
from shop.models import Shop, HistoryRealName, ShopRejectReason, PayChannel
from shop.utils import get_shop_mini_program_qcode, put_qcode_file_to_tencent_cos
from user.models import User
from shop.constant import (
ShopStatus,
)
def create_shop(shop_info: dict, user: User):
    """
    创建一个商铺 (create a shop owned by `user`, who becomes its super admin).

    :param shop_info: Shop model field values, e.g. {
        "shop_name": "name",
        "shop_img": "http://xxx",
        "shop_province": 420000,
        "shop_city": 420100,
        "shop_county": 420101,
        "shop_address": "光谷智慧谷一栋505",
        "description": "xxxx",
        "suggest_phone": "153xxxxxxxx",
        "shop_phone": "152xxxxxxxx",
        "super_admin_id": 1
    }
    :param user: the user creating the shop
    :return: the saved Shop
    """
    # Keep drawing random 9-character codes until one is unused —
    # collisions are possible, so re-check the database each time.
    shop_code = str(uuid4())[-9:]
    while Shop.objects.filter(shop_code=shop_code):
        shop_code = str(uuid4())[-9:]
    # The caller-supplied dict is completed in place (code/phone/admin).
    shop_info["shop_code"] = shop_code
    shop_info["shop_phone"] = user.phone
    shop_info["super_admin_id"] = user.id
    shop = Shop(**shop_info)
    shop.save()
    return shop
def create_pay_channel(pay_channel_info: dict, shop_id: int):
    """
    创建一个商铺的pay_channel (create and persist the shop's PayChannel row).

    :param pay_channel_info: PayChannel field values
    :param shop_id: id of the owning shop
    :return: the saved PayChannel
    """
    channel = PayChannel(shop_id=shop_id, **pay_channel_info)
    channel.save()
    return channel
def create_shop_reject_reason_by_shop_id(shop_id: int, reject_reason: str):
    """
    给拒绝的商铺创建一个拒绝理由 (store why a shop failed review).

    :param shop_id: id of the rejected shop (used as the record's primary key)
    :param reject_reason: human-readable reason text
    :return: the saved ShopRejectReason
    """
    record = ShopRejectReason(id=shop_id, reject_reason=reject_reason)
    record.save()
    return record
def create_shop_creator_history_realname(shop_id: int, history_realname: str):
    """
    储存商铺创建者的历史真实姓名, 与店铺绑定 (record the creator's real name
    at creation time, keyed by the shop id).

    :param shop_id: id of the shop (used as the record's primary key)
    :param history_realname: the creator's real name at that time
    :return: the saved HistoryRealName
    """
    record = HistoryRealName(id=shop_id, realname=history_realname)
    record.save()
    return record
def create_shop_mini_program_qcode(shop_code: str):
    """
    为商铺创建小程序码 (generate the shop's mini-program QR code and upload
    it to Tencent COS).

    :param shop_code: code of the shop
    :return: (success, url) tuple from the upload helper
    """
    qcode_file = get_shop_mini_program_qcode(shop_code)
    return put_qcode_file_to_tencent_cos(qcode_file, shop_code)
def update_shop_data(shop: Shop, args: dict):
    """
    修改商铺信息 (apply the given field/value pairs to the shop and save).

    :param shop: the Shop instance to modify
    :param args: mapping of field name -> new value
    :return: the saved Shop
    """
    for field, value in args.items():
        setattr(shop, field, value)
    shop.save()
    return shop
def get_shop_by_shop_code(shop_code: str, only_normal: bool = True):
    """
    通过shop_code获取shop对象 (fetch a shop by code).

    :param shop_code: 商铺编码 (shop code)
    :param only_normal: 只查询正常 (when True, only a NORMAL-status shop matches)
    :return: the Shop or None
    """
    queryset = Shop.objects.filter(shop_code=shop_code)
    if queryset and only_normal:
        queryset = queryset.filter(status=ShopStatus.NORMAL)
    return queryset.first()
def get_shop_by_shop_id(shop_id: int, filter_close: bool = True):
    """
    通过商铺id获取商 (fetch a shop by id).

    :param shop_id: 商铺id (shop id)
    :param filter_close: 不查询关闭的 (when True, a CLOSED shop is excluded)
    :return: the Shop or None
    """
    queryset = Shop.objects.filter(id=shop_id)
    if queryset and filter_close:
        queryset = queryset.exclude(status=ShopStatus.CLOSED)
    return queryset.first()
def list_shop_by_shop_ids(shop_ids: list, filter_close: bool = True, role: int = 1):
    """
    通过ship_id列表查询商铺列表 (list shops by id).

    :param shop_ids: ids to look up
    :param filter_close: 过滤关闭 (exclude CLOSED shops when True)
    :param role: 访问角色,1:为普通用户,2.为admin用户,普通用户访问时只能查到已审核的店铺
        (role 1 = ordinary user, only NORMAL/approved shops; role 2 = admin)
    :return: queryset of matching shops
    """
    queryset = Shop.objects.filter(id__in=shop_ids)
    if queryset and filter_close:
        queryset = queryset.exclude(status=ShopStatus.CLOSED)
    # Ordinary users may only see approved (NORMAL) shops.
    if role == 1:
        queryset = queryset.filter(status=ShopStatus.NORMAL)
    return queryset.all()
def list_shop_by_shop_status(shop_status: int):
    """
    查询某一状态的所有商铺 (all shops in a given status, oldest update first).

    :param shop_status: a ShopStatus value
    :return: queryset ordered by update_at
    """
    return Shop.objects.filter(status=shop_status).order_by('update_at').all()
def list_shop_creator_history_realname(shop_ids: list):
    """
    找出商铺创建的历史真实姓名列表 (creator real-name records for the shops).

    :param shop_ids: shop ids (HistoryRealName shares the shop's primary key)
    :return: queryset of HistoryRealName
    """
    return HistoryRealName.objects.filter(id__in=shop_ids).all()
def list_shop_reject_reason(shop_ids: list):
"""查询出所有的商铺拒绝信息"""
reject_reason_list = ShopRejectReason.objects.filter(id__in=shop_ids).all()
return reject_reason_list<file_sep>/wsc_django/wsc_django/apps/staff/interface.py
from logs.services import create_staff_log
def create_staff_log_interface(log_info: dict):
"""创建一个员工板块的日志"""
log = create_staff_log(log_info)
return log<file_sep>/wsc_django/wsc_django/utils/exceptions.py
"""异常处理"""
from django.db.utils import DatabaseError
from rest_framework import exceptions
from django.db.transaction import TransactionManagementError
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler
from webargs import ValidationError
def wsc_exception_handler(exc, context):
    """Project-wide DRF exception handler: normalizes every error into
    {"success": False, "error_text": ..., "error_code": ...}."""
    # 先调用REST framework默认的异常处理方法获得标准错误响应对象,这里处理抛出的异常
    # (First let DRF's default handler build its response, then override it
    # with the project's uniform error payload below.)
    response = exception_handler(exc, context)
    # webargs validation errors are treated as HTTP 400.
    if isinstance(exc, ValidationError):
        exc.status_code = 400
    if hasattr(exc, "status_code"):
        status_code = exc.status_code
    else:
        status_code = 500
    data = {"success": False}
    if status_code == 400:
        # NOTE(review): assumes the exception is a webargs ValidationError
        # whose .messages dict is keyed by location ("query"/"json") —
        # confirm no other exception type can carry status_code 400 here.
        if exc.messages.get("query"):
            data["error_text"] = exc.messages.get("query")
        else:
            data["error_text"] = exc.messages.get("json")
        data["error_code"] = 400
        response = Response(data, status=status.HTTP_400_BAD_REQUEST)
    elif status_code == 404:
        data["error_text"] = "地址错误" if not exc.args else exc.args[0]
        data["error_code"] = 404
        response = Response(data, status=status.HTTP_404_NOT_FOUND)
    elif status_code == 401:
        data["error_text"] = exc.detail
        data["error_code"] = 401
        response = Response(data, status=status.HTTP_401_UNAUTHORIZED)
    elif status_code == 403:
        data["error_text"] = exc.detail
        data["error_code"] = 403
        response = Response(data, status=status.HTTP_403_FORBIDDEN)
    elif isinstance(exc, DatabaseError):
        data['error_text'] = exc.args[0]
        data["error_code"] = 507
        response = Response(data, status=status.HTTP_507_INSUFFICIENT_STORAGE)
    elif isinstance(exc, TransactionManagementError):
        data['error_text'] = '服务器内部错误2'
        data["error_code"] = 507
        response = Response(data, status=status.HTTP_507_INSUFFICIENT_STORAGE)
    else:
        # Fallback: any other exception becomes a generic 500.
        data["error_text"] = "服务器错误, {detail}".format(detail=exc.args)
        data["error_code"] = 500
        response = Response(data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return response<file_sep>/wsc_django/wsc_django/apps/my_celery/celery_autowork_task.py
""" 自动取消(订单,拼团)异步任务 """
import datetime
import os
# import sentry_sdk
from django.utils.timezone import make_aware
from celery import Celery
# from sentry_sdk.integrations.celery import CeleryIntegration
# from config.services import get_receipt_by_shop_id
from groupon.constant import GrouponStatus
from groupon.services import get_shop_groupon_by_id
from order.constant import OrderPayType, OrderRefundType, OrderType
from order.services import (
cancel_order,
# direct_pay,
# refund_order,
get_order_by_shop_id_and_id,
)
from promotion.events import GrouponEvent
from promotion.services import publish_product_promotion
from wsc_django.apps.settings import CELERY_BROKER
# from .celery_tplmsg_task import (
# GrouponOrderFailAttendTplMsg,
# GrouponOrderRefundFailTplMsg,
# GrouponOrderSuccessAttendTplMsg,
# )
# Default the Django settings module so the worker can boot outside manage.py.
if not os.getenv('DJANGO_SETTINGS_MODULE'):
    os.environ['DJANGO_SETTINGS_MODULE'] = 'apps.settings'
app = Celery("wsc_auto_work", broker=CELERY_BROKER, backend="")
app.conf.CELERY_TIMEZONE = "Asia/Shanghai"  # 时区 (timezone)
app.conf.CELERYD_CONCURRENCY = 4  # 任务并发数 (worker concurrency)
app.conf.CELERYD_TASK_SOFT_TIME_LIMIT = 600  # 任务超时时间 (soft time limit, seconds)
app.conf.CELERY_DISABLE_RATE_LIMITS = True  # 任务频率限制开关 (disable rate limits)
# Route all tasks of this module onto the dedicated "wsc_auto_work" queue.
app.conf.CELERY_ROUTES = {
    "auto_cancel_order": {"queue": "wsc_auto_work"},
    "auto_publish_groupon": {"queue": "wsc_auto_work"},
    "auto_expire_groupon": {"queue": "wsc_auto_work"},
    # "auto_validate_groupon_attend": {"queue": "wsc_auto_work"},
    # "auto_fail_groupon_attend": {"queue": "wsc_auto_work"},
}
# sentry_sdk.init(SENTRY_DSN, integrations=[CeleryIntegration()])
@app.task(bind=True, name="auto_cancel_order")
def auto_cancel_order(self, shop_id, order_id):
    """ 超时未支付(15min)自动取消订单
    (Cancel an order that is still unpaid after the 15-minute window.)
    """
    success, _ = cancel_order(shop_id, order_id)
    if success:
        return
    # Cancellation failed — re-fetch the order; nothing to do if it is gone.
    order = get_order_by_shop_id_and_id(shop_id, order_id)
    if not order:
        return
    # elif order.order_type == OrderType.GROUPON:
    #     auto_validate_groupon_attend.apply_async(
    #         args=[order.shop_id, order.groupon_attend_id]
    #     )
@app.task(bind=True, name="auto_publish_groupon")
def auto_publish_groupon(self, shop_id, groupon_id):
    """ 自动发布拼团事件
    (Publish a groupon promotion event when its start time arrives; skips
    gracefully when the groupon is missing, not ON, or already past its end.)
    """
    now = make_aware(datetime.datetime.now())
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        print("Groupon [id={}] publish failed: {}".format(groupon_id, groupon))
        return
    if groupon.status != GrouponStatus.ON:
        print(
            "Groupon [id={}] publish failed: 状态错误{}".format(
                groupon_id, groupon.status
            )
        )
        return
    if groupon.to_datetime < now:
        print(
            "Groupon [id={}] publish failed: 已过期{}".format(
                groupon_id, groupon.to_datetime
            )
        )
        return
    # Snapshot of the groupon used as the promotion-event payload.
    content = {
        "id": groupon.id,
        "price": round(float(groupon.price), 2),
        "to_datetime": groupon.to_datetime.strftime("%Y-%m-%d %H:%M:%S"),
        "groupon_type": groupon.groupon_type,
        "success_size": groupon.success_size,
        "quantity_limit": groupon.quantity_limit,
        "succeeded_count": groupon.succeeded_count,
        "success_limit": groupon.success_limit,
        "succeeded_quantity": int(round(groupon.succeeded_quantity)),
    }
    event = GrouponEvent(content)
    # The published promotion expires exactly when the groupon ends.
    ttl = (groupon.to_datetime - now).total_seconds()
    publish_product_promotion(
        groupon.shop_id, groupon.product_id, event, ttl=int(ttl)
    )
    print("Groupon [id={}] publish success".format(groupon.id))
@app.task(bind=True, name="auto_expire_groupon")
def auto_expire_groupon(self, shop_id, groupon_id):
    """ 拼团自动过期
    (Mark a groupon as expired once its end time has passed; scheduled to
    fire at groupon.to_datetime and tolerates firing up to 10s early.)
    """
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        print("Groupon [id={}] expire failed: {}".format(groupon_id, groupon))
        return
    if groupon.status == GrouponStatus.EXPIRED:
        print(
            "Groupon [id={}] expire failed: 状态错误{}".format(
                groupon_id, groupon.status
            )
        )
        return
    # 任务提前10s过期算作提前 (firing up to 10s early counts as on time).
    # fix: the remaining time is a timedelta, so compare it against a plain
    # timedelta — the original wrapped it in make_aware(), which accepts only
    # datetime values and raised at runtime.
    if groupon.to_datetime - make_aware(datetime.datetime.now()) > datetime.timedelta(
        seconds=10
    ):
        print(
            "Groupon [id={}] expire failed: 未到过期时间{}".format(
                groupon_id, datetime.datetime.now()
            )
        )
        return
    groupon.set_expired()
    # fix: the success path previously logged "expire failed".
    print("Groupon [id={}] expire success".format(groupon_id))
# @app.task(bind=True, name="auto_validate_groupon_attend")
# def auto_validate_groupon_attend(
# self, shop_id: int, groupon_attend_id: int, force: bool = False
# ):
# """ 自动验证拼团参与,如果满员,走订单直接支付 """
# session = scoped_DBSession()
# try:
# success, groupon_attend = get_shop_groupon_attend_by_id(
# session, shop_id, groupon_attend_id, for_update=True
# )
# if not success:
# raise ValueError(groupon_attend)
# if groupon_attend.size < groupon_attend.success_size:
# print("拼团验证: 拼团参与{}还未满员".format(groupon_attend_id))
# return
# if groupon_attend.status != GrouponAttendStatus.WAITTING:
# raise ValueError(
# "拼团验证: 拼团参与{}状态错误{}".format(groupon_attend_id, groupon_attend.status)
# )
# paid_attend_lines = list_paid_lines_by_groupon_attend_id(
# session, groupon_attend.id
# )
# if len(paid_attend_lines) < groupon_attend.size and not force:
# print(
# "拼团验证: 拼团参与{}还在等待团员支付,当前支付人数{}".format(
# groupon_attend_id, len(paid_attend_lines)
# )
# )
# return
# waitting_orders = list_waitting_orders_by_groupon_attend_id(
# session, groupon_attend.id
# )
# if len(waitting_orders) != len(paid_attend_lines):
# raise ValueError(
# "拼团验证: 拼团参与{}付款人数{}和订单人数{}不匹配".format(
# groupon_attend_id, len(paid_attend_lines), len(waitting_orders)
# )
# )
# promotion = get_product_promotion(shop_id, groupon_attend.groupon.product_id)
# pattern = PRODUCT_PROMOTION_KEY.format(
# shop_id=shop_id, product_id=groupon_attend.groupon.product_id
# )
# groupon_attend.set_succeeded()
# groupon_attend.groupon.succeeded_count += 1
# redis.hset(pattern, "succeeded_count", groupon_attend.groupon.succeeded_count)
# for waitting_order in waitting_orders:
# if promotion and isinstance(promotion, GrouponEvent):
# quantity = int(
# round(float(waitting_order.amount_net) / float(promotion.price))
# )
# groupon_attend.groupon.succeeded_quantity += quantity
# redis.hset(
# pattern,
# "succeeded_quantity",
# int(groupon_attend.groupon.succeeded_quantity),
# )
# direct_pay(session, waitting_order)
# print("拼团验证: 拼团参与{}成团成功".format(groupon_attend_id))
# # 拼团成功, 发送拼团成功的模板消息
# msg_notify = get_msg_notify_by_shop_id(session, shop_id)
# if msg_notify.group_success_wx:
# for waitting_order in waitting_orders:
# GrouponOrderSuccessAttendTplMsg.send(order_id=waitting_order.id)
# finally:
# session.close()
# @app.task(bind=True, name="auto_fail_groupon_attend")
# def auto_fail_groupon_attend(self, shop_id: int, groupon_attend_id: int, reason: str):
# """ 拼团参与自动失败 """
# session = scoped_DBSession()
# try:
# success, groupon_attend = get_shop_groupon_attend_by_id(
# session, shop_id, groupon_attend_id, for_update=True
# )
# if not success:
# raise ValueError(groupon_attend)
# if groupon_attend.status != GrouponAttendStatus.WAITTING:
# print("拼团失败: 拼团参与{}状态错误{}".format(groupon_attend_id, groupon_attend.status))
# return
# paid_attend_lines = list_paid_lines_by_groupon_attend_id(
# session, groupon_attend.id
# )
# waitting_orders = list_waitting_orders_by_groupon_attend_id(
# session, groupon_attend.id
# )
# if len(waitting_orders) != len(paid_attend_lines):
# raise ValueError(
# "拼团失败: 拼团参与{}付款人数{}和订单人数{}不匹配".format(
# groupon_attend_id, len(paid_attend_lines), len(waitting_orders)
# )
# )
# groupon_attend.set_failed(reason)
# # 拼团中订单自动退款
# map_refund_order = {True: [], False: []}
# for waitting_order in waitting_orders:
# refund_type = (
# OrderRefundType.WEIXIN_JSAPI_REFUND
# if waitting_order.pay_type == OrderPayType.WEIXIN_JSAPI
# else OrderRefundType.UNDERLINE_REFUND
# )
# success, _ = refund_order(
# session, waitting_order.shop_id, waitting_order.id, refund_type
# )
# map_refund_order[success].append(waitting_order.id)
# # 未支付订单自动取消
# unpaid_orders = list_unpaid_orders_by_groupon_attend_id(
# session, groupon_attend.id
# )
# for unpaid_order in unpaid_orders:
# cancel_order(session, unpaid_order.shop_id, unpaid_order.id)
# session.commit()
# print(
# "拼团失败: 拼团参与{},退款成功{},退款失败".format(
# groupon_attend_id,
# len(map_refund_order.get(True)),
# len(map_refund_order.get(False)),
# )
# )
# # 拼团失败, 发送拼团失败退款的模板消息
# msg_notify = get_msg_notify_by_shop_id(session, shop_id)
# if msg_notify.group_failed_wx:
# for order_id in map_refund_order.get(True):
# GrouponOrderFailAttendTplMsg.send(order_id=order_id)
# for order_id in map_refund_order.get(False):
# GrouponOrderRefundFailTplMsg.send(order_id=order_id)
# finally:
# session.close()
<file_sep>/wsc_django/wsc_django/apps/logs/interface.py
from staff.services import list_staff_by_shop_id_with_user
def list_operator_by_shop_id_with_user_interface(shop_id: int):
    """Return every staff member of a shop together with their user info."""
    return list_staff_by_shop_id_with_user(shop_id)
"""权限相关"""
from rest_framework.permissions import BasePermission
from rest_framework import exceptions
class StaffRolePermission(BasePermission):
    """Grants access only to staff holding the shop-admin role with non-empty permissions."""

    def has_permission(self, request, view):
        staff = view.current_staff
        # `&` binds tighter than `==` in Python, so this is a plain bitmask test.
        is_shop_admin = bool(staff.roles & view.staff_roles.SHOP_ADMIN)
        if is_shop_admin and staff.permissions != 0:
            return True
        raise exceptions.PermissionDenied("该员工角色不满足")
class WSCStaffPermission(BasePermission):
    """Grants access only when both a current shop and a current staff member exist."""

    def has_permission(self, request, view):
        if view.current_shop and view.current_staff:
            return True
        raise exceptions.PermissionDenied("店铺不存在或员工不存在")
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.6. Creates the `order_transaction` table
    # that records online-payment details (amount, gateway trade numbers)
    # for an order; each row belongs to exactly one `order.order`.

    initial = True

    dependencies = [
        ('order', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='OrderTransaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('receipt_fee', models.IntegerField(verbose_name='实际支付金额')),
                ('transaction_id', models.CharField(max_length=64, verbose_name='支付交易单号')),
                ('channel_trade_no', models.CharField(max_length=64, verbose_name='支付通道的支付单号')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='order.order', verbose_name='对应订单对象')),
            ],
            options={
                'verbose_name': '订单在线支付信息',
                'verbose_name_plural': '订单在线支付信息',
                'db_table': 'order_transaction',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/customer/migrations/0002_mineaddress_shop.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.6. Adds the `shop` foreign key to the
    # customer `MineAddress` model.
    # NOTE(review): `initial = True` on a 0002 migration looks like generator
    # noise — confirm this was intended before squashing migrations.

    initial = True

    dependencies = [
        ('shop', '0001_initial'),
        ('customer', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='mineaddress',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='address', to='shop.shop', verbose_name='顾客ID'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/pvuv/migrations/0001_initial.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.6. Creates the `product_browse_record`
    # table used for PV/UV tracking of product pages.

    initial = True

    dependencies = [
        ('product', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='ProductBrowseRecord',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('start_time', models.DateTimeField(verbose_name='开始浏览时间')),
                ('duration', models.IntegerField(verbose_name='停留时间')),
                ('pre_page_name', models.CharField(max_length=32, verbose_name='上一页名称')),
                ('next_page_name', models.CharField(max_length=32, verbose_name='下一页名称')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.product', verbose_name='对应货品对象')),
            ],
            options={
                'verbose_name': '货品访问记录',
                'verbose_name_plural': '货品访问记录',
                'db_table': 'product_browse_record',
            },
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/pvuv/serializers.py
from rest_framework import serializers
from user.serializers import UserSerializer
from wsc_django.utils.constant import DateFormat
class ProductBrowseRecordsSerializer(serializers.Serializer):
    """Read-only serializer for a product browse-record list entry."""
    # Timestamp when the customer opened the product page.
    start_time = serializers.DateTimeField(format=DateFormat.TIME, label="浏览时间")
    # Dwell time on the page (integer; unit not visible here — presumably seconds, TODO confirm).
    duration = serializers.IntegerField(label="停留时间")
    # Names of the pages visited before / after this one.
    pre_page_name = serializers.CharField(label="上一页名称")
    next_page_name = serializers.CharField(label="下一页名称")
    # Nested user info of the browsing customer.
    user = UserSerializer(label="用户信息")
import json
import random
import re
import string
import time
import qiniu
import requests
from django_redis import get_redis_connection
from sts.sts import Sts
from webargs.djangoparser import use_args
from webargs import fields, validate
from wechatpy import WeChatClient
from config.constant import PrinterBrand
from wsc_django.utils.views import GlobalBaseView, AdminBaseView
from config.serializers import ShareSetupSerializer, PrinterSerializer, ReceiptSerializer, MsgNotifySerializer
from logs.constant import ConfigLogType
from shop.constant import ShopVerifyActive, ShopPayActive
from shop.serializers import AdminShopSerializer
from wsc_django.apps.settings import (
MP_APPID, MP_APPSECRET,
QINIU_ACCESS_KEY,
QINIU_SECRET_KEY,
QINIU_BUCKET_SHOP_IMG,
TENCENT_COS_SECRETID,
TENCENT_COS_SECRETKEY,
)
from config.services import (
create_share_setup,
update_share_setup,
get_share_setup_by_id,
get_receipt_by_shop_id,
get_printer_by_shop_id,
list_msg_notify_fields,
get_msg_notify_by_shop_id,
update_msg_notify_by_shop_id,
update_some_config_by_shop_id,
create_printer_by_shop_id, update_receipt_by_shop_id,
)
from config.interface import (
update_shop_data_interface,
create_config_log_interface,
get_staff_by_user_id_and_shop_id_with_user_interface,
)
# Allowed `field` names for AdminConfigMsgNotifyView.put, computed once at import time.
msg_notify_field_list = list_msg_notify_fields()
class AdminConfigShopInfoView(AdminBaseView):
    """Admin - settings - fetch the current shop's info."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    def get(self, request):
        shop = self.current_shop
        # Attach the super admin's staff record (with user info) so the
        # serializer can expose the shop creator.
        staff = get_staff_by_user_id_and_shop_id_with_user_interface(
            shop.super_admin_id, shop.id
        )
        shop.create_user = staff
        shop_info = AdminShopSerializer(shop).data
        return self.send_success(data=shop_info)
class AdminConfigPrintInfoView(AdminBaseView):
    """Admin - settings - fetch printer and receipt configuration."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    def get(self, request):
        shop_id = self.current_shop.id
        printer = get_printer_by_shop_id(shop_id)
        receipt = get_receipt_by_shop_id(shop_id)
        printer_info = PrinterSerializer(printer).data
        receipt_info = ReceiptSerializer(receipt).data
        return self.send_success(printer_data=printer_info, receipt_data=receipt_info)
class AdminConfigMsgNotifyView(AdminBaseView):
    """Admin - settings - notifications - get & update the shop's message-notify flags."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    def get(self, request):
        shop_id = self.current_shop.id
        msg_notify = get_msg_notify_by_shop_id(shop_id)
        msg_notify = MsgNotifySerializer(msg_notify).data
        return self.send_success(data=msg_notify)
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            # `field` must be one of the notify flags known at import time.
            "field": fields.String(
                required=True,
                validate=[validate.OneOf(msg_notify_field_list)],
                comment="更改的字段",
            ),
            "value": fields.Boolean(required=True, comment="更改的值"),
        },
        location="json"
    )
    def put(self, request, args):
        # Toggle a single notify flag: {field_name: bool}.
        shop_id = self.current_shop.id
        msg_notify_info = {args.get("field"): args.get("value")}
        update_msg_notify_by_shop_id(shop_id, msg_notify_info)
        return self.send_success()
class AdminConfigShopImgView(AdminBaseView):
    """Admin - settings - shop info - update the shop logo."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "shop_img": fields.String(
                required=True, validate=[validate.Length(1, 128)], comment="店铺logo"
            )
        },
        location="json",
    )
    def put(self, request, args):
        shop = self.current_shop
        update_shop_data_interface(shop, args)
        return self.send_success()
class AdminConfigShopNameView(AdminBaseView):
    """Admin - settings - shop info - update the shop name (audited)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "shop_name": fields.String(
                required=True, validate=[validate.Length(1, 300)], comment="店铺名"
            )
        },
        location="json",
    )
    def put(self, request, args):
        shop = self.current_shop
        shop = update_shop_data_interface(shop, args)
        # Audit-log the rename with the resulting name.
        log_info = {
            "shop_id": shop.id,
            "operator_id": self.current_user.id,
            "operate_type": ConfigLogType.SHOP_NAME,
            "operate_content": shop.shop_name,
        }
        create_config_log_interface(log_info)
        return self.send_success()
class AdminConfigShopPhoneView(AdminBaseView):
    """Admin - settings - shop info - update the shop contact phone (audited)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {"shop_phone": fields.String(required=True, comment="店铺名")}, location="json"
    )
    def put(self, request, args):
        shop = self.current_shop
        shop = update_shop_data_interface(shop, args)
        # Audit-log the phone change with the resulting number.
        log_info = {
            "shop_id": shop.id,
            "operator_id": self.current_user.id,
            "operate_type": ConfigLogType.SHOP_PHONE,
            "operate_content": shop.shop_phone,
        }
        create_config_log_interface(log_info)
        return self.send_success()
class AdminConfigShopAddressView(AdminBaseView):
    """Admin - settings - shop info - update the shop address (province/city/county codes + detail)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "shop_province": fields.Integer(required=True, comment="省份编号"),
            "shop_city": fields.Integer(required=True, comment="城市编号"),
            "shop_county": fields.Integer(required=True, comment="区份编号"),
            "shop_address": fields.String(
                required=True, validate=[validate.Length(0, 100)], comment="详细地址"
            ),
        },
        location="json"
    )
    def put(self, request, args):
        shop = self.current_shop
        update_shop_data_interface(shop, args)
        return self.send_success()
class AdminConfigPrinterView(AdminBaseView):
    """Admin - settings - printing - create/update the shop's receipt printer (audited)."""

    def validate_feiyin(self, code):
        """Ask the Feiyin service whether a terminal code exists.

        Returns a truthy regex match when the response contains
        "终端编号不存在" (terminal code does not exist) — i.e. truthy
        means the code is INVALID.

        NOTE(review): the session cookie below is hard-coded and will
        eventually expire — confirm whether this endpoint is still in use.
        """
        _headers = {
            "Cookie": "__utmt=1; sessionid=d2e9cb1cd2c64639f4e18019d35343ee; username=; usertype=1; account=7502; key=e506eb41e1c43558a6abd7618321b6aa; __utma=240375839.1986845255.1436857060.1437040538.1437050867.4; __utmb=240375839.5.10.1437050867; __utmc=240375839; __utmz=240375839.1436857060.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
            "Host": "my.feyin.net",
            "Referer": "http://my.feyin.net/crm/accountIndex",
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36",
            "Connection": "keep-alive",
        }
        _data = {
            "deviceCode": code,
            "installAddress": "",
            "simCode": "",
            "groupname": "",
        }
        _r = requests.post(
            "http://my.feyin.net/activeDevice",
            data=_data,
            headers=_headers,
            timeout=(1, 5),
        )
        content = _r.content.decode("utf-8")
        pattern = re.compile("终端编号不存在", re.S)
        result = re.search(pattern, content)
        return result

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "brand": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf(
                        [
                            PrinterBrand.YILIANYUN,
                            # PrinterBrand.FEIYIN,
                            # PrinterBrand.FOSHANXIXUN,
                            # PrinterBrand.S1,
                            # PrinterBrand.S2,
                            # PrinterBrand.SENGUO,
                        ]
                    )
                ],
                comment="打印机品牌/型号",
            ),
            "code": fields.String(
                required=True, validate=[validate.Length(0, 32)], comment="打印机终端号"
            ),
            "key": fields.String(
                required=False,
                missing="",
                validate=[validate.Length(0, 32)],
                comment="打印机秘钥",
            ),
            "auto_print": fields.Boolean(required=True, comment="订单自动打印"),
        },
        location="json",
    )
    def put(self, request, args):
        shop_id = self.current_shop.id
        # Persist the flag as an int column value.
        args["auto_print"] = int(args.get("auto_print", 1))
        if args.get("brand") == PrinterBrand.FEIYIN:
            ret = None
            try:
                ret = self.validate_feiyin(args.get("code"))
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallows
                # SystemExit/KeyboardInterrupt. Remote validation stays
                # best-effort: a network failure must not block saving.
                pass
            if ret:
                return self.send_fail(error_text="终端号不存在")
        printer = create_printer_by_shop_id(shop_id, args)
        # Audit-log the printer change (brand + terminal code).
        log_info = {
            "shop_id": shop_id,
            "operator_id": self.current_user.id,
            "operate_type": ConfigLogType.PRINTER_SET,
            "operate_content": "型号:{},终端号:{}".format(printer.brand_text, printer.code),
        }
        create_config_log_interface(log_info)
        return self.send_success(printer_id=printer.id)
class AdminConfigReceiptBottomMsgView(AdminBaseView):
    """Admin - settings - receipt - set the receipt footer text."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "bottom_msg": fields.String(
                required=True, validate=[validate.Length(0, 128)], comment="小票底部信息"
            )
        },
        location="json"
    )
    def put(self, request, args):
        shop_id = self.current_shop.id
        update_receipt_by_shop_id(shop_id, args)
        return self.send_success()
class AdminConfigReceiptBottomQrcodeView(AdminBaseView):
    """Admin - settings - receipt - set the receipt footer QR code."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "bottom_qrcode": fields.String(
                required=True, validate=[validate.Length(0, 128)], comment="小票底部二维码"
            )
        },
        location="json"
    )
    def put(self, request, args):
        shop_id = self.current_shop.id
        update_receipt_by_shop_id(shop_id, args)
        return self.send_success()
class AdminConfigReceiptBrcodeActiveView(AdminBaseView):
    """Admin - settings - receipt - toggle printing of the order-number barcode."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {"brcode_active": fields.Boolean(required=True, comment="打印单号条码")}, location="json"
    )
    def put(self, request, args):
        # Persist the flag as an int column value.
        args["brcode_active"] = int(args.get("brcode_active", 1))
        shop_id = self.current_shop.id
        update_receipt_by_shop_id(shop_id, args)
        return self.send_success()
class AdminConfigReceiptCopiesView(AdminBaseView):
    """Admin - settings - receipt - set the default number of printed copies (0-5)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "copies": fields.Integer(
                required=True, validate=[validate.Range(0, 5)], comment="默认打印份数"
            )
        },
        location="json"
    )
    def put(self, request, args):
        # BUG FIX: removed a copy-pasted line from the barcode view
        # (`args["brcode_active"] = int(args.get("brcode_active", 1))`), which
        # silently forced brcode_active back to 1 every time the copy count
        # was changed. Only `copies` belongs in this payload.
        shop_id = self.current_shop.id
        update_receipt_by_shop_id(shop_id, args)
        return self.send_success()
class AdminPayModeConfigView(AdminBaseView):
    """Admin - toggle a shop payment method (weixin_jsapi / on_delivery)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "key": fields.String(
                required=True,
                validate=[validate.OneOf(["weixin_jsapi", "on_delivery"])],
                comment="设置的字段名",
            ),
            "value": fields.Boolean(required=True, comment="设置的字段值"),
        },
        location="json"
    )
    def put(self, request, args):
        shop = self.current_shop
        config_info = {args.get("key"): args.get("value")}
        is_wx = "weixin_jsapi" in config_info.keys()
        # Unverified shops / shops without online pay may still toggle
        # cash-on-delivery; the guards below only apply to weixin_jsapi.
        if is_wx and shop.cerify_active != ShopVerifyActive.YES:
            return self.send_fail(
                error_text="已认证的商铺才可申请开通在线支付,您的店铺暂未认证,请前往认证"
            )
        if is_wx and shop.pay_active != ShopPayActive.YES:
            return self.send_fail(error_text="店铺未开通线上支付")
        some_config = update_some_config_by_shop_id(shop.id, config_info)
        # At least one payment method must remain enabled.
        # NOTE(review): the config is persisted BEFORE this check, so when it
        # fails both methods are already disabled in storage — confirm whether
        # the update should be validated (or rolled back) first.
        if not some_config.weixin_jsapi and not some_config.on_delivery:
            return self.send_fail(error_text="至少保留一种商城支付方式,否则客户无法支付")
        return self.send_success()
class AdminSomeConfigView(AdminBaseView):
    """Admin - miscellaneous shop toggles (show_off_product / new_order_voice)."""
    @use_args(
        {
            "key": fields.String(
                required=True,
                validate=[validate.OneOf(["show_off_product", "new_order_voice"])],
                comment="设置的字段名",
            ),
            "value": fields.Boolean(required=True, comment="设置的字段值"),
        },
        location="json"
    )
    def put(self, request, args):
        # NOTE(review): unlike sibling views this one has no
        # permission_required decorator — confirm that is intentional.
        shop = self.current_shop
        config_info = {args.get("key"): args.get("value")}
        update_some_config_by_shop_id(shop.id, config_info)
        return self.send_success()
class AdminConfigShopSetupView(AdminBaseView):
    """Admin - fetch the shop's sharing setup, creating a default one on first access."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    def get(self, request):
        shop = self.current_shop
        share_setup = get_share_setup_by_id(shop.id)
        # Lazily create the share setup with the shop name as default title.
        if not share_setup:
            share_setup = create_share_setup(shop.id, shop.shop_name)
        share_setup_data = ShareSetupSerializer(share_setup).data
        return self.send_success(data=share_setup_data)
class AdminConfigCustomTitleNameView(AdminBaseView):
    """Admin - update the custom title in the shop's share settings."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "custom_title_name": fields.String(
                required=True, validate=[validate.Length(1, 30)], comment="自定义分享标题名称"
            )
        },
        location="json"
    )
    def put(self, request,args):
        shop = self.current_shop
        update_share_setup(shop.id, args)
        return self.send_success()
class AdminConfigCustomShareDescriptionView(AdminBaseView):
    """Admin - update the custom description in the shop's share settings."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_CONFIG]
    )
    @use_args(
        {
            "custom_share_description": fields.String(
                required=True, validate=[validate.Length(1, 45)], comment="自定义分享描述"
            )
        },
        location="json"
    )
    def put(self, request,args):
        shop = self.current_shop
        update_share_setup(shop.id, args)
        return self.send_success()
class WechatJsapiSigntureView(GlobalBaseView):
    """Mall - build a WeChat JS-SDK signature for the given page URL.

    Caches the access token (1h) and jsapi ticket (~2h) in Redis so the
    WeChat API is only hit when the cache is cold.
    """
    @use_args({"url": fields.String(required=True, comment="当前页面的URL")}, location="query")
    def get(self, request, args):
        redis_conn = get_redis_connection("default")
        access_token = redis_conn.get("access_token")
        access_token = access_token.decode("utf-8") if access_token else None
        wechat_client = WeChatClient(
            appid=MP_APPID, secret=MP_APPSECRET, access_token=access_token, timeout=5
        )
        if not access_token:
            # fetch_access_token() returns a dict; cache only the token string.
            access_token = wechat_client.fetch_access_token()
            redis_conn.setex("access_token", 3600, access_token.get("access_token"))
        jsapi_ticket = redis_conn.get("jsapi_ticket")
        jsapi_ticket = jsapi_ticket.decode("utf-8") if jsapi_ticket else None
        if not jsapi_ticket:
            jsapi_ticket = wechat_client.jsapi.get_jsapi_ticket()
            redis_conn.setex("jsapi_ticket", 7100, jsapi_ticket)
        # Random 10-char nonce + current unix timestamp, both echoed back to the client.
        noncestr = "".join(random.sample(string.ascii_letters + string.digits, 10))
        timestamp = int(time.time())
        signature = wechat_client.jsapi.get_jsapi_signature(
            noncestr, jsapi_ticket, timestamp, args.get("url")
        )
        data = {
            "appID": MP_APPID,
            "timestamp": timestamp,
            "nonceStr": noncestr,
            "signature": signature,
        }
        return self.send_success(data=data)
class QiniuImgTokenView(GlobalBaseView):
    """Admin - issue a short-lived (30 min) Qiniu upload token for shop images."""

    def get(self, request):
        auth = qiniu.Auth(QINIU_ACCESS_KEY, QINIU_SECRET_KEY)
        upload_token = auth.upload_token(QINIU_BUCKET_SHOP_IMG, expires=60 * 30)
        return self.send_success(token=upload_token)
class TencentCOSCredential(GlobalBaseView):
    """Mall - issue a temporary Tencent Cloud COS credential for uploads.

    NOTE(review): bucket/region are hard-coded and allow_prefix is '*'
    (the docs flag the wildcard as a security risk) — consider moving to
    settings and narrowing the prefix.
    """
    def get(self, request):
        config = {
            'url': 'https://sts.tencentcloudapi.com/',
            # Domain; optional, defaults to sts.tencentcloudapi.com
            'domain': 'sts.tencentcloudapi.com',
            # Lifetime of the temporary credential, in seconds
            'duration_seconds': 1800,
            'secret_id': TENCENT_COS_SECRETID,
            # Permanent secret key
            'secret_key': TENCENT_COS_SECRETKEY ,
            # Optional network proxy
            # 'proxy': {
            #     'http': 'xx',
            #     'https': 'xx'
            # },
            # Replace with your bucket
            'bucket': 'zhihao-1300126182',
            # Replace with the bucket's region
            'region': 'ap-nanjing',
            # Allowed path prefix; derive from the logged-in user if needed.
            # Examples: a.jpg, a/*, or * (wildcard * is a serious security
            # risk — evaluate carefully before using)
            'allow_prefix': '*',
            # Permission list for the key. Simple and multipart upload need the
            # actions below; see https://cloud.tencent.com/document/product/436/31923
            'allow_actions': [
                # Simple upload
                'name/cos:PutObject',
                'name/cos:PostObject',
            ],
        }
        response = {}
        # Best-effort: on any STS failure an empty dict is returned to the client.
        try:
            sts = Sts(config)
            response = sts.get_credential()
            print('get data : ' + json.dumps(dict(response), indent=4))
        except Exception as e:
            print(e)
        return self.send_success(data=response)
<file_sep>/wsc_django/wsc_django/apps/product/constant.py
class ProductStatus:
    """Lifecycle states of a product."""
    ON = 1  # on sale
    OFF = 2  # taken off the shelf
    DELETED = 0  # soft-deleted
class ProductGroupDefault:
    """Whether a product group is the default group."""
    YES = 1
    NO = 0
class ProductOperationType:
    """Bulk-operation types on products."""
    ON = 1  # put on the shelf
    OFF = 2  # take off the shelf
import datetime
from django.utils.timezone import make_aware
from webargs.djangoparser import use_args
from webargs import fields, validate
from logs.constant import OrderLogType
from logs.interface import list_operator_by_shop_id_with_user_interface
from logs.serializers import (
OrderLogSerializer,
OperatorSerializer,
ConfigLogSerializer,
ProductLogSerializer,
PromotionLogSerializer,
)
from wsc_django.utils.arguments import StrToList
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import AdminBaseView
from logs.services import (
get_all_module_dict,
list_one_module_log_by_filter,
dict_more_modules_log_by_filter,
)
# Mapping of log-module name -> module id, computed once at import time;
# used both for request validation and dynamic formatter dispatch below.
all_module_dict = get_all_module_dict()
class AdminLogsView(AdminBaseView):
    """Admin - staff - operation logs across one or more modules (last 90 days)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_STAFF]
    )
    @use_args(
        {
            # Comma-separated operator ids, e.g. "1, 2,3" -> ["1", "2", "3"].
            "operator_ids": fields.Function(
                deserialize=lambda x: x.replace(" ", "").split(","),
                missing=[],
                comment="操作人ID",
            ),
            "operate_module_ids": StrToList(
                missing=[],
                validate=[validate.ContainsOnly(list(all_module_dict.values()))],
                comment="模块ID",
            ),
        },
        location="query"
    )
    def get(self, request, args):
        args["operate_module_ids"] = [int(_) for _ in args.get("operate_module_ids")]
        # Django timezone handling: build an aware [today-90d, tomorrow) window.
        args["end_date"] = make_aware(datetime.datetime.today() + datetime.timedelta(1))
        args["from_date"] = make_aware(datetime.datetime.today() - datetime.timedelta(90))
        shop_id = self.current_shop.id
        # Single-module vs multi-module query.
        operate_module_ids = args.pop("operate_module_ids")
        # Exactly one module requested.
        if len(operate_module_ids) == 1:
            module_id = operate_module_ids[0]
            log_list = list_one_module_log_by_filter(
                shop_id, module_id, **args
            )
            module_id_2_log_list = {module_id: log_list}
        # Several (or zero -> all) modules requested.
        else:
            module_id_2_log_list = dict_more_modules_log_by_filter(
                shop_id, operate_module_ids, **args
            )
        """
        module_2_log_list = {
            1: [log, log ...],
            2: [log, log ...],
            ...
        }
        """
        # Serialize each module's logs via its format_<module>_data method,
        # then merge and sort newest-first.
        module_id_2_name = {v: k.lower() for k, v in all_module_dict.items()}
        all_log_list = []
        for module_id, log_list_query in module_id_2_log_list.items():
            def_name = "format_{}_data".format(module_id_2_name.get(module_id))
            log_list = getattr(self, def_name)(log_list_query)
            all_log_list.extend(log_list)
        all_log_list = sorted(
            all_log_list, key=lambda x: x["operate_time"], reverse=True
        )
        return self.send_success(data_list=all_log_list)
    def format_order_data(self, log_list_query):
        """Serialize order logs; amount-change entries store "old|new" in operate_content."""
        for log in log_list_query:
            if log.operate_type in [
                OrderLogType.HOME_DELIVERY_AMOUNT,
                OrderLogType.HOME_MINIMUM_FREE_AMOUNT,
                OrderLogType.HOME_MINIMUM_ORDER_AMOUNT,
                OrderLogType.PICK_MINIMUM_FREE_AMOUNT,
                OrderLogType.PICK_SERVICE_AMOUNT,
            ]:
                # Split "old|new" into separate fields and blank the raw content.
                log.old_value = log.operate_content.split("|")[0]
                log.new_value = log.operate_content.split("|")[1]
                log.operate_content = ""
        order_log_serializer = OrderLogSerializer(log_list_query, many=True)
        log_list = order_log_serializer.data
        return log_list
    def format_config_data(self, log_list_query):
        """Serialize config logs."""
        log_list = ConfigLogSerializer(log_list_query, many=True).data
        return log_list
    def format_product_data(self, log_list_query):
        """Serialize product logs."""
        log_list = ProductLogSerializer(log_list_query, many=True).data
        return log_list
    def format_promotion_data(self, log_list_query):
        """Serialize promotion logs (docstring previously copy-pasted from product)."""
        log_list = PromotionLogSerializer(log_list_query, many=True).data
        return log_list
class AdminOperatorsView(AdminBaseView):
    """Admin - staff - paginated list of operators (staff with user info)."""
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_STAFF]
    )
    def get(self, request):
        shop_id = self.current_shop.id
        operator_list = list_operator_by_shop_id_with_user_interface(shop_id)
        operator_list = self._get_paginated_data(operator_list, OperatorSerializer)
        return self.send_success(data_list=operator_list)
<file_sep>/wsc_django/wsc_django/apps/delivery/constant.py
# 配送方式
class DeliveryType:
    """Order delivery methods."""
    StaffDelivery = 1  # delivered by shop staff
    ExpressDelivery = 2  # delivered by courier/express
<file_sep>/wsc_django/wsc_django/apps/pvuv/models.py
from django.db import models
# Create your models here.
from product.models import Product
from shop.models import Shop
from user.models import User
from wsc_django.utils.models import TimeBaseModel
class ProductBrowseRecord(TimeBaseModel):
    """One customer's browse session on a product page (PV/UV tracking)."""
    shop = models.ForeignKey(Shop, on_delete=models.CASCADE, null=False, verbose_name="对应的店铺对象")
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False, verbose_name="对应的用户对象")
    product = models.ForeignKey(Product, on_delete=models.CASCADE, null=False, verbose_name="对应货品对象")
    # When the page was opened.
    start_time = models.DateTimeField(null=False, verbose_name="开始浏览时间")
    # Dwell time (integer; unit not visible here — presumably seconds, TODO confirm).
    duration = models.IntegerField(null=False, verbose_name="停留时间")
    # Page names visited before / after this one.
    pre_page_name = models.CharField(max_length=32, verbose_name="上一页名称")
    next_page_name = models.CharField(max_length=32, verbose_name="下一页名称")
    class Meta:
        db_table = "product_browse_record"
        verbose_name = "货品访问记录"
        verbose_name_plural = verbose_name
<file_sep>/wsc_django/wsc_django/apps/groupon/services.py
import datetime
from django.utils.timezone import make_aware
from customer.models import Customer
from customer.services import get_customer_by_user_id_and_shop_id, create_customer
from groupon.constant import GrouponStatus, GrouponAttendStatus, GrouponAttendLineStatus
from groupon.models import Groupon, GrouponAttend, GrouponAttendDetail
from logs.constant import PromotionLogType
from logs.services import create_promotion_log
from product.constant import ProductStatus
from product.models import Product
from product.services import get_product_by_id, list_product_by_filter
from promotion.services import stop_product_promotion
def validate_groupon_period(
    product_id,
    from_datetime: datetime.datetime,
    to_datetime: datetime.datetime,
    groupon_id: int = 0,
):
    """
    Validate a groupon's time window against basic rules and other active
    groupons for the same product.

    :param product_id: product the groupon applies to
    :param from_datetime: window start (naive; made aware below — assumes the
        caller passes naive datetimes, TODO confirm)
    :param to_datetime: window end (naive)
    :param groupon_id: current groupon id to exclude when editing (0 = create)
    :return: (True, "") when valid, otherwise (False, error message)
    """
    if from_datetime >= to_datetime:
        return False, "结束时间必须大于起始时间"
    elif to_datetime < datetime.datetime.now():
        return False, "结束时间必须大于当前时间"
    elif to_datetime - from_datetime > datetime.timedelta(days=31):
        return False, "最长支持一个月的拼团活动"
    groupons = (
        Groupon.objects.filter(
            product_id=product_id, status=GrouponStatus.ON,
        )
        .exclude(id=groupon_id)
        .all()
    )
    for groupon in groupons:
        # All four endpoints made aware so min/max compare consistently.
        periods = [
            make_aware(from_datetime),
            make_aware(to_datetime),
            groupon.from_datetime,
            groupon.to_datetime,
        ]
        # Two intervals overlap iff the sum of their lengths exceeds the
        # span from the earliest start to the latest end.
        if (to_datetime - from_datetime) + (
            groupon.to_datetime - groupon.from_datetime
        ) > (
            max(periods) - min(periods)
        ):  # interval-overlap check
            return (
                False,
                "与 {from_datetime} 到 {to_datetime} 的拼团重复".format(
                    from_datetime=groupon.from_datetime, to_datetime=groupon.to_datetime
                ),
            )
    return True, ""
def launch_groupon_attend(shop_id: int, user_id: int, groupon_id: int):
    """
    Open a new groupon attend (start a group) for a user, after validating
    that the groupon is live.

    :param shop_id: shop owning the groupon
    :param user_id: user opening the group (becomes the sponsor)
    :param groupon_id: groupon to attend
    :return: (True, GrouponAttend) on success, otherwise (False, error message)
    """
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        return False, groupon
    elif groupon.status != GrouponStatus.ON:
        return False, "活动已结束, 看看其他商品吧"
    # NOTE(review): these compare a model datetime to naive datetime.now(),
    # while the celery tasks above use make_aware() — confirm USE_TZ and
    # whether these comparisons can raise TypeError on aware fields.
    elif groupon.to_datetime <= datetime.datetime.now():
        return False, "活动已结束, 看看其他商品吧"
    elif groupon.from_datetime > datetime.datetime.now():
        return False, "活动已结束, 看看其他商品吧"
    # Lazily create the customer record for this shop.
    customer = get_customer_by_user_id_and_shop_id(user_id, shop_id)
    if not customer:
        customer = create_customer(user_id, shop_id)
    # The attend expires after success_valid_hour, capped by the groupon end.
    valid_datetime = datetime.datetime.now() + datetime.timedelta(
        hours=groupon.success_valid_hour
    )
    groupon_attend = GrouponAttend(
        groupon=groupon,
        size=0,
        success_size=groupon.success_size,
        to_datetime=min(groupon.to_datetime, valid_datetime),
        status=GrouponAttendStatus.CREATED,
    )
    groupon_attend.save()
    attend_detail = _create_groupon_attend_detail(
        groupon_attend, customer, is_sponsor=True
    )
    # Per-customer participation limits.
    groupon_attend.sponsor_detail = attend_detail
    success, msg = groupon_attend.limit(customer, 0)
    if not success:
        return False, msg
    # Once a group has been opened the groupon can no longer be edited.
    if groupon.is_editable:
        groupon.set_uneditable()
        groupon.save()
    return True, groupon_attend
def _create_groupon_attend_detail(
    groupon_attend: GrouponAttend,
    customer: Customer,
    is_sponsor: bool=False,
):
    """
    Persist and return one participation line for a groupon attend.

    :param groupon_attend: the attend this line belongs to
    :param customer: the participating customer
    :param is_sponsor: True when this customer opened the group
    :return: the saved GrouponAttendDetail
    """
    detail = GrouponAttendDetail(
        groupon_attend=groupon_attend,
        customer=customer,
        is_sponsor=is_sponsor,
        is_new_customer=customer.is_new_customer(),
        status=GrouponAttendLineStatus.UNPAID,
    )
    detail.save()
    return detail
def create_groupon(
    shop_id: int, user_id: int, product: Product, args: dict
):
    """
    Create a groupon activity; user_id is only used for the operation log.

    :param shop_id: shop the activity belongs to
    :param user_id: operator id recorded in the promotion log
    :param product: product the groupon sells
    :param args: validated activity fields (price, datetimes, limits, ...)
    :return: the saved Groupon
    """
    activity_fields = (
        "price",
        "from_datetime",
        "to_datetime",
        "groupon_type",
        "success_size",
        "quantity_limit",
        "success_limit",
        "attend_limit",
        "success_valid_hour",
    )
    groupon = Groupon(
        shop_id=shop_id,
        product_id=product.id,
        status=GrouponStatus.ON,
        **{field: args[field] for field in activity_fields},
    )
    groupon.save()
    # Record the creation in the promotion operation log.
    create_promotion_log(
        {
            "shop_id": shop_id,
            "operator_id": user_id,
            "operate_type": PromotionLogType.ADD_GROUPON,
            "operate_content": product.name,
        }
    )
    return groupon
def update_groupon(
    shop_id: int, user_id: int, product: Product, groupon: Groupon, args: dict
):
    """
    Edit a groupon activity; user_id is only used for the operation log.

    :param shop_id: shop the activity belongs to
    :param user_id: operator id recorded in the promotion log
    :param product: product the groupon sells
    :param groupon: the activity to update
    :param args: mapping of field name -> new value
    :return: the saved Groupon
    """
    for field, value in args.items():
        setattr(groupon, field, value)
    # An edited groupon goes straight back online.
    groupon.status = GrouponStatus.ON
    groupon.save()
    # Record the edit in the promotion operation log.
    create_promotion_log(
        {
            "shop_id": shop_id,
            "operator_id": user_id,
            "operate_type": PromotionLogType.UPDATE_GROUPON,
            "operate_content": product.name,
        }
    )
    return groupon
def set_groupon_off(shop_id: int, user_id: int, groupon_id: int, force=False):
    """
    Deactivate a groupon activity.

    :param shop_id: shop the activity belongs to
    :param user_id: operator id recorded in the promotion log
    :param groupon_id: activity to stop
    :param force: accepted for API compatibility; not used in this body
    :return: (True, Groupon) on success, otherwise (False, error text)
    """
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        return False, groupon
    if groupon.status == GrouponStatus.EXPIRED:
        return False, "拼团活动已过期,请重新编辑"
    groupon.status = GrouponStatus.OFF
    groupon.save()
    # If the activity is currently running, stop the product promotion too.
    now = make_aware(datetime.datetime.now())
    if groupon.from_datetime <= now <= groupon.to_datetime:
        stop_product_promotion(shop_id, groupon.product_id)
    # Record the stop in the promotion operation log; the product may have
    # been deleted in the meantime, hence filter_delete=False.
    product = get_product_by_id(
        shop_id, groupon.product_id, filter_delete=False
    )
    create_promotion_log(
        {
            "shop_id": shop_id,
            "operator_id": user_id,
            "operate_type": PromotionLogType.STOP_GROUPON,
            "operate_content": product.name,
        }
    )
    return True, groupon
def force_success_groupon_attend(shop_id: int, groupon_attend_id: int):
    """
    Force a waiting group to succeed by filling it with anonymous members.

    :param shop_id: shop the attend belongs to
    :param groupon_attend_id: attend (group) to force-complete
    :return: (True, GrouponAttend) on success, otherwise (False, error text)
    """
    success, groupon_attend = get_shop_groupon_attend_by_id(
        shop_id, groupon_attend_id, for_update=True
    )
    if not success:
        return False, groupon_attend
    if groupon_attend.status != GrouponAttendStatus.WAITTING:
        return False, "团状态错误, 无法强制成团"
    if groupon_attend.size >= groupon_attend.success_size:
        return False, "拼团已经满员, 无法强制成团, 请等待团员完成支付"
    if list_unpaid_details_by_groupon_attend_id(groupon_attend_id):
        return False, "还有团员未支付, 无法强制成团, 请等待团员完成支付"
    # Fill the remaining seats with anonymous members and mark the group full.
    remaining = groupon_attend.success_size - groupon_attend.size
    groupon_attend.anonymous_size = remaining
    groupon_attend.size = groupon_attend.success_size
    groupon_attend.save()
    return True, groupon_attend
def get_shop_groupon_by_id(shop_id: int, groupon_id: int):
    """
    Fetch one of a shop's groupon activities, reporting an error if missing.

    :param shop_id: shop the activity must belong to
    :param groupon_id: activity id
    :return: (True, Groupon) when found, otherwise (False, error text)
    """
    groupon = Groupon.objects.filter(shop_id=shop_id, id=groupon_id).first()
    if groupon is None:
        return False, "拼团活动不存在"
    return True, groupon
def get_groupon_by_id(shop_id: int, groupon_id: int):
    """
    Fetch one of a shop's groupon activities without an error tuple.

    :param shop_id: shop the activity must belong to
    :param groupon_id: activity id
    :return: the Groupon, or None when not found
    """
    return Groupon.objects.filter(shop_id=shop_id, id=groupon_id).first()
def get_shop_groupon_attend_by_id(
    shop_id: int, groupon_attend_id: int, for_update: bool = False
):
    """
    Fetch a groupon attend by id, verify its groupon belongs to the given
    shop, and attach the sponsor's attend detail.

    :param shop_id: shop the related groupon must belong to
    :param groupon_attend_id: attend (group) id
    :param for_update: when True, take a SELECT ... FOR UPDATE row lock
    :return: (True, GrouponAttend) on success, otherwise (False, error text)
    """
    queryset = GrouponAttend.objects.all()
    if for_update:
        # Bug fix: the old code called .select_for_update().get(id=...).first().
        # QuerySet.get() returns a model instance (or raises DoesNotExist),
        # so .first() raised AttributeError whenever for_update was True.
        # Use filter(...).first() to match the non-locking branch.
        queryset = queryset.select_for_update()
    groupon_attend = queryset.filter(id=groupon_attend_id).first()
    if not groupon_attend:
        return False, "团不存在"
    groupon = Groupon.objects.filter(
        shop_id=shop_id, id=groupon_attend.groupon.id
    ).first()
    if not groupon:
        return False, "拼团活动不存在"
    sponsor_detail = GrouponAttendDetail.objects.filter(
        groupon_attend_id=groupon_attend_id, is_sponsor=True
    ).first()
    if not sponsor_detail:
        return False, "团长不存在"
    groupon_attend.sponsor_detail = sponsor_detail
    return True, groupon_attend
def list_shop_groupons(shop_id: int, args: dict):
    """
    List a shop's groupon activities, optionally filtered by product name.

    :param shop_id: shop to list activities for
    :param args: filter arguments; a truthy "product_name" narrows results
    :return: groupons ordered by status, then newest first
    """
    queryset = Groupon.objects.filter(
        shop_id=shop_id,
        status__in=[GrouponStatus.ON, GrouponStatus.OFF, GrouponStatus.EXPIRED],
    )
    # Narrow by product-name search across all product states.
    if args["product_name"]:
        states = [ProductStatus.ON, ProductStatus.OFF, ProductStatus.DELETED]
        products = list_product_by_filter(
            shop_id, states, args["product_name"], 0
        )
        queryset = queryset.filter(product_id__in=[p.id for p in products])
    return queryset.order_by("status", "-id").all()
def list_waitting_groupon_attends(shop_id: int, groupon_id: int):
    """
    List an activity's groups that are still waiting to fill, attaching each
    group's sponsor (the customer who opened it).

    :param shop_id: shop the activity must belong to
    :param groupon_id: activity id
    :return: (True, attends) on success, otherwise (False, error text)
    """
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        return False, groupon
    groupon_attends = GrouponAttend.objects.filter(
        groupon_id=groupon_id, status=GrouponAttendStatus.WAITTING
    ).all()
    # Load every group's paid sponsor detail in a single query.
    groupon_attend_ids = [g.id for g in groupon_attends]
    groupon_attend_details = GrouponAttendDetail.objects.filter(
        groupon_attend_id__in=groupon_attend_ids,
        is_sponsor=True,
        status=GrouponAttendLineStatus.PAID,
    ).all()
    map_attend_customer = {
        detail.groupon_attend_id: detail.customer
        for detail in groupon_attend_details
    }
    # Attach the sponsor to each attend.
    for groupon_attend in groupon_attends:
        # Robustness fix: .get() yields sponsor=None instead of raising
        # KeyError for a waiting group whose sponsor detail is missing or
        # not yet PAID, which would have broken the whole listing.
        groupon_attend.sponsor = map_attend_customer.get(groupon_attend.id)
    return True, groupon_attends
def list_paid_details_by_groupon_attend_id(groupon_attend_id: int):
    """
    List every paid detail row of one groupon attend.

    :param groupon_attend_id: attend (group) id
    :return: PAID GrouponAttendDetail rows
    """
    return GrouponAttendDetail.objects.filter(
        groupon_attend_id=groupon_attend_id,
        status=GrouponAttendLineStatus.PAID,
    ).all()
def list_unpaid_details_by_groupon_attend_id(groupon_attend_id: int):
    """
    List every unpaid detail row of one groupon attend.

    :param groupon_attend_id: attend (group) id
    :return: UNPAID GrouponAttendDetail rows
    """
    return GrouponAttendDetail.objects.filter(
        groupon_attend_id=groupon_attend_id,
        status=GrouponAttendLineStatus.UNPAID,
    ).all()
def list_created_groupon_attends_by_groupon_id(groupon_id: int):
    """
    List an activity's attends that are still in CREATED state.

    :param groupon_id: activity id
    :return: CREATED GrouponAttend rows
    """
    return GrouponAttend.objects.filter(
        groupon_id=groupon_id,
        status=GrouponAttendStatus.CREATED,
    ).all()
def list_groupon_attends_by_groupon(
    groupon: Groupon, states: list,
):
    """
    List a groupon activity's attends in the given states, attaching each
    attend's sponsor.

    :param groupon: the activity
    :param states: attend states to include; an empty list returns []
    :return: attends with a .sponsor attribute set (customer id, or None)
    """
    if not states:
        return []
    groupon_attends = GrouponAttend.objects.filter(
        groupon_id=groupon.id, status__in=states
    ).all()
    # Load every group's paid sponsor detail in a single query.
    groupon_attend_ids = [g.id for g in groupon_attends]
    groupon_attend_details = GrouponAttendDetail.objects.filter(
        groupon_attend_id__in=groupon_attend_ids,
        status=GrouponAttendLineStatus.PAID,
        is_sponsor=True,
    ).all()
    # NOTE(review): this maps attends to customer_id while
    # list_waitting_groupon_attends maps to the Customer object —
    # confirm which shape callers of each expect.
    map_attend_customer = {
        detail.groupon_attend_id: detail.customer_id
        for detail in groupon_attend_details
    }
    # Attach the sponsor to each attend.
    for groupon_attend in groupon_attends:
        # Robustness fix: .get() yields sponsor=None instead of raising
        # KeyError when an attend has no paid sponsor detail yet.
        groupon_attend.sponsor = map_attend_customer.get(groupon_attend.id)
    return groupon_attends
def list_alive_groupon_by_product_ids(product_ids: list):
    """
    Collect the ids of products that have a groupon running now or scheduled
    for the future.

    :param product_ids: candidate product ids
    :return: set of product ids with an alive (ON, not yet ended) groupon
    """
    alive_groupons = Groupon.objects.filter(
        product_id__in=product_ids,
        status=GrouponStatus.ON,
        to_datetime__gte=make_aware(datetime.datetime.now()),
    ).all()
    return {g.product_id for g in alive_groupons}
def count_groupon_attend_by_groupon_id_and_customer_id(
    groupon_id: int, customer_id: int
):
    """
    Count how many times one customer has joined a given groupon activity.
    Only WAITTING/SUCCEEDED groups and PAID/UNPAID details are counted.

    :param groupon_id: groupon activity id
    :param customer_id: customer id
    :return: number of qualifying attend-detail rows
    """
    total_attend_count = (
        GrouponAttendDetail.objects.filter(
            groupon_attend__groupon_id=groupon_id,
            groupon_attend__status__in=[
                GrouponAttendStatus.WAITTING, GrouponAttendStatus.SUCCEEDED
            ],
            customer_id=customer_id,
            status__in=[GrouponAttendLineStatus.PAID, GrouponAttendLineStatus.UNPAID]
        )
        .count()
    )
    return total_attend_count<file_sep>/wsc_django/wsc_django/apps/logs/constant.py
class OperateLogModule:
    """Module ids for the unified operate-log table."""
    # DASHBORD = 1  (commented out — currently unused)
    ORDER = 2
    PRODUCT = 3
    # CUSTOMER = 4  (commented out — currently unused)
    PROMOTION = 5
    STAFF = 6
    CONFIG = 7
class OrderLogType:
    """Order log type ids (display names live in ORDER_LOG_TYPE)."""
    PRINT = 1  # print order
    DIRECT = 2  # one-click complete
    CONFIRM = 3  # start processing / confirm order
    FINISH = 4  # finish order
    REFUND = 5  # refund
    HOME_MINIMUM_ORDER_AMOUNT = 6  # delivery mode: minimum order amount
    HOME_DELIVERY_AMOUNT = 7  # delivery mode: delivery fee
    HOME_MINIMUM_FREE_AMOUNT = 8  # delivery mode: minimum amount for free delivery
    PICK_SERVICE_AMOUNT = 9  # pickup mode: service fee
    PICK_MINIMUM_FREE_AMOUNT = 10  # pickup mode: minimum amount waiving the service fee
    REFUND_FAIL = 11  # refund failed
    _SYS_PRINT = 99  # internal only; not exposed, not used for log records
    _SYS_REFUND = 98  # internal only; not exposed, not used for log records
# Maps an operator-initiated order log type to its system (no-operator)
# variant. NOTE(review): REFUND_FAIL maps to itself rather than to a _SYS_*
# constant — confirm this is intentional.
MAP_NO_OPERATOR_ORDER_TYPE = {
    OrderLogType.PRINT: OrderLogType._SYS_PRINT,
    OrderLogType.REFUND: OrderLogType._SYS_REFUND,
    OrderLogType.REFUND_FAIL: OrderLogType.REFUND_FAIL,
}
# Human-readable (Chinese) display names for each order log type.
ORDER_LOG_TYPE = {
    OrderLogType.PRINT: "打印订单",
    OrderLogType.DIRECT: "一键完成",
    OrderLogType.CONFIRM: "开始处理",
    OrderLogType.FINISH: "完成订单",
    OrderLogType.REFUND: "订单退款",
    OrderLogType.HOME_MINIMUM_ORDER_AMOUNT: "起送金额",
    OrderLogType.HOME_DELIVERY_AMOUNT: "配送费",
    OrderLogType.HOME_MINIMUM_FREE_AMOUNT: "免配送费金额",
    OrderLogType.PICK_SERVICE_AMOUNT: "服务费",
    OrderLogType.PICK_MINIMUM_FREE_AMOUNT: "免服务费金额",
    OrderLogType._SYS_PRINT: "系统自动打印订单",
    OrderLogType._SYS_REFUND: "拼团失败,系统自动退款",
    OrderLogType.REFUND_FAIL: "退款失败,系统自动退款失败",
}
class StaffLogType:
    """Staff log type ids (display names live in STAFF_LOG_TYPE)."""
    ADD_STAFF = 1
    DELETE_STAFF = 2
# Human-readable (Chinese) display names for each staff log type.
STAFF_LOG_TYPE = {StaffLogType.ADD_STAFF: "添加员工", StaffLogType.DELETE_STAFF: "删除员工"}
class ProductLogType:
    """Product operation log type ids (display names live in PRODUCT_LOG_TYPE)."""
    ADD_PRODUCT = 1
    DELETE_PRODUCT = 2
    ADD_PRODUCT_GROUP = 3
    DELETE_PRODUCT_GROUP = 4
    UPDATE_PRODUCT_GROUP = 5
    ON_PRODUCT = 6
    OFF_PRODUCT = 7
# Human-readable (Chinese) display names for each product log type.
PRODUCT_LOG_TYPE = {
    ProductLogType.ADD_PRODUCT: "添加货品",
    ProductLogType.DELETE_PRODUCT: "删除货品",
    ProductLogType.ADD_PRODUCT_GROUP: "添加货品分组",
    ProductLogType.DELETE_PRODUCT_GROUP: "删除货品分组",
    ProductLogType.UPDATE_PRODUCT_GROUP: "修改货品分组",
    ProductLogType.ON_PRODUCT: "上架货品",
    ProductLogType.OFF_PRODUCT: "下架货品",
}
class ConfigLogType:
    """Settings-module log type ids (display names live in CONFIG_LOG_TYPE)."""
    SHOP_NAME = 1
    SHOP_PHONE = 2
    PRINTER_SET = 3
# Human-readable (Chinese) display names for each config log type.
CONFIG_LOG_TYPE = {
    ConfigLogType.SHOP_NAME: "店铺名称",
    ConfigLogType.SHOP_PHONE: "联系电话",
    ConfigLogType.PRINTER_SET: "打印机设置",
}
class PromotionLogType:
    """Promotion-module log type ids (display names live in PROMOTION_LOG_TYPE)."""
    ADD_GROUPON = 1
    STOP_GROUPON = 2
    UPDATE_GROUPON = 3
# Human-readable (Chinese) display names for each promotion log type.
PROMOTION_LOG_TYPE = {
    PromotionLogType.ADD_GROUPON: "新建拼团",
    PromotionLogType.STOP_GROUPON: "停用拼团",
    PromotionLogType.UPDATE_GROUPON: "编辑拼团",
}
<file_sep>/wsc_django/wsc_django/apps/settings.py
"""
Django settings for wsc_django project.
Generated by 'django-admin startproject' using Django 1.11.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import datetime
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
UTILS_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
#设置python导包环境
import sys
sys.path.insert(0, os.path.join(BASE_DIR, "apps"))
sys.path.insert(1, UTILS_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): a real-looking secret key is committed here — rotate it and
# load it from the environment for any production deployment.
SECRET_KEY = 'pspe=jr1_77&m+zqzpyr@7$vl*@s$2v+d*=%0*lauoy+z936sr'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG=True with ALLOWED_HOSTS=['*'] must not reach production.
DEBUG = True
ALLOWED_HOSTS = ['*']
# 告知Django认证系统使用我们自定义的模型类。
AUTH_USER_MODEL = 'user.User'
# cookies相关
AUTH_COOKIE_EXPIRE_DAYS = 60*60*24*7
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 第三方应用
'rest_framework',
'corsheaders', # cors
'channels', # websocket
# 本地应用
'config.apps.ConfigConfig',
'customer.apps.CustomerConfig',
'dashboard.apps.DashboardConfig',
'delivery.apps.DeliveryConfig',
'groupon.apps.GrouponConfig',
'logs.apps.LogsConfig',
'order.apps.OrderConfig',
'payment.apps.PaymentConfig',
'printer.apps.PrinterConfig',
'product.apps.ProductConfig',
'promotion.apps.PromotionConfig',
'pvuv.apps.PvuvConfig',
'shop.apps.ShopConfig',
'staff.apps.StaffConfig',
'storage.apps.StorageConfig',
'user.apps.UserConfig',
'ws.apps.WsConfig',
# 测试用
'demo.apps.DemoConfig',
]
# 中间件
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'wsc_django.xMiddleware.middleware.MyMiddleware', # 测试使用,跳过登录
# 'wsc_django.xMiddleware.middleware.ConfigMiddleware', # 请求前和响应后进行一些配置
]
ROOT_URLCONF = 'wsc_django.urls'
# 模板
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'wsc_django.wsgi.application'
# 缓存
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/0",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
},
"session": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
},
"verify_codes": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/2",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
},
"num_generate": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/3",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
},
"subscribe": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/10",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_CACHE_ALIAS = "session"
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'HOST': '127.0.0.1',  # database host
        'PORT': 3306,  # database port
        'USER': 'wsc_django',  # database user
        'PASSWORD': '<PASSWORD>',  # placeholder — inject the real secret via environment/config, never commit it
        'NAME': 'wsc_django',  # database name
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# 指定ASGI的路由地址
ASGI_APPLICATION = 'wsc_django.routing.application'
# channels websocket
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": [('localhost', 6379)],
},
},
}
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
# 本地语言
LANGUAGE_CODE = 'zh-hans'
# 时区
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
'EXCEPTION_HANDLER': 'wsc_django.utils.exceptions.wsc_exception_handler', # 异常处理
}
# redis数据库相关
REDIS_SERVER = "localhost"
REDIS_PORT = "6379"
REDIS_PASSWORD = ""
# jwt相关配置
JWT_AUTH = {
'JWT_EXPIRATION_DELTA': datetime.timedelta(days=1), # 有效期
'JWT_PAYLOAD_HANDLER': 'user.utils.jwt_payload_handler', # 自定义的荷载生成函数
'JWT_RESPONSE_PAYLOAD_HANDLER': 'user.utils.jwt_response_payload_handler', # 自定义的响应体格式
}
# 腾讯云短信
TENCENT_SMS_APPID = "1400515719"
TENCENT_SMS_APPKEY = "0351a7ac6e3f4be41a77dd4626792ea2"
# 云片短信
YUNPIAN_SYSTEM_APIKEY = "<KEY>"
# 微信公众平台相关
MP_APPID = 'wx819299c9d4c7bd24'
MP_APPSECRET = '<KEY>'
MINI_PROGRAM_APPID = 'wxcb39f504138582a6'
MINI_PROGRAM_APPSECRET = '9fab84d89f304f40dec86707e16ceaca'
# CORS
CORS_ORIGIN_WHITELIST = (
    'http://127.0.0.1:8000',
    'http://localhost:8000',
    'http://localhost:3030',
    'http://127.0.0.1:3030',
    'http://192.168.3.11:8000',
    'http://192.168.3.11:80',
    'http://81.68.135.24:3030',
    'ws://localhost:8000',
    'ws://127.0.0.1:8000',
)
# NOTE(review): CORS_ORIGIN_ALLOW_ALL = True makes the whitelist above moot,
# and combined with CORS_ALLOW_CREDENTIALS = True it lets ANY origin send
# credentialed requests — confirm this is intended outside development.
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True  # allow cookies on cross-origin requests
CORS_ALLOW_METHODS = (
    'DELETE',
    'GET',
    'OPTIONS',
    'PATCH',
    'POST',
    'PUT',
    'VIEW',
)
# 利楚商务测试服务器
LCSW_CALLBACK_HOST = "http://127.0.0.1:8000"
LCSW_HANDLE_HOST = "https://pay.lcsw.cn/lcsw"
LCSW_INST_NO = "52100011"
LCSW_INST_KEY = "<KEY>"
# 前端商城域名
WSC_HOST_NAME = ""
# 七牛云相关
QINIU_ACCESS_KEY = "<KEY>
QINIU_SECRET_KEY = "<KEY>"
QINIU_BUCKET_SHOP_IMG = "shopimg"
QINIU_SHOP_IMG_HOST = "http://img.senguo.cc/"
# 百度AI开放平台 10464550
BAIDU_APIKEY = "<KEY>"
BAIDU_SECRETKEY = "<KEY>"
# Cookie settings.
# Bug fix: a cookie Domain attribute must be a bare host name (RFC 6265);
# the old value included the "http://" scheme, which makes the attribute
# invalid so browsers reject or ignore it.
AUTH_COOKIE_DOMAIN = "hzhst1314.cn"
AUTH_COOKIE_EXPIRE = 60*60*24*7  # 7 days, in seconds
# 腾讯云COS
TENCENT_COS_SECRETID = '<KEY>'
TENCENT_COS_SECRETKEY = '<KEY>'
TENCENT_COS_IMG_HOST = 'https://zhihao-1300126182.cos.ap-nanjing.myqcloud.com/'<file_sep>/wsc_django/wsc_django/apps/staff/views.py
from rest_framework import status
from rest_framework.response import Response
from webargs import fields, validate
from webargs.djangoparser import use_args
from logs.constant import StaffLogType
from staff.constant import StaffApplyStatus, StaffRole, StaffStatus
from staff.interface import create_staff_log_interface
from staff.serializers import (
StaffSerializer,
StaffApplyCreateSerializer,
StaffApplySerializer,
)
from staff.services import (
get_staff_by_id_and_shop_id,
get_staff_apply_by_shop_id_and_id,
get_staff_by_user_id_and_shop_id,
get_staff_apply_by_user_id_and_shop_id,
list_staff_apply_by_shop_id,
expire_staff_apply_by_staff,
list_staff_by_shop_id,
cal_all_roles_without_super,
cal_all_permission,
)
from wsc_django.utils.constant import PHONE_RE
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import StaffBaseView, MallBaseView, AdminBaseView
# Precompute the aggregate role/permission masks once at import time; they
# bound the "roles"/"permissions" request arguments validated below.
all_roles = cal_all_roles_without_super()
all_permission = cal_all_permission()
class StaffApplyView(MallBaseView):
    """Mall backend - staff - submit a staff application & fetch application info."""
    def get_tmp_class(self, status):
        """Build a throwaway object carrying only a .status, for serialization."""
        class TMP:
            def __init__(self, status):
                self.status = status
        return TMP(status)
    def get(self, request, shop_code):
        # Return the current user's application status for this shop.
        user = self.current_user
        self._set_current_shop(request, shop_code)
        current_shop = self.current_shop
        staff_apply = get_staff_apply_by_user_id_and_shop_id(user.id, current_shop.id)
        # No application record means either the super admin or a first-time applicant.
        if not staff_apply:
            # Super admin: report PASS without a stored record.
            if current_shop.super_admin_id == user.id:
                staff_apply = self.get_tmp_class(StaffApplyStatus.PASS)
            else:
                # NOTE(review): "UNAPPlY" (lowercase l) looks like a typo in
                # staff.constant — confirm and rename there if so.
                staff_apply = self.get_tmp_class(StaffApplyStatus.UNAPPlY)
        serializer = StaffApplySerializer(staff_apply)
        return self.send_success(data=serializer.data, shop_info={"shop_name":current_shop.shop_name})
    @use_args(
        {
            "realname": fields.String(
                required=True, validate=[validate.Length(1, 64)], comment="真实姓名"
            ),
            "phone": fields.String(
                required=False,
                validate=[validate.Regexp(PHONE_RE)],
                comment="手机号,已绑定的时候是不需要的",
            ),
            "code": fields.String(
                required=False, validate=[validate.Regexp(r"^[0-9]{4}$")], comment="验证码"
            ),
            "birthday": fields.Date(required=False, allow_none=True, comment="生日"),
        },
        location="json"
    )
    def post(self, request, args, shop_code):
        # Submit a new staff application for the current shop.
        user = self.current_user
        self._set_current_shop(request, shop_code)
        current_shop = self.current_shop
        # Reject if the user already is a staff member of this shop.
        staff = get_staff_by_user_id_and_shop_id(user.id, current_shop.id)
        if staff:
            return self.send_fail(error_text="已经为该店铺的员工")
        # Reject duplicate pending applications.
        staff_apply = get_staff_apply_by_user_id_and_shop_id(user.id, current_shop.id)
        if staff_apply:
            return self.send_fail(error_text="已提交申请,无需重复提交")
        serializer = StaffApplyCreateSerializer(data=args, context={'self':self})
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        staff_apply = serializer.save()
        data = {
            "staff_apply_id": staff_apply.id,
            "status": staff_apply.status,
            "expired": staff_apply.expired,
            "user_id": staff_apply.user_id
        }
        return self.send_success(data=data)
class AdminStaffApplyView(AdminBaseView):
    """Admin backend - staff - application list & approve an application."""
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_STAFF])
    def get(self, request):
        # Paginated list of this shop's staff applications.
        shop_id = self.current_shop.id
        staff_apply_list = list_staff_apply_by_shop_id(shop_id)
        staff_apply_list = self._get_paginated_data(staff_apply_list, StaffApplySerializer)
        return self.send_success(data_list=staff_apply_list)
    # NOTE(review): this decorator uses StaffBaseView while the class and the
    # other handler use AdminBaseView — confirm whether they share behavior
    # or this should be AdminBaseView.permission_required for consistency.
    @StaffBaseView.permission_required([StaffBaseView.staff_permissions.ADMIN_STAFF])
    @use_args(
        {
            "staff_apply_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="申请ID"
            ),
            "position": fields.String(
                required=False,
                validate=[validate.Length(0, 16)],
                allow_none=True,
                comment="员工职位",
            ),
            "entry_date": fields.Date(required=False, allow_none=True, comment="入职日期"),
            "remark": fields.String(
                required=False,
                validate=[validate.Length(0, 20)],
                allow_none=True,
                comment="备注",
            ),
            "roles": fields.Integer(
                required=True, validate=[validate.Range(1, all_roles)], comment="角色"
            ),
            "permissions": fields.Integer(
                required=True,
                validate=[validate.Range(1, all_permission)],
                comment="权限",
            )
        },
        location="json"
    )
    def put(self, request, args):
        # Approve an application and create (or restore) the staff record.
        shop_id = self.current_shop.id
        staff_apply_id = args.pop("staff_apply_id")
        staff_apply = get_staff_apply_by_shop_id_and_id(shop_id, staff_apply_id)
        if not staff_apply:
            return self.send_fail(error_text='员工申请记录不存在')
        staff_apply_serializer = StaffApplySerializer(staff_apply, args)
        # No validation needed here; is_valid() is called only so save() can run.
        staff_apply_serializer.is_valid()
        staff_apply_serializer.save()
        # Application approved: create the staff record.
        staff_info = {"shop_id": shop_id, "user_id": staff_apply.user.id}
        staff_info.update(args)
        # Check whether a staff record (possibly soft-deleted) already exists.
        staff = get_staff_by_user_id_and_shop_id(
            staff_apply.user.id, shop_id, filter_delete=False
        )
        if not staff:
            staff_serializer = StaffSerializer(data=staff_info)
            if not staff_serializer.is_valid():
                return self.send_error(
                    error_message=staff_serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
                )
            staff = staff_serializer.save()
        elif staff.status == StaffStatus.NORMAL:
            return self.send_fail(error_text="已经为该店铺的员工")
        else:
            # The staff record was soft-deleted; restore it to NORMAL.
            staff_info["status"] = StaffStatus.NORMAL
            for k, v in staff_info.items():
                setattr(staff, k, v)
            staff.save()
        # Write the operation log.
        log_info = {
            "shop_id": shop_id,
            "operator_id": self.current_user.id,
            "operate_type": StaffLogType.ADD_STAFF,
            "staff_id": staff.id,
        }
        create_staff_log_interface(log_info)
        return self.send_success(staff_id=staff.id)
class AdminStaffView(AdminBaseView):
    """Admin backend - staff - staff detail & edit staff & delete staff."""
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_STAFF])
    @use_args(
        {
            "staff_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="员工ID"
            )
        },
        location="query"
    )
    def get(self, request, args):
        # Return one staff member's detail.
        staff_id = args.get("staff_id")
        # Bug fix: the old code assigned the Shop object itself
        # (shop_id = self.current_shop); every other handler in this class
        # uses self.current_shop.id, so the id-based lookup received the
        # wrong type here.
        shop_id = self.current_shop.id
        staff = get_staff_by_id_and_shop_id(staff_id, shop_id)
        if not staff:
            return self.send_fail(error_text="员工不存在")
        serializer = StaffSerializer(staff)
        return self.send_success(data=serializer.data)
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_STAFF])
    @use_args(
        {
            "staff_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="员工ID"
            )
        },
        location="json"
    )
    def delete(self, request, args):
        # Soft-delete a staff member (the super admin cannot be deleted).
        staff_id = args.get("staff_id")
        shop_id = self.current_shop.id
        staff = get_staff_by_id_and_shop_id(staff_id, shop_id)
        if not staff:
            return self.send_fail(error_text="员工不存在")
        elif staff.roles == StaffRole.SHOP_SUPER_ADMIN:
            return self.send_fail(error_text="超管不可删除")
        # Expire the old application so the user may apply again.
        expire_staff_apply_by_staff(staff.shop.id, staff.user.id)
        # Soft delete: flip the status instead of removing the row.
        staff.status = StaffStatus.DELETED
        staff.save()
        # Write the operation log.
        log_info = {
            "shop_id": shop_id,
            "operator_id": self.current_user.id,
            "operate_type": StaffLogType.DELETE_STAFF,
            "staff_id": staff.id,
        }
        create_staff_log_interface(log_info)
        return self.send_success()
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_STAFF])
    @use_args(
        {
            "staff_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="员工ID"
            ),
            "position": fields.String(
                required=False,
                validate=[validate.Length(0, 16)],
                allow_none=True,
                comment="员工职位",
            ),
            "entry_date": fields.Date(required=False, allow_none=True, comment="入职日期"),
            "remark": fields.String(
                required=False,
                validate=[validate.Length(0, 20)],
                allow_none=True,
                comment="备注",
            ),
            "roles": fields.Integer(
                required=True, validate=[validate.Range(0, all_roles)], comment="角色"
            ),
            "permissions": fields.Integer(
                required=True,
                validate=[validate.Range(0, all_permission)],
                comment="权限",
            ),
        },
        location="json"
    )
    def put(self, request, args):
        # Edit a staff member's info.
        staff_id = args.pop("staff_id")
        shop_id = self.current_shop.id
        staff = get_staff_by_id_and_shop_id(staff_id, shop_id)
        if not staff:
            return self.send_fail(error_text="员工不存在")
        # The super admin may only be edited by themselves.
        elif staff.roles == StaffRole.SHOP_SUPER_ADMIN:
            if self.current_user.id != staff.user_id:
                return self.send_fail(error_text="超管信息仅自己可以编辑")
        serializer = StaffSerializer(staff, data=args)
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success()
class AdminStaffListView(AdminBaseView):
    """Admin backend - staff - staff list."""
    pagination_class = StandardResultsSetPagination
    @AdminBaseView.permission_required([AdminBaseView.staff_permissions.ADMIN_STAFF])
    @use_args(
        {
            "keyword": fields.String(required=False, comment="搜索关键字(姓名或手机号)"),
            "page": fields.Integer(required=False, missing=1, comment="页码"),
            "page_size": fields.Integer(
                required=False, missing=20, validate=[validate.Range(1)], comment="每页条数"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        # List this shop's staff, optionally filtered by a name/phone keyword.
        page = args.get("page")
        shop_id = self.current_shop.id
        staff_list = list_staff_by_shop_id(shop_id, args.get("keyword"))
        # page == -1 (any non-positive page) disables pagination.
        if page > 0:
            staff_list = self._get_paginated_data(staff_list, StaffSerializer)
        else:
            # Wrap in {"results": ...} to match the frontend's expected shape.
            staff_list = {'results': StaffSerializer(staff_list, many=True).data}
        return self.send_success(data_list=staff_list)
<file_sep>/wsc_django/wsc_django/apps/ws/routing.py
from django.urls import path
from ws import consumers
# WebSocket URL routing for the channels ASGI application.
websocket_urlpatterns = [
    path('ws/admin/websocket/', consumers.AdminWebSocketConsumer.as_asgi()),  # admin-backend websocket
]<file_sep>/wsc_django/wsc_django/apps/order/migrations/0003_auto_20210606_2054.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations; avoid hand-editing applied
    # migrations — create a new migration for further schema changes.
    # NOTE(review): `initial = True` on a migration that depends on earlier
    # migrations is unusual — confirm it was generated this way.
    initial = True
    dependencies = [
        ('shop', '0001_initial'),
        ('groupon', '0003_groupon_shop'),
        ('order', '0002_orderdetail_product'),
        ('customer', '0001_initial'),
        ('delivery', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='orderdetail',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='对应的店铺对象'),
        ),
        migrations.AddField(
            model_name='orderaddress',
            name='order',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='order.order', verbose_name='对应的订单对象'),
        ),
        migrations.AddField(
            model_name='order',
            name='customer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='customer.customer', verbose_name='订单对应客户对象'),
        ),
        migrations.AddField(
            model_name='order',
            name='delivery',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='delivery.delivery', verbose_name='订单对应配送记录对象'),
        ),
        migrations.AddField(
            model_name='order',
            name='groupon_attend',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='groupon.grouponattend', verbose_name='订单对应拼团参与对象'),
        ),
        migrations.AddField(
            model_name='order',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='订单对应的店铺对象'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/logs/migrations/0002_auto_20210606_2054.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations; avoid hand-editing applied
    # migrations — create a new migration for further schema changes.
    # NOTE(review): `initial = True` on a migration that depends on
    # logs.0001_initial is unusual — confirm it was generated this way.
    initial = True
    dependencies = [
        ('logs', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AddField(
            model_name='stafflog',
            name='operator',
            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='操作人'),
        ),
        migrations.AddField(
            model_name='promotionlog',
            name='operator',
            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='操作人'),
        ),
        migrations.AddField(
            model_name='productlog',
            name='operator',
            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='操作人'),
        ),
        migrations.AddField(
            model_name='orderlog',
            name='operator',
            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='操作人'),
        ),
        migrations.AddField(
            model_name='operatelogunify',
            name='operator',
            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='操作人'),
        ),
        migrations.AddField(
            model_name='configlog',
            name='operator',
            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='操作人'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/shop/views.py
from webargs import fields, validate
from rest_framework import status
from webargs.djangoparser import use_args
from config.serializers import SomeConfigSerializer, ShareSetupSerializer
from delivery.serializers import AdminDeliveryConfigSerializer
from settings import AUTH_COOKIE_DOMAIN, AUTH_COOKIE_EXPIRE
from shop.constant import ShopStatus, ShopVerifyActive, ShopVerifyType, ShopPayActive, ShopPayChannelType
from staff.constant import StaffRole
from staff.serializers import StaffSerializer
from user.constant import USER_OUTPUT_CONSTANT
from user.serializers import UserSerializer
from wsc_django.utils.core import Baidu
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import UserBaseView, AdminBaseView, MallBaseView, SuperBaseView
from shop.serializers import (
ShopCreateSerializer,
ShopPayChannelSerializer,
SuperShopSerializer,
SuperShopListSerializer,
SuperShopStatusSerializer,
AdminShopSerializer,
MallShopSerializer,
SuperShopVerifySerializer,
)
from shop.services import (
get_shop_by_shop_id,
list_shop_by_shop_ids,
list_shop_reject_reason,
list_shop_by_shop_status,
list_shop_creator_history_realname,
)
from shop.interface import (
list_staff_by_user_id_interface,
get_user_by_id_interface,
list_user_by_ids_interface,
get_customer_by_user_id_and_shop_id_interface,
get_some_config_by_shop_id_interface, get_delivery_config_by_shop_id_interface, get_share_setup_by_id_interface,
count_product_by_shop_ids_interface)
class SuperShopView(SuperBaseView):
    """Super-admin backend - shop - shop creation & shop detail.

    Both endpoints require a valid ``sign`` built from (user_id, timestamp)
    and a logged-in user.
    """
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_name": fields.String(
                required=True, validate=[validate.Length(0, 128)], comment="店铺名"
            ),
            "shop_img": fields.String(
                required=True,
                validate=[validate.Length(0, 300)],
                comment="店铺logo",
            ),
            "shop_province": fields.Integer(required=True, comment="省份编码"),
            "shop_city": fields.Integer(required=True, comment="城市编码"),
            "shop_county": fields.Integer(required=True, comment="区份编码"),
            "shop_address": fields.String(
                required=True,
                validate=[validate.Length(0, 100)],
                comment="详细地址",
            ),
            "description": fields.String(
                required=True, validate=[validate.Length(0, 200)], comment="描述"
            ),
            "inviter_phone": fields.String(
                required=False,
                validate=[validate.Length(0, 32)],
                comment="推荐人手机号",
            ),
            "longitude": fields.Decimal(
                required=False,
                data_key="lng",
                allow_none=True,
                validate=[validate.Range(-180, 180)],
                comment="经度",
            ),
            "latitude": fields.Decimal(
                required=False,
                data_key="lat",
                allow_none=True,
                validate=[validate.Range(-90, 90)],
                comment="纬度",
            ),
            "realname": fields.String(
                required=False,
                allow_none=True,
                validate=[validate.Length(0, 32)],
                comment="历史真实姓名",
            ),
        },
        location="json",
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def post(self, request, args):
        """Create a shop owned by the currently logged-in user."""
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        serializer = ShopCreateSerializer(data=args, context={'user':user})
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success(data=serializer.data)
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_id": fields.Integer(required=True, comment="商铺id"),
        },
        location="query"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def get(self, request, args):
        """Return the detail of one shop, with its super-admin's user data attached."""
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        shop_id = args.get("shop_id")
        shop = get_shop_by_shop_id(shop_id)
        if not shop:
            return self.send_fail(error_text="店铺不存在")
        # fix: the shop was fetched a second time here with the same id —
        # a redundant duplicate query; reuse the row already loaded above.
        super_admin_data = get_user_by_id_interface(shop.super_admin.id)
        shop.super_admin_data = super_admin_data
        serializer = SuperShopSerializer(shop)
        return self.send_success(data=serializer.data)
class SuperShopListView(SuperBaseView):
    """Super-admin backend - shop - list the shops the current user administers."""
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "role": fields.Integer(required=False, missing=1, comment="访问角色,1:为普通用户,2.为admin用户"),
        },
        location="query"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def get(self, request, args):
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        # Find the staff rows (and thus shops) this user administers.
        staff_list = list_staff_by_user_id_interface(user.id, roles=StaffRole.SHOP_ADMIN)
        if not staff_list:
            return self.send_success(data_list=[])
        # Load the shops referenced by those staff rows.
        shop_ids = [sl.shop_id for sl in staff_list]
        shop_list = list_shop_by_shop_ids(shop_ids, args.get("role", 1))
        # Product counts for every shop, keyed by shop id.
        shop_id_2_product_count = count_product_by_shop_ids_interface(shop_ids)
        # Attach extra attributes the serializer exposes.
        for sl in shop_list:
            sl.product_species_count = shop_id_2_product_count.get(sl.id, 0)
            sl.is_super_admin = 1 if sl.super_admin_id == user.id else 0
        serializer = SuperShopListSerializer(shop_list, many=True)
        return self.send_success(data_list=serializer.data)
class SuperShopChoiceView(SuperBaseView):
    """Super-admin backend - shop - select the active shop (sets the shop cookie)."""
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_id": fields.Integer(required=True, comment="商铺id"),
        },
        location="json"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def post(self, request, args):
        # Still verify that the caller is logged in.
        user = self._get_current_user(request)
        if not user:
            return self.send_error(
                status_code=status.HTTP_401_UNAUTHORIZED, error_message={"error_text": "用户未登录"}
            )
        shop_id = args.get("shop_id")
        shop = get_shop_by_shop_id(shop_id)
        # Stashed on the request so finalize_response can set the cookie.
        request.shop = shop
        if not shop:
            return self.send_fail(error_text="商铺id有误或商铺不存在")
        return self.send_success()
    def finalize_response(self, request, response, *args, **kwargs):
        """Set the shop-id cookie after a successful selection."""
        response = super().finalize_response(request, response, *args, **kwargs)
        # shop_id was valid — request.shop is only read on the success path.
        if response.data.get("success"):
            response.delete_cookie("wsc_shop_id")
            response.set_signed_cookie(
                "wsc_shop_id",
                request.shop.id,
                salt="hzh_wsc_shop_id",
                max_age=AUTH_COOKIE_EXPIRE,
            )
        return response
class SuperShopStatusView(UserBaseView):
    """Super-admin backend - list shops by shop_status & change a shop's shop_status."""
    pagination_class = StandardResultsSetPagination
    # Placeholder operator info attached to rows that need an "operator" field.
    class Operator:
        operate_id = 1
        operate_name = ""
        operate_img = ""
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_status": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf(
                        [ShopStatus.CHECKING, ShopStatus.NORMAL, ShopStatus.REJECTED]
                    )
                ],
                comment="店铺状态",
            ),
        },
        location="query"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def get(self, request, args):
        shop_status = args.get("shop_status")
        shop_list = list_shop_by_shop_status(shop_status)
        # Collect creator ids and shop ids for batch lookups below.
        creator_ids = set()
        shop_ids = set()
        for slq in shop_list:
            creator_ids.add(slq.super_admin_id)
            shop_ids.add(slq.id)
        creator_ids = list(creator_ids)
        shop_ids = list(shop_ids)
        # Shop creators, keyed by user id.
        creator_list = list_user_by_ids_interface(creator_ids)
        creator_id_2_creator = {clq.id: clq for clq in creator_list}
        # Historical real names of shop creators, keyed by shop id.
        shop_realname_list = list_shop_creator_history_realname(shop_ids)
        shop_realname_map = {srl.id: srl.realname for srl in shop_realname_list}
        # Each status gets a different set of extra attributes:
        # CHECKING: creator + realname; NORMAL: + operator;
        # REJECTED: + operator + reject_reason.
        if shop_status == ShopStatus.CHECKING:
            for slq in shop_list:
                slq.creator = creator_id_2_creator.get(slq.super_admin_id)
                slq.current_realname = shop_realname_map.get(slq.id, "")
        elif shop_status == ShopStatus.NORMAL:
            for slq in shop_list:
                slq.creator = creator_id_2_creator.get(slq.super_admin_id)
                slq.current_realname = shop_realname_map.get(slq.id, "")
                slq.operator = self.Operator
        else:
            reject_reason_list = list_shop_reject_reason(shop_ids)
            shop_id_2_reject_reason = {
                rrl.id: rrl.reject_reason for rrl in reject_reason_list
            }
            for slq in shop_list:
                slq.creator = creator_id_2_creator.get(slq.super_admin_id)
                slq.current_realname = shop_realname_map.get(slq.id, "")
                slq.operator = self.Operator
                slq.reject_reason = shop_id_2_reject_reason.get(slq.id, "")
        shop_list = self._get_paginated_data(shop_list, SuperShopStatusSerializer)
        return self.send_success(data_list=shop_list)
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_id": fields.Integer(
                required=True, validate=[validate.Range(0)], comment="店铺ID"
            ),
            "shop_status": fields.Integer(
                required=True,
                validate=[validate.OneOf([ShopStatus.NORMAL, ShopStatus.REJECTED])],
                comment="店铺状态",
            ),
            "reject_reason": fields.String(
                required=False,
                missing="",
                validate=[validate.Length(0, 200)],
                comment="拒绝理由,尽在拒绝的时候需要",
            ),
        },
        location="json",
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def put(self, request, args):
        """Approve (NORMAL) or reject (REJECTED, with reason) a shop."""
        shop_id = args.pop("shop_id")
        shop = get_shop_by_shop_id(shop_id)
        if not shop:
            return self.send_fail(error_text="店铺不存在")
        # Persist the status change through the serializer.
        serializer = SuperShopStatusSerializer(shop, data=args)
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success()
class SuperShopVerifyView(UserBaseView):
    """Super-admin backend - update a shop's verification status/type/content."""
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="店铺ID"
            ),
            "verify_status": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf(
                        [
                            ShopVerifyActive.YES,
                            ShopVerifyActive.CHECKING,
                            ShopVerifyActive.REJECTED,
                        ]
                    )
                ],
                comment="店铺认证状态",
            ),
            "verify_type": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf(
                        [ShopVerifyType.ENTERPRISE, ShopVerifyType.INDIVIDUAL]
                    )
                ],
                comment="店铺认证类型,个人/企业",
            ),
            "verify_content": fields.String(
                required=True, validate=[validate.Length(0, 200)], comment="认证内容"
            ),
        },
        location="json"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def put(self, request, args):
        # shop_id is popped so the remaining args match the serializer fields.
        shop = get_shop_by_shop_id(args.pop("shop_id"))
        if not shop:
            return self.send_fail(error_text="店铺不存在")
        serializer = SuperShopVerifySerializer(shop, data=args)
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return self.send_success()
class SuperShopPayVerifyView(UserBaseView):
    """Super-admin backend - update a shop's payment verification status.

    When the status moves to YES, an LCSW pay channel is created from the
    lc_* parameters.
    """
    @use_args(
        {
            "sign": fields.String(required=True, comment="加密认证"),
            "timestamp": fields.Integer(required=True, comment="时间戳"),
            "user_id": fields.Integer(required=True, comment="用户ID"),
            "shop_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="店铺ID"
            ),
            "payment_status": fields.Integer(
                required=True,
                validate=[
                    validate.OneOf(
                        [
                            ShopPayActive.YES,
                            ShopPayActive.CHECKING,
                            ShopPayActive.REJECTED,
                        ]
                    )
                ],
                comment="认证状态",
            ),
            "lc_merchant_no": fields.String(
                required=False, validate=[validate.Length(0, 15)], comment="商户号"
            ),
            "lc_terminal_id": fields.String(
                required=False, validate=[validate.Length(0, 50)], comment="终端号"
            ),
            "lc_access_token": fields.String(
                required=False,
                validate=[validate.Length(0, 32)],
                comment="扫呗access_token",
            ),
        },
        location="json"
    )
    @SuperBaseView.validate_sign("sign", ("user_id", "timestamp"))
    def post(self, request, args):
        shop = get_shop_by_shop_id(args.get("shop_id"))
        if not shop:
            return self.send_fail(error_text="店铺不存在")
        payment_status = args.get("payment_status")
        # Reject repeated transitions into the same status.
        if shop.pay_active == payment_status:
            text = (
                "正在审核中"
                if payment_status == ShopPayActive.CHECKING
                else "已通过审核"
                if payment_status == ShopPayActive.YES
                else "已拒绝审核"
            )
            return self.send_fail(error_text="该店铺%s, 请不要重复操作"%text)
        shop.pay_active = payment_status
        # Create the pay channel when the shop passes payment verification.
        if shop.pay_active == ShopPayActive.YES:
            pay_channel_info = {
                "smerchant_no": args.get("lc_merchant_no"),
                "terminal_id1": args.get("lc_terminal_id"),
                "access_token": args.get("lc_access_token"),
                "channel_type": ShopPayChannelType.LCSW,
            }
            serializer = ShopPayChannelSerializer(data=pay_channel_info, context={"shop":shop})
            # fix: the condition was inverted — it returned an error for
            # VALID data and called save() on invalid data (which raises).
            if not serializer.is_valid():
                return self.send_error(
                    error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
                )
            serializer.save()
        # fix: the pay_active change was never persisted before.
        shop.save()
        return self.send_success()
class AdminShopView(AdminBaseView):
    """Merchant backend - shop - current shop, staff/user info and misc config."""
    def get(self, request):
        user = self.current_user
        shop = self.current_shop
        staff = self.current_staff
        # Copy the user's public output fields onto the staff object so one
        # serializer can render both user and staff data together.
        for _ in USER_OUTPUT_CONSTANT:
            setattr(staff, _, getattr(user, _))
        some_config = get_some_config_by_shop_id_interface(shop.id)
        some_config = SomeConfigSerializer(some_config).data
        # Baidu token is handed to the frontend for its own API calls.
        baidu_token = Baidu.get_baidu_token()
        some_token = {"baidu_token": baidu_token}
        shop_serializer = AdminShopSerializer(shop)
        staff_serializer = StaffSerializer(staff)
        return self.send_success(
            shop_data=shop_serializer.data,
            user_data=staff_serializer.data,
            some_config=some_config,
            some_token=some_token,
        )
class MallShopView(MallBaseView):
    """Mall frontend - shop - current shop info plus the visiting customer's data."""
    def get(self, request, shop_code):
        # Resolve the shop from the URL code (404s when missing).
        self._set_current_shop(request, shop_code)
        shop = self.current_shop
        user = self.current_user
        _, delivery_config = get_delivery_config_by_shop_id_interface(shop.id)
        some_config = get_some_config_by_shop_id_interface(shop.id)
        shop_share = get_share_setup_by_id_interface(shop.id)
        shop_serializer = MallShopSerializer(shop)
        customer_serializer = UserSerializer(user)
        shop_info = dict(shop_serializer.data)
        delivery_config_info = AdminDeliveryConfigSerializer(delivery_config).data
        pay_config_info = SomeConfigSerializer(some_config).data
        shop_share_info = ShareSetupSerializer(shop_share).data
        shop_info["delivery_config"] = delivery_config_info
        shop_info["pay_active"] = pay_config_info
        shop_info["shop_share"] = shop_share_info
        # Additionally look up the user's points data for this shop.
        customer = get_customer_by_user_id_and_shop_id_interface(user.id, shop.id)
        customer_info = dict(customer_serializer.data)
        customer_info["points"] = round(float(customer.point), 2) if customer else 0
        customer_info["is_new_customer"] = (
            customer.is_new_customer() if customer else True
        )
        return self.send_success(shop_data=shop_info, user_data=customer_info)
<file_sep>/wsc_django/wsc_django/apps/order/constant.py
# Order status constants
class OrderStatus:
    CANCELED = 0  # canceled
    UNPAID = 1  # awaiting payment
    PAID = 2  # paid, not yet processed
    CONFIRMED = 3  # confirmed, in progress -- awaiting pickup / out for delivery
    FINISHED = 4  # finished
    REFUNDED = 5  # refunded
    REFUND_FAIL = 6  # refund failed
    WAITTING = 11  # waiting for the groupon to fill
# Order payment method constants
class OrderPayType:
    WEIXIN_JSAPI = 1  # WeChat pay
    ON_DELIVERY = 2  # cash on delivery
# Order delivery method constants
class OrderDeliveryMethod:
    HOME_DELIVERY = 1  # delivered to the customer's door
    CUSTOMER_PICK = 2  # customer self-pickup
# Order type constants
class OrderType:
    NORMAL = 1  # ordinary order
    GROUPON = 5  # groupon order
# Order refund type constants
class OrderRefundType:
    WEIXIN_JSAPI_REFUND = 1  # refund via WeChat pay
    UNDERLINE_REFUND = 2  # offline refund
# Maps an activity/event code to the order type it produces.
MAP_EVENT_ORDER_TYPE = {
    0: OrderType.NORMAL,  # no activity -> ordinary order
    1: OrderType.GROUPON,  # groupon activity -> groupon order
}
<file_sep>/wsc_django/wsc_django/apps/pvuv/urls.py
"""
访问记录相关的路由
"""
from django.urls import path, re_path
from pvuv import views
# Admin-side routes.
urlpatterns_admin =[
    path('api/admin/product/browse-records/', views.AdminProductBrowseRecordsView.as_view())  # admin: product browse records
]
# Mall-side routes (shop_code is part of the path).
urlpatterns_mall = [
    re_path(r'api/mall/(?P<shop_code>\w+)/browse-record/$', views.MallBrowseRecord.as_view()),  # mall: create a browse record
]
urlpatterns = urlpatterns_admin + urlpatterns_mall
<file_sep>/wsc_django/wsc_django/apps/celery_tasks/celery_auto_work/tasks.py
""" 自动取消(订单,拼团)异步任务 """
import datetime
import os
# import sentry_sdk
from django.utils.timezone import make_aware
# from sentry_sdk.integrations.celery import CeleryIntegration
from django_redis import get_redis_connection
from config.services import get_receipt_by_shop_id, get_msg_notify_by_shop_id
from groupon.constant import GrouponStatus, GrouponAttendStatus
from order.constant import OrderPayType, OrderRefundType, OrderType
from groupon.services import (
get_shop_groupon_by_id,
get_shop_groupon_attend_by_id,
list_paid_details_by_groupon_attend_id
)
from order.selectors import (
list_waitting_order_by_groupon_attend_id,
list_unpaid_order_by_groupon_attend_id,
)
from order.services import (
cancel_order,
# direct_pay,
refund_order,
get_order_by_shop_id_and_id,
direct_pay)
from promotion.events import GrouponEvent
from promotion.services import publish_product_promotion, get_product_promotion, PRODUCT_PROMOTION_KEY
# from .celery_tplmsg_task import (
# GrouponOrderFailAttendTplMsg,
# GrouponOrderRefundFailTplMsg,
# GrouponOrderSuccessAttendTplMsg,
# )
from celery_tasks.main import app
# sentry_sdk.init(SENTRY_DSN, integrations=[CeleryIntegration()])
@app.task(bind=True, name="auto_cancel_order")
def auto_cancel_order(self, shop_id, order_id):
    """Celery task: auto-cancel an order left unpaid past the 15min window.

    If the plain cancel fails and the order turns out to be a groupon
    order, the groupon attendance is re-validated asynchronously (it may
    already be complete, which is why cancelling failed).
    """
    success, _ = cancel_order(shop_id, order_id)
    if success:
        return
    order = get_order_by_shop_id_and_id(shop_id, order_id)
    if not order:
        return
    elif order.order_type == OrderType.GROUPON:
        # Cancel failed for a groupon order — try settling the attend instead.
        auto_validate_groupon_attend.apply_async(
            args=[order.shop_id, order.groupon_attend_id]
        )
@app.task(bind=True, name="auto_publish_groupon")
def auto_publish_groupon(self, shop_id, groupon_id):
    """Celery task: publish a groupon as a product promotion event.

    Skips (with a log line) when the groupon is missing, not in the ON
    state, or already past its end time.  The promotion is pushed with a
    TTL equal to the remaining lifetime of the groupon.
    """
    now = make_aware(datetime.datetime.now())
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        print("Groupon [id={}] publish failed: {}".format(groupon_id, groupon))
        return
    if groupon.status != GrouponStatus.ON:
        print(
            "Groupon [id={}] publish failed: 状态错误{}".format(
                groupon_id, groupon.status
            )
        )
        return
    if groupon.to_datetime < now:
        print(
            "Groupon [id={}] publish failed: 已过期{}".format(
                groupon_id, groupon.to_datetime
            )
        )
        return
    # Snapshot of the groupon that the promotion event carries.
    content = {
        "id": groupon.id,
        "price": round(float(groupon.price), 2),
        "to_datetime": groupon.to_datetime.strftime("%Y-%m-%d %H:%M:%S"),
        "groupon_type": groupon.groupon_type,
        "success_size": groupon.success_size,
        "quantity_limit": groupon.quantity_limit,
        "succeeded_count": groupon.succeeded_count,
        "success_limit": groupon.success_limit,
        "succeeded_quantity": int(round(groupon.succeeded_quantity)),
    }
    event = GrouponEvent(content)
    # The promotion expires exactly when the groupon does.
    ttl = (groupon.to_datetime - now).total_seconds()
    publish_product_promotion(
        groupon.shop_id, groupon.product_id, event, ttl=int(ttl)
    )
    print("Groupon [id={}] publish success".format(groupon.id))
@app.task(bind=True, name="auto_expire_groupon")
def auto_expire_groupon(self, shop_id, groupon_id):
    """Celery task: mark a groupon as expired once its end time passes.

    Skips (with a log line) when the groupon is missing, already expired,
    or the task fired more than 10 seconds early.
    """
    success, groupon = get_shop_groupon_by_id(shop_id, groupon_id)
    if not success:
        print("Groupon [id={}] expire failed: {}".format(groupon_id, groupon))
        return
    if groupon.status == GrouponStatus.EXPIRED:
        print(
            "Groupon [id={}] expire failed: 状态错误{}".format(
                groupon_id, groupon.status
            )
        )
        return
    # A task firing up to 10s before the deadline still counts as on time.
    if groupon.to_datetime - make_aware(datetime.datetime.now()) > datetime.timedelta(
        seconds=10
    ):
        print(
            "Groupon [id={}] expire failed: 未到过期时间{}".format(
                groupon_id, make_aware(datetime.datetime.now())
            )
        )
        return
    groupon.set_expired()
    groupon.save()
    # fix: this is the success path — the original message said "expire failed".
    print("Groupon [id={}] expire success".format(groupon_id))
@app.task(bind=True, name="auto_validate_groupon_attend")
def auto_validate_groupon_attend(
    self, shop_id: int, groupon_attend_id: int, force: bool = False
):
    """Celery task: validate a groupon attendance; when full, settle by paying its orders.

    :param shop_id: shop owning the groupon
    :param groupon_attend_id: attendance to validate (locked for update)
    :param force: settle even if not every member has paid yet
    :raises ValueError: when the attend is missing, in a wrong state, or
        its paid-member and waiting-order counts disagree
    """
    success, groupon_attend = get_shop_groupon_attend_by_id(
        shop_id, groupon_attend_id, for_update=True
    )
    if not success:
        raise ValueError(groupon_attend)
    if groupon_attend.size < groupon_attend.success_size:
        print("拼团验证: 拼团参与{}还未满员".format(groupon_attend_id))
        return
    if groupon_attend.status != GrouponAttendStatus.WAITTING:
        raise ValueError(
            "拼团验证: 拼团参与{}状态错误{}".format(groupon_attend_id, groupon_attend.status)
        )
    paid_attend_details = list_paid_details_by_groupon_attend_id(groupon_attend.id)
    # Unless forced, wait until every member of the attend has paid.
    if len(paid_attend_details) < groupon_attend.size and not force:
        print(
            "拼团验证: 拼团参与{}还在等待团员支付,当前支付人数{}".format(
                groupon_attend_id, len(paid_attend_details)
            )
        )
        return
    waitting_orders = list_waitting_order_by_groupon_attend_id(groupon_attend.id)
    if len(waitting_orders) != len(paid_attend_details):
        raise ValueError(
            "拼团验证: 拼团参与{}付款人数{}和订单人数{}不匹配".format(
                groupon_attend_id, len(paid_attend_details), len(waitting_orders)
            )
        )
    promotion = get_product_promotion(shop_id, groupon_attend.groupon.product_id)
    pattern = PRODUCT_PROMOTION_KEY.format(
        shop_id=shop_id, product_id=groupon_attend.groupon.product_id
    )
    groupon_attend.set_succeeded()
    groupon_attend.groupon.succeeded_count += 1
    # Mirror the updated counters into the redis promotion hash.
    redis_conn = get_redis_connection("subscribe")
    redis_conn.hset(pattern, "succeeded_count", groupon_attend.groupon.succeeded_count)
    for waitting_order in waitting_orders:
        if promotion and isinstance(promotion, GrouponEvent):
            # Derive the purchased quantity from the order net amount at the
            # promotion price.
            quantity = int(
                round(float(waitting_order.amount_net) / float(promotion.price))
            )
            groupon_attend.groupon.succeeded_quantity += quantity
            redis_conn.hset(
                pattern,
                "succeeded_quantity",
                int(groupon_attend.groupon.succeeded_quantity),
            )
        direct_pay(waitting_order)
    print("拼团验证: 拼团参与{}成团成功".format(groupon_attend_id))
    # NOTE(review): only groupon_attend.save() is called although
    # groupon_attend.groupon's counters were mutated above — confirm the
    # groupon row is persisted elsewhere (e.g. by direct_pay or a signal).
    groupon_attend.save()
    # Groupon succeeded: send the success template message (currently disabled).
    msg_notify = get_msg_notify_by_shop_id(shop_id)
    # if msg_notify.group_success_wx:
    #     for waitting_order in waitting_orders:
    #         GrouponOrderSuccessAttendTplMsg.send(order_id=waitting_order.id)
@app.task(bind=True, name="auto_fail_groupon_attend")
def auto_fail_groupon_attend(self, shop_id: int, groupon_attend_id: int, reason: str):
    """Celery task: mark a groupon attendance as failed and unwind its orders.

    Paid orders are refunded (WeChat or offline, matching how they were
    paid) and unpaid orders are cancelled.

    :param shop_id: shop owning the groupon
    :param groupon_attend_id: attendance to fail (locked for update)
    :param reason: human-readable failure reason stored on the attend
    :raises ValueError: when the attend is missing or its paid-member and
        waiting-order counts disagree
    """
    success, groupon_attend = get_shop_groupon_attend_by_id(
        shop_id, groupon_attend_id, for_update=True
    )
    if not success:
        raise ValueError(groupon_attend)
    if groupon_attend.status != GrouponAttendStatus.WAITTING:
        print("拼团失败: 拼团参与{}状态错误{}".format(groupon_attend_id, groupon_attend.status))
        return
    paid_attend_details = list_paid_details_by_groupon_attend_id(
        groupon_attend.id
    )
    waitting_orders = list_waitting_order_by_groupon_attend_id(
        groupon_attend.id
    )
    # Paid member count and waiting order count must agree before refunding.
    if len(waitting_orders) != len(paid_attend_details):
        raise ValueError(
            "拼团失败: 拼团参与{}付款人数{}和订单人数{}不匹配".format(
                groupon_attend_id, len(paid_attend_details), len(waitting_orders)
            )
        )
    groupon_attend.set_failed(reason)
    groupon_attend.save()
    # Auto-refund the paid (waiting) orders, bucketing ids by refund outcome.
    map_refund_order = {True: [], False: []}
    for waitting_order in waitting_orders:
        refund_type = (
            OrderRefundType.WEIXIN_JSAPI_REFUND
            if waitting_order.pay_type == OrderPayType.WEIXIN_JSAPI
            else OrderRefundType.UNDERLINE_REFUND
        )
        success, _ = refund_order(
            waitting_order.shop.id, waitting_order, refund_type
        )
        map_refund_order[success].append(waitting_order.id)
    # Auto-cancel the orders that were never paid.
    unpaid_orders = list_unpaid_order_by_groupon_attend_id(
        groupon_attend.id
    )
    for unpaid_order in unpaid_orders:
        cancel_order(unpaid_order.shop_id, unpaid_order.id)
    # fix: the message had only two placeholders for three format args, so
    # the failed-refund count was silently dropped from the log line.
    print(
        "拼团失败: 拼团参与{},退款成功{},退款失败{}".format(
            groupon_attend_id,
            len(map_refund_order.get(True)),
            len(map_refund_order.get(False)),
        )
    )
    # Groupon failed: send the refund template messages (currently disabled).
    msg_notify = get_msg_notify_by_shop_id(shop_id)
    # if msg_notify.group_failed_wx:
    #     for order_id in map_refund_order.get(True):
    #         GrouponOrderFailAttendTplMsg.send(order_id=order_id)
    #     for order_id in map_refund_order.get(False):
    #         GrouponOrderRefundFailTplMsg.send(order_id=order_id)
<file_sep>/wsc_django/wsc_django/apps/user/services.py
import re
from django_redis import get_redis_connection
from settings import QINIU_SHOP_IMG_HOST
from shop.constant import ShopPayChannelType
from shop.models import PayChannel
from user.constant import UserLoginType
from user.models import User, UserOpenid
def create_user(user_info: dict):
    """
    Create and persist a new user.

    :param user_info: field values for the User model; may include a plain
        "password", which is hashed via ``set_password`` rather than stored raw.
    :return: the saved user
    """
    user = User(**user_info)
    if user_info.get("password", None):
        user.set_password(user_info.get("password"))
    # Relative avatar paths are prefixed with the qiniu CDN host so the
    # stored URL is always absolute.
    if not re.match(r'^http(s)?://.+$', user.head_image_url):
        user.head_image_url = QINIU_SHOP_IMG_HOST + user.head_image_url
    user.save()
    return user
def create_user_openid(user_id: int, mp_appid: str, wx_openid: str):
    """
    Persist the mapping between a user and a WeChat openid.

    :param user_id: primary key of the owning user
    :param mp_appid: appid of the WeChat official account
    :param wx_openid: openid issued to the user for that account
    :return: the saved UserOpenid row
    """
    record = UserOpenid(
        user_id=user_id,
        mp_appid=mp_appid,
        wx_openid=wx_openid,
    )
    record.save()
    return record
def update_user_basic_data(user: User, user_data: dict):
    """
    Overwrite basic profile fields on a user and save.

    :param user: user to modify
    :param user_data: mapping of field name to new value, e.g.
        {"nickname": ..., "realname": ..., "sex": ..., "phone": ...}
    :return: the saved user
    """
    for field_name, new_value in user_data.items():
        setattr(user, field_name, new_value)
    user.save()
    return user
def update_user_phone(user: User, phone: str):
    """
    Bind a new phone number to ``user``.

    Fails when another account already owns the number.

    :param user: user to rebind
    :param phone: new phone number
    :return: (True, user) on success, (False, error message) otherwise
    """
    _, existing_owner = get_user_by_phone(phone, UserLoginType.PHONE)
    if existing_owner:
        return False, "该手机号已绑定其他用户"
    user.phone = phone
    user.save()
    return True, user
def update_user_password(user: User, password1: str, password2: str):
    """
    Set a new password after checking the two entries match.

    :param user: user to update
    :param password1: new password
    :param password2: confirmation of the new password
    :return: (True, "") on success, (False, error message) otherwise
    """
    if password1 != password2:
        return False, "两次输入的密码不一致"
    user.set_password(password1)
    user.save()
    return True, ""
def validate_sms_code(phone: str, sms_code: str):
    """
    Check an SMS verification code stored in redis against user input.

    :param phone: phone number the code was sent to
    :param sms_code: code entered by the user
    :return: (True, "") when valid, (False, error message) otherwise
    """
    conn = get_redis_connection("verify_codes")
    cached_code = conn.get("sms_%s" % phone)
    if not cached_code:
        return False, "验证码已过期"
    if str(cached_code.decode()) != sms_code:
        return False, "短信验证码错误"
    return True, ""
def send_email(user: User, email: str):
    """
    Send an account-activation e-mail.

    :param user: user requesting activation
    :param email: address to activate; must match the user's bound e-mail
    :return: (True, "") on success, (False, error message) otherwise
    """
    if user.email != email:
        return False, "激活邮箱与绑定邮箱不一致"
    if user.email_active:
        return False, "该邮箱已激活,无需重复操作"
    # Build the signed activation link.
    url = user.generate_verify_email_url()
    # NOTE(review): actual delivery is stubbed — the link is only printed here.
    print(url)
    return True, ""
def get_user_by_id(user_id: int):
    """
    Fetch a single user by primary key.

    :param user_id: user primary key
    :return: the user, or None when absent
    """
    return User.objects.filter(id=user_id).first()
def get_user_by_phone(phone: str, login_type: int):
    """
    Look a user up by phone; only valid for the phone-login flow.

    :param phone: phone number to search
    :param login_type: must be UserLoginType.PHONE
    :return: (False, error message) when login_type is wrong,
             otherwise (True, user-or-None)
    """
    if login_type != UserLoginType.PHONE:
        return False, "登录方式必须为手机号登录"
    return True, User.objects.filter(phone=phone).first()
def get_user_by_phone_and_password(phone: str, password: str, login_type: int):
    """
    Authenticate a user via phone + password.

    :param phone: phone number to search
    :param password: plain-text password to verify
    :param login_type: must be UserLoginType.PWD
    :return: (True, user) on success, (False, error message) otherwise
    """
    if login_type != UserLoginType.PWD:
        return False, "登录方式必须为密码登录"
    account = User.objects.filter(phone=phone).first()
    if account is None:
        return False, "用户不存在"
    if not account.password:
        return False, "用户还未设置密码,请用手机号登录"
    if not account.check_password(password):
        return False, "密码不正确"
    return True, account
def get_user_by_wx_unionid(wx_unionid: str):
    """
    Fetch the user bound to a WeChat unionid.

    :param wx_unionid: WeChat unionid
    :return: the user, or None when absent
    """
    return User.objects.filter(wx_unionid=wx_unionid).first()
def get_openid_by_user_id_and_appid(user_id: int, mp_appid: str):
    """
    Fetch a user's wx_openid record for one official account.

    :param user_id: user primary key
    :param mp_appid: appid of the WeChat official account
    :return: (True, record) when found, (False, error message) otherwise
    """
    record = UserOpenid.objects.filter(user_id=user_id, mp_appid=mp_appid).first()
    if record is None:
        return False, 'openid不存在'
    return True, record
def get_user_by_email(email: str):
    """
    Fetch a user by e-mail address.

    :param email: e-mail address to search
    :return: the user, or None when absent
    """
    return User.objects.filter(email=email).first()
def get_pay_channel_by_shop_id(shop_id: int):
    """
    Fetch a shop's online payment channel.

    :param shop_id: shop primary key
    :return: (True, channel) when usable, (False, error message) otherwise
    """
    channel = PayChannel.objects.filter(shop_id=shop_id).first()
    if channel is None:
        return False, "店铺未开通线上支付"
    if channel.channel_type != ShopPayChannelType.LCSW:
        return False, "店铺支付渠道错误"
    return True, channel
def list_user_by_ids(user_ids: list):
    """
    List the users whose primary keys are in ``user_ids``.

    :param user_ids: user primary keys
    :return: queryset of matching users
    """
    return User.objects.filter(id__in=user_ids).all()
def list_openid_by_user_ids_and_appid(user_ids: list, mp_appid: str):
    """
    List wx_openid records for the given users under one official account.

    :param user_ids: user primary keys
    :param mp_appid: appid of the WeChat official account
    :return: queryset of matching UserOpenid rows
    """
    user_openid_list = UserOpenid.objects.filter(user_id__in=user_ids, mp_appid=mp_appid).all()
return user_openid_list<file_sep>/wsc_django/wsc_django/utils/views.py
"""自己定义的视图类"""
import datetime
from rest_framework import status, exceptions
from rest_framework.response import Response
from rest_framework.generics import GenericAPIView
from rest_framework_jwt.settings import api_settings as jwt_setting
from settings import AUTH_COOKIE_DOMAIN
from shop.services import get_shop_by_shop_id, get_shop_by_shop_code
from staff.constant import StaffRole, StaffPermission
from staff.services import get_staff_by_user_id_and_shop_id
from user.models import User
from user.utils import ZhiHaoJWTAuthentication
from wsc_django.utils.authenticate import WSCIsLoginAuthenticate, SimpleEncrypt
from wsc_django.utils.permission import StaffRolePermission, WSCStaffPermission
class GlobalBaseView(GenericAPIView):
    """
    Common request-handling base class: defines the API envelope
    conventions and a few shared utilities.

    Response handling — a request logically has three outcomes: error,
    failure and success.  An *error* usually means the request itself is
    invalid (bad params, missing resource, invalid token, ...) and maps to
    an HTTP status code; a *failure* is a business-rule failure (out of
    stock, insufficient balance, ...); *success* is self-explanatory.
        error:   send_error(status_code, error_message)
        failure: send_fail(fail_code, fail_text) -> JSON {"success": False, "code": fail_code, "text": fail_text}
        success: send_success(**kwargs)          -> JSON {"success": True, **kwargs}
    """
    def send_success(self, **kwargs):
        # Merge any extra payload keys into the success envelope.
        obj = {"success": True}
        for k in kwargs:
            obj[k] = kwargs[k]
        return obj
    def send_fail(
        self,
        error_text=None,
        error_code=None,
        error_redirect=None,
        error_key=None,
        error_obj=None,
        error_dict=None,
    ):
        # error_dict / error_obj take precedence over the bare arguments.
        if error_dict:
            error_code = error_dict.get("error_code", 500)
            error_text = error_dict.get("error_text", "网络错误")
        if error_obj:
            error_code = error_obj.error_code
            error_text = error_obj.error_text
        # A numeric error_code gets the full envelope; otherwise only text.
        if type(error_code) == int:
            res = {
                "success": False,
                "error_code": error_code,
                "error_text": error_text,
                "error_redirect": error_redirect,
                "error_key": error_key,
            }
        else:
            res = {"success": False, "error_text": error_text}
        return res
    def send_error(self, status_code, error_message: dict):
        """Build an error response; can be refined later — currently it
        funnels everything through the fail envelope."""
        # Handle serializer validation errors.
        error_message["error_code"] = status_code
        if status_code == 400:
            # Surface just the first offending field name, suffixed "错误".
            error_message = {'error_text': list(error_message)[0] + "错误"}
        return self.send_fail(error_dict=error_message)
    def _get_paginated_data(self, query_set, serializer):
        """Paginate ``query_set`` with the view's paginator (if any) and serialize."""
        page = self.paginate_queryset(query_set)
        if page is not None:
            serializer = serializer(page, many=True)
            return self.get_paginated_response(serializer.data).data
        else:
            serializer = serializer(query_set, many=True)
            return serializer.data
    def finalize_response(self, request, response, *args, **kwargs):
        # Handlers may return plain dicts; wrap them in a DRF Response.
        if isinstance(response, Response):
            pass
        else:
            response = Response(data=response)
        return super().finalize_response(request, response, *args, **kwargs)
class UserBaseView(GlobalBaseView):
    """Base view for logged-in users; resolves the current user from the JWT."""
    authentication_classes = (WSCIsLoginAuthenticate, )
    def initialize_request(self, request, *args, **kwargs):
        request = super().initialize_request(request, *args, **kwargs)
        user = self._get_current_user(request)
        self.current_user = user
        # Exposed on the request for WSCIsLoginAuthenticate to verify.
        request.current_user = self.current_user
        return request
    def _get_current_user(self, request):
        """Decode the JWT on the request; returns the user or None."""
        jwt = ZhiHaoJWTAuthentication()
        try:
            res = jwt.authenticate(request)
        except Exception as e:
            print(e)
            res = None
        if res:
            user = res[0]
        else:
            user = None
        return user
    def _set_current_user(
        self, user: User, expiration_delta=datetime.timedelta(hours=1)
    ):
        """Issue a (token, refresh_token) pair for ``user``.

        :param expiration_delta: lifetime of the access token (refresh
            token always lives one day)
        """
        jwt_payload_handler = jwt_setting.JWT_PAYLOAD_HANDLER
        jwt_encode_handler = jwt_setting.JWT_ENCODE_HANDLER
        payload = jwt_payload_handler(user, expiration_delta)
        token = jwt_encode_handler(payload)
        refresh_payload = jwt_payload_handler(user, datetime.timedelta(days=1), 'refresh_token')
        refresh_token = jwt_encode_handler(refresh_payload)
        return (token, refresh_token)
class StaffBaseView(UserBaseView):
    """Base view for staff members; resolves the current shop and staff row."""
    permission_classes = (WSCStaffPermission,)
    staff_roles = StaffRole
    staff_permissions = StaffPermission
    def initialize_request(self, request, *args, **kwargs):
        request = super().initialize_request(request, *args, **kwargs)
        try:
            wsc_shop_id = request.get_signed_cookie(
                "wsc_shop_id", salt="hzh_wsc_shop_id",
            )
        except Exception as e:
            # Missing/invalid cookie — fall back to 0 (no shop resolves).
            wsc_shop_id = 0
        # Resolve the shop from the shop_id carried in the signed cookie.
        shop = get_shop_by_shop_id(int(wsc_shop_id))
        self.current_shop = shop
        current_staff = None
        if shop and self.current_user:
            current_staff = get_staff_by_user_id_and_shop_id(self.current_user.id, self.current_shop.id)
        self.current_staff = current_staff
        return request
    @classmethod
    def permission_required(cls, permission_list: list):
        """Decorator verifying the staff's permission bitmask covers every
        permission in ``permission_list``; otherwise responds 403."""
        def inner(func):
            def wrapper(self, *args, **kwargs):
                for permission in permission_list:
                    if self.current_staff.permissions & permission == 0:
                        return Response(status=status.HTTP_403_FORBIDDEN)
                return func(self, *args, **kwargs)
            return wrapper
        return inner
class AdminBaseView(StaffBaseView):
    """Base view for shop administrators; additionally checks the staff role."""
    permission_classes = (WSCStaffPermission, StaffRolePermission,)
class MallBaseView(UserBaseView):
    """Base view for mall-frontend endpoints addressed by shop_code."""
    def _set_current_shop(self, request, shop_code):
        """Resolve and store the current shop; raises 404 when it does not exist."""
        shop = get_shop_by_shop_code(shop_code)
        if not shop:
            raise exceptions.NotFound("店铺不存在")
        self.current_shop = shop
class SuperBaseView(GlobalBaseView):
    """Base view for the super-admin backend; no login/shop state is kept,
    requests are authenticated through a signed parameter instead."""
    @classmethod
    def validate_sign(cls, sign_: str, params: tuple):
        """Decorator verifying ``args[sign_]`` decrypts to the '@'-joined
        values of ``params``; responds 403 on any mismatch."""
        def f(func):
            def wrapper(self, request, args):
                sign = args.get(sign_)
                key = SimpleEncrypt.decrypt(sign)
                key_list = key.split("@")
                if len(key_list) != len(params):
                    return self.send_error(403, {"error_text": "鉴权失败"})
                for index, v in enumerate(params):
                    if key_list[index] != str(args.get(v)):
                        return self.send_error(403, {"error_text": "鉴权失败"})
                return func(self, request, args)
            return wrapper
        return f
    def _get_current_user(self, request):
        """Decode the JWT on the request; returns the user or None.
        (Mirrors UserBaseView._get_current_user.)"""
        jwt = ZhiHaoJWTAuthentication()
        try:
            res = jwt.authenticate(request)
        except Exception as e:
            print(e)
            res = None
        if res:
            user = res[0]
        else:
            user = None
        return user
    def _set_current_user(
        self, user: User, expiration_delta=datetime.timedelta(days=1)
    ):
        """Issue a (token, refresh_token) pair for ``user``."""
        jwt_payload_handler = jwt_setting.JWT_PAYLOAD_HANDLER
        jwt_encode_handler = jwt_setting.JWT_ENCODE_HANDLER
        payload = jwt_payload_handler(user, expiration_delta)
        token = jwt_encode_handler(payload)
        refresh_payload = jwt_payload_handler(user, datetime.timedelta(days=1), 'refresh_token')
        refresh_token = jwt_encode_handler(refresh_payload)
        return (token, refresh_token)
    def _refresh_current_user(
        self, user: User, expiration_delta=datetime.timedelta(days=1)
    ):
        """Issue a fresh access token only (no refresh token)."""
        jwt_payload_handler = jwt_setting.JWT_PAYLOAD_HANDLER
        jwt_encode_handler = jwt_setting.JWT_ENCODE_HANDLER
        payload = jwt_payload_handler(user, expiration_delta)
        token = jwt_encode_handler(payload)
        return token
<file_sep>/wsc_django/wsc_django/apps/storage/urls.py
"""
库存相关的路由
"""
from django.urls import path
from storage import views
urlpatterns = [
path('api/admin/product/storage-records/', views.AdminProductStorageRecordsView.as_view()), # 获取货品库存变更记录
]
<file_sep>/wsc_django/wsc_django/apps/order/interface.py
from config.services import get_msg_notify_by_shop_id
from customer.services import list_customer_ids_by_user_id, get_customer_by_user_id_and_shop_id
from delivery.services import get_order_delivery_by_delivery_id, create_order_delivery
from groupon.models import GrouponAttend
from groupon.services import get_groupon_by_id, count_groupon_attend_by_groupon_id_and_customer_id
from order.models import Order
from celery_tasks.celery_auto_work.tasks import auto_cancel_order, auto_validate_groupon_attend
from payment.service import get_wx_jsApi_pay
from printer.services import print_order
from product.services import list_product_by_ids, get_product_by_id
from logs.services import list_order_log_by_shop_id_and_order_num
# from celery_task.celery_tplmsg_task import (
# OrderCommitTplMsg,
# OrderDeliveryTplMsg,
# OrderFinishTplMsg,
# OrderRefundTplMsg,
# )
from promotion.services import get_product_promotion
def list_product_by_ids_interface(shop_id: int, product_ids: list) -> list:
    """List a shop's products by their ids (interface wrapper)."""
    return list_product_by_ids(shop_id, product_ids)


def get_product_by_id_interface(shop_id: int, product_id: int) -> object:
    """Fetch one product by shop id and product id (interface wrapper)."""
    return get_product_by_id(shop_id, product_id)


def jsapi_params_interface(order, wx_openid):
    """Build WeChat JSAPI (official account) payment parameters for an order."""
    return get_wx_jsApi_pay(order, wx_openid)
def auto_cancel_order_interface(shop_id, order_id):
    """Schedule automatic cancellation of an order left unpaid for 15 minutes."""
    auto_cancel_order.apply_async(args=(shop_id, order_id), countdown=15 * 60)


def order_commit_tplmsg_interface(order_id: int) -> None:
    """Send the "order committed" template message (currently disabled)."""
    # OrderCommitTplMsg.send(order_id=order_id)


def list_customer_ids_by_user_id_interface(user_id: int) -> list:
    """List all customer ids that belong to one user."""
    return list_customer_ids_by_user_id(user_id)


def get_customer_by_user_id_and_shop_id_interface(user_id: int, shop_id: int):
    """Fetch the customer record of a user within a given shop."""
    return get_customer_by_user_id_and_shop_id(user_id, shop_id)
def list_order_log_by_shop_id_and_order_num_interface(shop_id: int, order_num: str):
    """List the operation history of one order."""
    return list_order_log_by_shop_id_and_order_num(shop_id, order_num)


def get_order_delivery_by_delivery_id_interface(delivery_id: int):
    """Fetch one order delivery record."""
    return get_order_delivery_by_delivery_id(delivery_id)


def print_order_interface(order: Order, user_id: int):
    """Print an order ticket."""
    return print_order(order, user_id)


def create_order_delivery_interface(delivery_info: dict):
    """Create an order delivery record."""
    return create_order_delivery(delivery_info)


def get_msg_notify_by_shop_id_interface(shop_id: int):
    """Fetch a shop's message-notification settings."""
    return get_msg_notify_by_shop_id(shop_id)
def auto_validate_groupon_attend_interface(shop_id: int, groupon_attend: GrouponAttend):
    """Asynchronously validate a groupon attend; when full, orders are paid directly."""
    if not groupon_attend:
        return
    auto_validate_groupon_attend.apply_async(args=[shop_id, groupon_attend.id])


def get_product_promotion_interface(shop_id: int, product_id: int):
    """Fetch the promotion event attached to a product."""
    return get_product_promotion(shop_id, product_id)


def get_groupon_by_id_interface(shop_id: int, groupon_id: int):
    """Fetch one groupon activity."""
    return get_groupon_by_id(shop_id, groupon_id)


def count_groupon_attend_by_groupon_id_and_customer_id_interface(
    groupon_id: int, customer_id: int
):
    """Count how many times one customer joined a given groupon.

    :param groupon_id: groupon activity id
    :param customer_id: customer id
    :return: total number of attends
    """
    return count_groupon_attend_by_groupon_id_and_customer_id(
        groupon_id, customer_id
    )


def order_delivery_tplmsg_interface(order_id: int):
    """Send the "order delivering" template message (currently disabled)."""
    # OrderDeliveryTplMsg.send(order_id=order_id)


def order_finish_tplmsg_interface(order_id: int):
    """Send the "order finished" template message (currently disabled)."""
    # OrderFinishTplMsg.send(order_id=order_id)
def order_refund_tplmsg_interface(order_id: int):
"""发送订单退款模板消息"""
# OrderRefundTplMsg.send(order_id=order_id)<file_sep>/wsc_django/wsc_django/apps/order/migrations/0004_orderaddress_added.py
# Generated by Django 3.1.6 on 2021-06-08 09:17
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the optional ``added`` ("补充说明",
    # i.e. additional notes) CharField to OrderAddress.
    # Applied migrations should not be hand-edited.

    dependencies = [
        ('order', '0003_auto_20210606_2054'),
    ]

    operations = [
        migrations.AddField(
            model_name='orderaddress',
            name='added',
            field=models.CharField(max_length=50, null=True, verbose_name='补充说明'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/groupon/views.py
from rest_framework import status
from webargs import fields, validate
from webargs.djangoparser import use_args
from groupon.constant import GrouponType, GrouponStatus, GrouponAttendStatus
from order.serializers import AdminOrdersSerializer
from product.services import get_product_by_id
from wsc_django.utils.arguments import StrToList
from wsc_django.utils.pagination import StandardResultsSetPagination
from wsc_django.utils.views import AdminBaseView, MallBaseView
from groupon.interface import (
expire_groupon_interface,
publish_gruopon_interface,
immediate_cancel_order_interface,
sync_success_groupon_attend_interface,
immediate_fail_groupon_attend_interface,
list_order_by_groupon_attend_id_interface,
list_unpay_order_by_groupon_attend_ids_interface,
delay_fail_groupon_attend_interface)
from groupon.serializers import (
AdminGrouponSerializer,
AdminGrouponsSerializer,
AdminGrouponCreateSerializer,
AdminGrouponAttendSerializer,
)
from groupon.services import (
set_groupon_off,
list_shop_groupons,
launch_groupon_attend,
get_shop_groupon_by_id,
validate_groupon_period,
force_success_groupon_attend,
list_waitting_groupon_attends,
get_shop_groupon_attend_by_id,
list_groupon_attends_by_groupon,
list_created_groupon_attends_by_groupon_id,
)
class AdminGrouponView(AdminBaseView):
    """Admin - promotions - groupon: create a groupon (POST), edit a groupon
    (PUT) and fetch one groupon's detail (GET)."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args(
        {
            "product_id": fields.Integer(required=True, comment="商品id"),
            "price": fields.Decimal(required=True, comment="拼团价"),
            "from_datetime": fields.DateTime(required=True, comment="拼团活动开始时间"),
            "to_datetime": fields.DateTime(required=True, comment="拼团活动结束时间"),
            "groupon_type": fields.Integer(
                required=True,
                validate=[validate.OneOf([GrouponType.NORMAL, GrouponType.MENTOR])],
                comment="拼团活动类型 1:普通 2:老带新",
            ),
            "success_size": fields.Integer(
                required=True, validate=[validate.Range(2, 50)], comment="成团人数"
            ),
            "quantity_limit": fields.Integer(
                required=True, validate=[validate.Range(0)], comment="购买数量上限"
            ),
            "success_limit": fields.Integer(
                required=True, validate=[validate.Range(0)], comment="成团数量上限"
            ),
            "attend_limit": fields.Integer(
                required=True, validate=[validate.Range(0)], comment="参团数量上限"
            ),
            "success_valid_hour": fields.Integer(
                required=True, validate=[validate.OneOf([24, 48])], comment="开团有效时间"
            ),
        },
        location="json"
    )
    def post(self, request, args):
        """Create a groupon: validate the activity period, look up the product,
        persist via the serializer, then schedule publish/expire tasks."""
        success, msg = validate_groupon_period(
            args["product_id"], args["from_datetime"], args["to_datetime"]
        )
        if not success:
            return self.send_fail(error_text=msg)
        # filter_delete=False: a soft-deleted product can still be referenced.
        product = get_product_by_id(self.current_shop.id, args.pop("product_id"), filter_delete=False)
        if not product:
            return self.send_fail(error_text="货品不存在")
        serializer = AdminGrouponCreateSerializer(
            data=args, context={"self": self, "product": product}
        )
        if not serializer.is_valid():
            return self.send_error(
                error_message=serializer.errors, status_code=status.HTTP_400_BAD_REQUEST
            )
        groupon = serializer.save()
        # Schedule the async publish (go live) and expiry of the activity.
        publish_gruopon_interface(groupon)
        expire_groupon_interface(groupon)
        return self.send_success()
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args({"groupon_id": fields.String(required=True, comment="拼团id")}, location="query")
    def get(self, request, args):
        """Return the detail of one groupon belonging to the current shop."""
        success, groupon = get_shop_groupon_by_id(
            self.current_shop.id, args["groupon_id"]
        )
        if not success:
            # On failure ``groupon`` carries the error text.
            return self.send_fail(error_text=groupon)
        serializer = AdminGrouponSerializer(groupon)
        return self.send_success(data=serializer.data)
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args(
        {
            "groupon_id": fields.Integer(required=True, comment="拼团id"),
            "product_id": fields.Integer(required=True, comment="商品id"),
            "price": fields.Decimal(required=True, comment="拼团价"),
            "from_datetime": fields.DateTime(required=True, comment="拼团活动开始时间"),
            "to_datetime": fields.DateTime(required=True, comment="拼团活动结束时间"),
            "groupon_type": fields.Integer(
                required=True,
                validate=[validate.OneOf([GrouponType.NORMAL, GrouponType.MENTOR])],
                comment="拼团活动类型 1:普通 2:老带新",
            ),
            "success_size": fields.Integer(
                required=True, validate=[validate.Range(2, 50)], comment="成团人数"
            ),
            "quantity_limit": fields.Integer(required=True, comment="购买数量上限"),
            "success_limit": fields.Integer(required=True, comment="成团数量上限"),
            "attend_limit": fields.Integer(required=True, comment="参团数量上限"),
            "success_valid_hour": fields.Integer(
                required=True, validate=[validate.OneOf([24, 48])], comment="开团有效时间"
            ),
        },
        location="json"
    )
    def put(self, request, args):
        """Edit a groupon. Only allowed when the activity is not live and no
        user has joined yet; re-schedules publish/expire afterwards."""
        shop_id = self.current_shop.id
        product = get_product_by_id(
            shop_id, args.pop("product_id"), filter_delete=False
        )
        if not product:
            return self.send_fail(error_text="货品不存在")
        success, groupon = get_shop_groupon_by_id(shop_id, args.pop("groupon_id"))
        if not success:
            return self.send_fail(error_text=groupon)
        elif groupon.status == GrouponStatus.ON:
            return self.send_fail(error_text="拼团活动正在启用中,请停用后再进行编辑")
        success, msg = validate_groupon_period(
            product.id,
            args["from_datetime"],
            args["to_datetime"],
            groupon_id=groupon.id,
        )
        if not success:
            return self.send_fail(error_text=msg)
        # Any attend in one of these states means a user already joined.
        states = [
            GrouponAttendStatus.CREATED,
            GrouponAttendStatus.WAITTING,
            GrouponAttendStatus.SUCCEEDED,
            GrouponAttendStatus.FAILED,
        ]
        groupon_attends = list_groupon_attends_by_groupon(groupon, states)
        if groupon_attends:
            return self.send_fail(error_text="已经有用户参团, 拼团无法编辑")
        serializer = AdminGrouponCreateSerializer(
            groupon, data=args, context={"self": self, "product": product}
        )
        # Args are already validated by use_args; the serializer result is not
        # re-checked here (NOTE(review): serializer-level errors would surface
        # as an exception from save() — confirm this is intended).
        serializer.is_valid()
        groupon = serializer.save()
        publish_gruopon_interface(groupon)
        expire_groupon_interface(groupon)
        return self.send_success()
class AdminGrouponPeriodVerificationView(AdminBaseView):
    """Admin - promotions - groupon: validate an activity period for a product."""

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args(
        {
            "product_id": fields.Integer(required=True, comment="商品id"),
            "from_datetime": fields.DateTime(required=True, comment="拼团活动开始时间"),
            "to_datetime": fields.DateTime(required=True, comment="拼团活动结束时间"),
        },
        location="json"
    )
    def post(self, request, args):
        """Check the [from, to] period; reply success or the failure reason."""
        ok, reason = validate_groupon_period(
            args["product_id"], args["from_datetime"], args["to_datetime"]
        )
        if ok:
            return self.send_success()
        return self.send_fail(error_text=reason)
class AdminGrouponOffView(AdminBaseView):
    """Admin - promotions - groupon: deactivate a groupon activity.

    Deactivation also fails all in-progress attends (triggering refunds) and
    cancels unpaid leader orders of freshly-created attends.
    """
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args({"groupon_id": fields.Integer(required=True, comment="拼团id")}, location="json")
    def post(self, request, args):
        success, groupon = set_groupon_off(
            self.current_shop.id, self.current_user.id, args["groupon_id"]
        )
        if not success:
            return self.send_fail(error_text=groupon)
        success, waitting_groupon_attends = list_waitting_groupon_attends(
            self.current_shop.id, groupon.id
        )
        if not success:
            # The activity is already off at this point; surface the refund
            # failure loudly instead of silently dropping it.
            raise ValueError("拼团{}停用成功,但是退款失败".format(groupon.id))
        for groupon_attend in waitting_groupon_attends:
            immediate_fail_groupon_attend_interface(
                self.current_shop.id, groupon_attend
            )
        # Attends still in CREATED state (only possible while the group
        # leader has not paid yet).
        created_groupon_attends = list_created_groupon_attends_by_groupon_id(groupon.id)
        created_groupon_attend_ids = [
            groupon_attend.id for groupon_attend in created_groupon_attends
        ]
        waiting_pay_open_groupon_orders = list_unpay_order_by_groupon_attend_ids_interface(
            created_groupon_attend_ids
        )
        for order in waiting_pay_open_groupon_orders:
            immediate_cancel_order_interface(self.current_shop.id, order.id)
        return self.send_success()
class AdminGrouponsView(AdminBaseView):
    """Admin - promotions - groupon: paginated activity list."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args(
        {
            "product_name": fields.String(missing=None, comment="商品名搜索"),
        },
        location="query"
    )
    def get(self, request, args):
        """List the shop's groupons, optionally filtered by product name."""
        groupon_qs = list_shop_groupons(self.current_shop.id, args)
        page = self._get_paginated_data(groupon_qs, AdminGrouponsSerializer)
        return self.send_success(data_list=page)
class AdminGrouponAttendsView(AdminBaseView):
    """Admin - promotions - groupon: paginated list of attends of one groupon."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args(
        {
            "groupon_id": fields.Integer(required=True, comment="拼团id"),
            "groupon_attend_status": StrToList(
                required=False,
                missing=[
                    GrouponAttendStatus.WAITTING,
                    GrouponAttendStatus.SUCCEEDED,
                    GrouponAttendStatus.FAILED,
                ],
                validate=validate.ContainsOnly(
                    [
                        GrouponAttendStatus.WAITTING,
                        GrouponAttendStatus.SUCCEEDED,
                        GrouponAttendStatus.FAILED,
                    ]
                ),
                comment="拼团参与状态,1:拼团中 2:已成团 3:已失败",
            ),
        },
        location="query"
    )
    def get(self, request, args):
        """List attends of the given groupon, filtered by attend status."""
        groupon_id = args.pop("groupon_id")
        success, groupon = get_shop_groupon_by_id(self.current_shop.id, groupon_id)
        if not success:
            return self.send_fail(error_text=groupon)
        attends = list_groupon_attends_by_groupon(
            groupon, args["groupon_attend_status"]
        )
        page = self._get_paginated_data(attends, AdminGrouponAttendSerializer)
        return self.send_success(data_list=page)
class AdminGrouponAttendView(AdminBaseView):
    """Admin - promotions - groupon: detail of a single groupon attend."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args({"groupon_attend_id": fields.Integer(required=True, comment="拼团参与id")}, location="query")
    def get(self, request, args):
        success, groupon_attend = get_shop_groupon_attend_by_id(
            self.current_shop.id, args["groupon_attend_id"]
        )
        if not success:
            # On failure ``groupon_attend`` carries the error text.
            return self.send_fail(error_text=groupon_attend)
        serializer = AdminGrouponAttendSerializer(groupon_attend)
        return self.send_success(data=serializer.data)
class AdminGrouponAttendOrdersView(AdminBaseView):
    """Admin - promotions - groupon: member orders of one groupon attend."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args(
        {
            "groupon_attend_id": fields.Integer(
                required=True, validate=[validate.Range(1)], comment="拼团参与ID"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        """Return the paginated orders that belong to the given attend."""
        order_qs = list_order_by_groupon_attend_id_interface(
            shop_id=self.current_shop.id, **args
        )
        page = self._get_paginated_data(order_qs, AdminOrdersSerializer)
        return self.send_success(data_list=page)
class AdminGrouponAttendSuccessForceView(AdminBaseView):
    """Admin - promotions - groupon: force an attend to succeed."""
    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION]
    )
    @use_args({"groupon_attend_id": fields.Integer(required=True, comment="拼团参与id")}, location="query")
    def post(self, request, args):
        success, groupon_attend = force_success_groupon_attend(
            self.current_shop.id, args["groupon_attend_id"]
        )
        if not success:
            return self.send_fail(error_text=groupon_attend)
        # Propagate the forced success (e.g. settle the attend's orders).
        sync_success_groupon_attend_interface(self.current_shop.id, groupon_attend.id)
        return self.send_success()
class MallGrouponAttendInitationView(MallBaseView):
    """Mall - groupon: open (launch) a new groupon attend.

    NOTE(review): "Initation" is a typo for "Initiation"; the class name is
    kept because URL configs reference it.
    """
    @use_args({"groupon_id": fields.Integer(required=True, comment="拼团活动id")}, location="json")
    def post(self, request, args, shop_code):
        self._set_current_shop(request, shop_code)
        success, groupon_attend = launch_groupon_attend(
            self.current_shop.id, self.current_user.id, args["groupon_id"]
        )
        if not success:
            return self.send_fail(error_text=groupon_attend)
        # Schedule automatic failure of the attend if it does not fill in time.
        delay_fail_groupon_attend_interface(self.current_shop.id, groupon_attend)
return self.send_success(groupon_attend_id=groupon_attend.id)<file_sep>/wsc_django/wsc_django/apps/shop/utils.py
from wechatpy import WeChatClient
from wechatpy.client.api import WeChatWxa
from qcloud_cos import CosConfig, CosS3Client, CosServiceError
from settings import (
MINI_PROGRAM_APPID,
MINI_PROGRAM_APPSECRET,
TENCENT_COS_SECRETID,
TENCENT_COS_SECRETKEY,
TENCENT_COS_IMG_HOST,
)
def get_shop_mini_program_qcode(shop_code):
    """Fetch the WeChat mini-program QR code image for a shop.

    :param shop_code: shop code embedded in the QR code landing path
    :return: raw image bytes, or None when the WeChat API call fails
    """
    wechat_client = WeChatClient(MINI_PROGRAM_APPID, MINI_PROGRAM_APPSECRET)
    mini_program = WeChatWxa(wechat_client)
    path = '/pages/index/index?shop_code=%s' % shop_code
    try:
        return mini_program.create_qrcode(path).content
    except Exception as e:
        print(e)
        return None
def put_mps_to_cos():
    """Upload the new-order notification mp3 to Tencent COS.

    :return: (True, object key) on success, (False, error message) on failure.
    """
    region = 'ap-nanjing'  # COS region
    bucket = 'zhihao-1300126182'  # bucket name
    token = None  # only needed with temporary credentials
    config = CosConfig(
        Region=region,
        SecretId=TENCENT_COS_SECRETID,
        SecretKey=TENCENT_COS_SECRETKEY,
        Token=token,
    )
    client = CosS3Client(config)
    key = 'mp3/new_order_notify.mp3'
    try:
        response = client.put_object(
            Bucket=bucket,
            # NOTE(review): the original uploads an empty body — presumably the
            # real mp3 file should be read and passed here; confirm before use.
            Body='',
            Key=key,
            # BUG FIX: was 'image/png' for an mp3 object.
            ContentType='audio/mpeg',
        )
    except CosServiceError as e:
        return False, e.get_error_msg()
    if not response.get("ETag", None):
        return False, "上传失败"
    # BUG FIX: the original fell through and implicitly returned None on success.
    return True, key
def put_qcode_file_to_tencent_cos(qcode_file, shop_code):
    """
    Upload a mini-program QR code image to Tencent COS and return its URL.
    :param qcode_file: QR code image bytes
    :param shop_code: shop code, used as the object file name
    :return: (True, url) on success, (False, error message) on failure
    """
    secret_id = TENCENT_COS_SECRETID
    secret_key = TENCENT_COS_SECRETKEY
    region = 'ap-nanjing'  # COS region
    bucket = 'zhihao-1300126182'  # bucket name
    token = None  # only needed with temporary credentials
    config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)  # build the client config
    client = CosS3Client(config)
    key = 'img/' + shop_code + '.png'  # object key (file name)
    try:
        response = client.put_object(
            Bucket=bucket,
            Body=qcode_file,
            Key=key,
            ContentType='image/png'
        )
    except CosServiceError as e:
        return False, e.get_error_msg()
    if not response.get("ETag", None):
        return False, "上传失败"
return True, TENCENT_COS_IMG_HOST + key<file_sep>/wsc_django/wsc_django/apps/shop/urls.py
"""
店铺相关的路由
"""
from django.urls import path, re_path
from shop import views
urlpatterns_admin = [
path('api/super/shop/', views.SuperShopView.as_view()), # 总后台-商铺创建和详情
path('api/super/shops/', views.SuperShopListView.as_view()), # 总后台-商铺列表
path('api/super/shop/choice/', views.SuperShopChoiceView.as_view()), # 总后台-商铺选择
path('api/super/shop/status/', views.SuperShopStatusView.as_view()), # 总后台-通过shop_status获取所有商铺&修改商铺的shop_status
path('api/super/shop/verify/', views.SuperShopVerifyView.as_view()), # 总后台-修改商铺的认证状态
path('api/super/shop/pay-verify/', views.SuperShopPayVerifyView.as_view()), # 总后台-修改店铺的支付认证状态
path('api/admin/shop/', views.AdminShopView.as_view()), # 商户后台-商铺详情
]
urlpatterns_mall = [
re_path(r'^api/mall/shop/(?P<shop_code>\w+)/$', views.MallShopView.as_view()), # 商城端-全局获取店铺信息
]
urlpatterns = urlpatterns_admin + urlpatterns_mall
<file_sep>/wsc_django/wsc_django/apps/order/selectors.py
from collections import defaultdict
from logs.services import get_order_log_time_by_order_num
from order.constant import OrderStatus
from order.models import Order, OrderAddress, OrderDetail
from user.constant import USER_OUTPUT_CONSTANT
def get_shop_order_by_num(shop_id: int, num: str):
    """Fetch one order (with details, address and customer info) by shop id
    and order number.

    :return: (True, order) or (False, error message)
    """
    order = Order.objects.filter(shop_id=shop_id, order_num=num).first()
    if not order:
        return False, "订单不存在"
    order.order_details = list_order_details_by_order_ids([order.id])
    address = get_order_address_by_order_id(order.id)
    if address:
        order.address = address
    # Copy the customer's user fields onto the customer object for output.
    for key in USER_OUTPUT_CONSTANT:
        setattr(order.customer, key, getattr(order.customer.user, key))
    return True, order
def get_order_by_shop_id_and_id(shop_id: int, order_id: int):
    """Fetch a single order by shop id and order id (or None)."""
    return Order.objects.filter(id=order_id, shop_id=shop_id).first()
def get_shop_order_by_shop_id_and_id(shop_id: int, order_id: int):
    """Fetch one order with its line details and address by shop id and order id.

    :return: (True, order) or (False, error message)
    """
    order = Order.objects.filter(shop_id=shop_id, id=order_id).first()
    if not order:
        return False, "订单不存在"
    order.order_details = list_order_details_by_order_ids([order.id])
    address = get_order_address_by_order_id(order.id)
    if address:
        order.address = address
    return True, order
def get_order_address_by_order_id(order_id: int):
    """Fetch an order's address record, with ``address`` set to the formatted
    full address; returns None when the order has no address."""
    record = OrderAddress.objects.filter(order_id=order_id).first()
    if record:
        record.address = record.full_address
    return record
def get_shop_order_by_num_without_details(shop_id: int, order_num: str):
    """
    Fetch a single order by its number, without loading line details.
    (Annotation fixed: order numbers are strings throughout this module.)
    :param shop_id:
    :param order_num:
    :return: the order or None
    """
    order = Order.objects.filter(shop_id=shop_id, order_num=order_num).first()
    return order
def list_order_details_by_order_ids(order_ids: list):
    """List order line details for the given order ids, decorating each line
    with its product's name, id and cover picture for serialization."""
    details = OrderDetail.objects.filter(order_id__in=order_ids).all()
    for detail in details:
        product = detail.product
        detail.product_name = product.name
        detail.product_id = product.id
        detail.product_cover_picture = product.cover_image_url
    return details
def list_order_with_order_details_by_product_id(shop_id: int, product_id: int):
    """
    List the sales records (orders) of one product.

    Each returned order is decorated with the matching line's net price /
    quantity / amount and the customer's user fields.
    :param shop_id:
    :param product_id:
    :return:
    """
    order_with_order_details_query = Order.objects.filter(
        shop_id=shop_id, order_detail__product_id=product_id
    ).order_by("id")
    order_with_order_details = order_with_order_details_query.all()
    for order in order_with_order_details:
        # Keep the LAST detail matching the product (same as the original
        # loop), but reset per order: the original could reuse a stale
        # ``order_detail`` from the previous iteration (or raise NameError)
        # if an order unexpectedly had no matching line.
        order_detail = None
        for od in order.order_detail.all():
            if od.product_id == product_id:
                order_detail = od
        if order_detail is None:
            continue
        order.price_net = order_detail.price_net
        order.quantity_net = order_detail.quantity_net
        order.amount_net = order_detail.amount_net
        # Copy the customer's user fields onto the customer object for output.
        for key in USER_OUTPUT_CONSTANT:
            setattr(order.customer, key, getattr(order.customer.user, key))
    return order_with_order_details
def get_order_by_num_for_update(num: str):
    """
    Fetch an order by its number with a row lock (SELECT ... FOR UPDATE);
    must run inside a transaction.

    BUG FIX: the original filtered on ``num=num``, but the model field is
    ``order_num`` (as used by every other query in this module), which would
    raise FieldError at runtime.
    :param num:
    :return: the locked order or None
    """
    result = Order.objects.select_for_update().filter(order_num=num).first()
    return result
def get_order_detail_by_id_only_msg_notify(order_id: int):
    """Fetch an order plus its details and address by id. Reserved for the
    WeChat order-notification flow — do not call from anywhere else.

    :return: (True, order) or (False, error message)
    """
    order = Order.objects.filter(id=order_id).first()
    if not order:
        return False, "订单不存在"
    order.order_details = list_order_details_by_order_ids([order.id])
    order.address = get_order_address_by_order_id(order.id)
    return True, order
def get_customer_order_by_id(customer_ids: list, order_id: int):
    """Find one order that belongs to any of the given customer ids."""
    return Order.objects.filter(customer_id__in=customer_ids, id=order_id).first()
def get_customer_order_with_detail_by_id(customer_ids: list, order_id: int):
    """Find a customer's order together with its details, address and — for
    confirmed/finished orders — a delivery time.

    :return: (True, order) or (False, error message)
    """
    order = get_customer_order_by_id(customer_ids, order_id)
    if not order:
        return False, "订单不存在"
    # Attach address and line details.
    order.order_details = list_order_details_by_order_ids([order.id])
    order.address = get_order_address_by_order_id(order.id)
    # For delivered states, use the latest operation-log time as the
    # dispatch/arrival time.
    if order.order_status in [OrderStatus.CONFIRMED, OrderStatus.FINISHED]:
        order.delivery_time = get_order_log_time_by_order_num(order.order_num)
    return True, order
def get_order_by_customer_id_and_groupon_attend_id(customer_id: int, groupon_attend_id: int):
    """Find the customer's order inside one groupon attend, considering all
    the statuses listed below (i.e. everything except cancelled/closed)."""
    relevant_status = [
        OrderStatus.UNPAID,
        OrderStatus.PAID,
        OrderStatus.CONFIRMED,
        OrderStatus.REFUNDED,
        OrderStatus.FINISHED,
        OrderStatus.WAITTING,
    ]
    return Order.objects.filter(
        customer_id=customer_id,
        groupon_attend_id=groupon_attend_id,
        order_status__in=relevant_status,
    ).first()
def list_shop_orders(
    shop_id: int,
    order_types: list,
    order_pay_types: list,
    order_delivery_methods: list,
    order_status: list,
    num: str = None,
):
    """
    List a shop's orders with line details and customer info attached.

    When ``num`` is given, it takes precedence: only that order number is
    searched (restricted to paid/confirmed/finished/refunded states) and
    the other filters are ignored. Otherwise orders are filtered by type,
    pay type, delivery method and status.
    :param shop_id:
    :param order_types:
    :param order_pay_types:
    :param order_delivery_methods:
    :param order_status:
    :param num: optional order number for exact search
    :return:
    """
    if num:
        order_list = (
            Order.objects.filter(shop_id=shop_id, order_num=num)
            .filter(
                order_status__in=[
                    OrderStatus.PAID,
                    OrderStatus.CONFIRMED,
                    OrderStatus.FINISHED,
                    OrderStatus.REFUNDED,
                ]
            )
            .all()
        )
    else:
        order_list =(
            Order.objects.filter(shop_id=shop_id)
            .filter(
                order_type__in=order_types,
                pay_type__in=order_pay_types,
                delivery_method__in=order_delivery_methods,
                order_status__in=order_status
            )
            .order_by(
                "order_status", "delivery_method", "delivery_period", "-id"
            )
            .all()
        )
    # Load line details for all orders in one query.
    order_ids = [order.id for order in order_list]
    order_details = list_order_details_by_order_ids(order_ids)
    map_order_lines = defaultdict(list)
    for order_detail in order_details:
        map_order_lines[order_detail.order_id].append(order_detail)
    # Assemble: attach details and copy customer user fields for output.
    for order in order_list:
        order.order_details = map_order_lines.get(order.id)
        # Customer display fields come from the related user record.
        for _ in USER_OUTPUT_CONSTANT:
            setattr(order.customer, _ , getattr(order.customer.user, _))
    return order_list
def list_customer_orders(
    shop_id: int,
    customer_id: int,
    order_types: list,
    order_pay_types: list,
    order_delivery_methods: list,
    order_status: list,
):
    """
    List one customer's historical orders with line details attached,
    optionally filtered by order type, pay type, delivery method and status.

    BUG FIX: the original filtered on ``order_types__in`` and
    ``order_pay_types__in``, which are not model fields — the model uses
    ``order_type`` / ``pay_type`` (see ``list_shop_orders``) — so any
    non-empty filter raised FieldError.
    :param shop_id:
    :param customer_id:
    :param order_types:
    :param order_pay_types:
    :param order_delivery_methods:
    :param order_status:
    :return:
    """
    order_list_query = Order.objects.filter(shop_id=shop_id, customer_id=customer_id)
    if order_types:
        order_list_query = order_list_query.filter(order_type__in=order_types)
    if order_pay_types:
        order_list_query = order_list_query.filter(pay_type__in=order_pay_types)
    if order_delivery_methods:
        order_list_query = order_list_query.filter(delivery_method__in=order_delivery_methods)
    if order_status:
        order_list_query = order_list_query.filter(order_status__in=order_status)
    order_list = order_list_query.order_by(
        "order_status", "delivery_method", "delivery_period", "-id"
    ).all()
    # Load line details for all orders in one query, then attach them.
    order_ids = [order.id for order in order_list]
    order_details = list_order_details_by_order_ids(order_ids)
    map_order_lines = defaultdict(list)
    for order_detail in order_details:
        map_order_lines[order_detail.order_id].append(order_detail)
    for order in order_list:
        order.order_details = map_order_lines.get(order.id)
    return order_list
def list_customer_order_by_customer_ids(customer_ids: list, order_status: list):
    """List every order of a user (one user maps to several customers),
    newest first, with line details attached; each order also carries its
    delivery type when a delivery record exists."""
    qs = (
        Order.objects.filter(customer_id__in=customer_ids)
        .order_by("-create_time")
    )
    if order_status:
        qs = qs.filter(order_status__in=order_status)
    order_list = qs.all()
    # Load line details for all orders in one query.
    details_by_order = defaultdict(list)
    for detail in list_order_details_by_order_ids([o.id for o in order_list]):
        details_by_order[detail.order_id].append(detail)
    # Attach details and delivery type.
    for order in order_list:
        order.order_details = details_by_order.get(order.id)
        if order.delivery:
            order.delivery_type = order.delivery.delivery_type
    return order_list
def list_order_details_by_order_id(order_id: int):
    """List the line details (sub-orders) of a single order."""
    return OrderDetail.objects.filter(order_id=order_id).all()
def list_shop_abnormal_orders(
    shop_id: int,
    order_types: list,
    order_pay_types: list,
    order_delivery_methods: list,
    order_status: list,
    num: str = None,
):
    """
    List a shop's abnormal orders with line details attached.

    When ``num`` is given only that order number is searched (restricted to
    the refund-failed state); otherwise the type/pay/delivery/status filters
    apply.

    BUG FIX: the filtered branch did not constrain by ``shop_id`` and thus
    returned other shops' orders as well.
    :param shop_id:
    :param order_types:
    :param order_pay_types:
    :param order_delivery_methods:
    :param order_status:
    :param num: optional order number for exact search
    :return:
    """
    if num:
        orders = (
            Order.objects.filter(
                shop_id=shop_id, order_num=num, order_status=OrderStatus.REFUND_FAIL
            )
        )
    else:
        orders = (
            Order.objects.filter(
                shop_id=shop_id,  # BUG FIX: was missing, leaking cross-shop data
                order_type__in=order_types,
                pay_type__in=order_pay_types,
                delivery_method__in=order_delivery_methods,
                order_status__in=order_status,
            )
        )
    order_list = orders.order_by("delivery_method", "delivery_period", "-id").all()
    # Load line details for all orders in one query, then attach them.
    order_ids = [order.id for order in order_list]
    order_details = list_order_details_by_order_ids(order_ids)
    map_order_lines = defaultdict(list)
    for order_detail in order_details:
        map_order_lines[order_detail.order_id].append(order_detail)
    for order in order_list:
        order.order_details = map_order_lines.get(order.id)
    return order_list
def count_abnormal_order(shop_id: int):
    """Count a shop's abnormal (refund-failed) orders."""
    return Order.objects.filter(
        shop_id=shop_id,
        order_status=OrderStatus.REFUND_FAIL,
    ).count()
def list_order_by_groupon_attend_id(shop_id: int, groupona_attend_id: int):
    """
    List every order of one groupon attend, with line details attached.

    BUG FIXES: the original filtered on ``groupona_attend_id`` — a typo, the
    model field is ``groupon_attend_id`` as used by the sibling queries
    below, so the query raised FieldError — and it never returned
    ``order_list``.
    :param shop_id:
    :param groupona_attend_id: groupon attend id (misspelled parameter name
        kept for caller compatibility)
    :return:
    """
    order_list = (
        Order.objects.filter(
            shop_id=shop_id,
            groupon_attend_id=groupona_attend_id,
            order_status__in=[
                OrderStatus.UNPAID,
                OrderStatus.PAID,
                OrderStatus.CONFIRMED,
                OrderStatus.FINISHED,
                OrderStatus.REFUNDED,
                OrderStatus.WAITTING,
                OrderStatus.REFUND_FAIL,
            ]
        )
        .order_by('id')
        .all()
    )
    # Load line details for all orders in one query, then attach them.
    order_ids = [order.id for order in order_list]
    order_details = list_order_details_by_order_ids(order_ids)
    map_order_lines = defaultdict(list)
    for order_detail in order_details:
        map_order_lines[order_detail.order_id].append(order_detail)
    for order in order_list:
        order.order_details = map_order_lines.get(order.id)
    return order_list
def list_waitting_order_by_groupon_attend_id(groupon_attend_id: int):
    """List the still-grouping (WAITTING) orders of one groupon attend."""
    return Order.objects.filter(
        groupon_attend_id=groupon_attend_id, order_status=OrderStatus.WAITTING
    ).all()


def list_unpaid_order_by_groupon_attend_id(groupon_attend_id: int):
    """List the unpaid orders of one groupon attend."""
    return Order.objects.filter(
        groupon_attend_id=groupon_attend_id, order_status=OrderStatus.UNPAID
    ).all()


def list_unpay_order_by_groupon_attend_ids(groupon_attend_ids: list):
    """List the unpaid orders across several groupon attends."""
    orders = Order.objects.filter(
        groupon_attend_id__in=groupon_attend_ids, order_status=OrderStatus.UNPAID
    ).all()
return orders<file_sep>/wsc_django/wsc_django/apps/shop/migrations/0002_auto_20210606_2054.py
# Generated by Django 3.1.6 on 2021-06-06 12:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration wiring up the shop app: links Shop to its
    # owner and PayChannel to its Shop, and adds lookup indexes.
    # Applied migrations should not be hand-edited.

    initial = True

    dependencies = [
        ('shop', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='shop',
            name='super_admin',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='商铺老板'),
        ),
        migrations.AddField(
            model_name='paychannel',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.shop', verbose_name='店铺对象'),
        ),
        migrations.AddIndex(
            model_name='shop',
            index=models.Index(fields=['shop_code'], name='ux_shop_code'),
        ),
        migrations.AddIndex(
            model_name='shop',
            index=models.Index(fields=['super_admin'], name='ix_super_admin'),
        ),
    ]
<file_sep>/wsc_django/wsc_django/apps/pvuv/interface.py
from product.services import list_product_ids_by_shop_id
def list_product_ids_by_shop_id_interface(shop_id: int, status: list):
    """List a shop's product ids filtered by status (interface wrapper)."""
    product_ids = list_product_ids_by_shop_id(shop_id, status)
return product_ids<file_sep>/wsc_django/wsc_django/apps/storage/constant.py
# Types of product stock-change records
class ProductStorageRecordType:
    MANUAL_MODIFY = 1  # manual adjustment
    MALL_SALE = 2  # sold in the mall
    ORDER_CANCEL = 3  # order cancelled
    ORDER_REFUND = 4  # order refunded


# Display names for stock-change record types (values are user-facing Chinese text)
PRODUCT_STORAGE_RECORD_TYPE = {
    ProductStorageRecordType.MANUAL_MODIFY: "手动修改",
    ProductStorageRecordType.MALL_SALE: "商城售出",
    ProductStorageRecordType.ORDER_CANCEL: "订单取消",
    ProductStorageRecordType.ORDER_REFUND: "订单退款",
}


# Status of a stock-change record (reserved for future use)
class ProductStorageRecordStatus:
    NORMAL = 1
    DELETE = 0


# Operator type of a stock-change record
class ProductStorageRecordOperatorType:
    STAFF = 1  # staff member
    CUSTOMER = 2  # customer


# Display names for operator types (values are user-facing Chinese text)
PRODUCT_STORAGE_RECORD_OPERATOR_TYPE = {
    ProductStorageRecordOperatorType.STAFF: "员工",
    ProductStorageRecordOperatorType.CUSTOMER: "客户",
}
<file_sep>/wsc_django/wsc_django/apps/demo/urls.py
"""
店铺设置相关路由
"""
from django.urls import path
from demo import views
urlpatterns = [
path('api/demo/', views.DemoView.as_view()) # 测试接口
]
<file_sep>/wsc_django/wsc_django/apps/customer/constant.py
# Kinds of customer loyalty-point events.
class CustomerPointType:
    FIRST = 1  # first order in the shop
    CONSUME = 2  # purchased products
    REFUND = 3  # refund
    # TODO: to be extended


# Display labels (user-facing, intentionally Chinese).
CUSTOMER_POINT_TYPE = {
    CustomerPointType.FIRST: "店铺首单",
    CustomerPointType.CONSUME: "购买货品",
    CustomerPointType.REFUND: "退款",
}


# Whether an address is the customer's default one.
class MineAddressDefault:
    YES = 1
    NO = 0


# Soft-delete status of a customer address.
class MineAddressStatus:
    NORMAL = 1
    DELETE = 0<file_sep>/wsc_django/wsc_django/apps/celery_tasks/config.py
from kombu import Exchange, Queue
timezone = 'Asia/Shanghai'  # task timezone
CELERY_BROKER = "amqp://guest@localhost:5672//"  # broker URL
# BUG FIX: the original line ended with a trailing comma, which made this a
# one-element tuple (600,) instead of the integer number of seconds that
# Celery expects for the soft time limit.
task_soft_time_limit = 600  # task soft timeout (seconds)
CELERY_CONCURRENCY = 4  # worker concurrency
worker_disable_rate_limits = True  # disable per-task rate limiting
# Bind the dedicated auto-work queue to its exchange and routing key.
task_queues = (
    Queue('wsc_auto_work', Exchange("wsc_auto_work"), routing_key='wsc_auto_work'),
)

# Route every scheduled background task onto the shared auto-work queue.
task_routes = {
    "auto_cancel_order": {
        'queue': 'wsc_auto_work',
        'routing_key': 'wsc_auto_work',
    },
    "auto_publish_groupon": {
        'queue': 'wsc_auto_work',
        'routing_key': 'wsc_auto_work',
    },
    "auto_expire_groupon": {
        'queue': 'wsc_auto_work',
        'routing_key': 'wsc_auto_work',
    },
    "auto_fail_groupon_attend": {
        'queue': 'wsc_auto_work',
        'routing_key': 'wsc_auto_work',
    },
    "auto_validate_groupon_attend": {
        'queue': 'wsc_auto_work',
        'routing_key': 'wsc_auto_work',
    }
} | 65f856bb3c782fe2fec794192260d5b7aa997ef3 | [
"Python",
"INI"
] | 151 | Python | hzh595395786/wsc_django | c0a4de1a4479fe83f36108c1fdd4d68d18348b8d | 0c8faf0cac1d8db8d9e3fa22f6914b6b64bf788b |
refs/heads/master | <file_sep># Ride: Tasks web application
This module adds a UI to run different time-consuming tasks.
## Task
The _Task_ interface is what you need to implement for each task you want to offer.
You can inherit from _AbstractTask_ to omit the obvious functions.
### Define Your Task
To make the application know your task, add a dependency definition for it in your _dependencies.json_:
```json
{
"dependencies": [
{
"interfaces": "ride\\web\\base\\task\\Task",
"class": "my\\TestTask",
"id": "test"
}
]
}
```
## Code Sample
Check the following code sample to get you on your way:
```php
<?php
namespace my;
use ride\library\i18n\translator\Translator;
use ride\library\form\FormBuilder;
use ride\library\system\file\browser\FileBrowser;
use ride\web\base\task\AbstractTask;
class TestTask extends AbstractTask {
/**
* Name of this task, should be the same as your dependency id
* @var string
*/
const NAME = 'test';
/**
* Constructs a new test task
* @param \ride\library\system\file\browser\FileBrowser $fileBrowser Let's
* use the file browser to retrieve the job result
*/
public function __construct(FileBrowser $fileBrowser) {
$this->fileBrowser = $fileBrowser;
}
/**
* Hook to prepare the form to ask for extra arguments
* @param \ride\library\form\FormBuilder $form
* @param \ride\library\i18n\translator\Translator $translator
* @return null
*/
public function prepareForm(FormBuilder $form, Translator $translator) {
// if you need extra information for your task, you can use this hook
// to prepare a form which asks for these arguments
$form->addRow('test', 'string', array(
'validators' => array('required' => array()),
));
}
/**
* Gets the queue job of this task
* @param array $data Extra arguments from the form
* @return \ride\library\queue\job\QueueJob Job to invoke
*/
public function getQueueJob(array $data) {
// your extra arguments, as defined in prepareForm, will be passed on to
// this method
// you should return a QueueJob which holds the logic of your task
return new TestQueueJob();
}
/**
* Gets the result of this task
 * @param string $jobId Id of the invoked queue job
* @return mixed
*/
public function getResult($jobId) {
// extract the result
$application = $this->fileBrowser->getApplicationDirectory();
return $application->getChild('data/test-' . $jobId . '.txt');
}
}
<file_sep><?php
namespace ride\web\base\task;
use ride\library\i18n\translator\Translator;
use ride\library\form\FormBuilder;
/**
 * Interface for a timely (long-running) task that can be launched from the UI
 */
interface Task {

    /**
     * Gets the display name of this task
     * @param \ride\library\i18n\translator\Translator $translator
     * @return string
     */
    public function getDisplayName(Translator $translator);

    /**
     * Hook to prepare the form to ask for extra arguments
     * @param \ride\library\form\FormBuilder $form
     * @param \ride\library\i18n\translator\Translator $translator
     * @return null
     */
    public function prepareForm(FormBuilder $form, Translator $translator);

    /**
     * Gets the queue job of this task
     * @param array $data Extra arguments from the form
     * @return \ride\library\queue\job\QueueJob Job to invoke
     */
    public function getQueueJob(array $data);

    /**
     * Gets the result of this task once the queue job has finished
     * @param string $queueJobId Id of the invoked queue job
     * @return mixed Result value: a File triggers a download, a View is
     * rendered as-is, a valid URL redirects, anything else is displayed on
     * the finish page
     */
    public function getResult($queueJobId);

}
<file_sep><?php
namespace ride\web\base\controller;
use ride\library\dependency\exception\DependencyException;
use ride\library\http\Response;
use ride\library\mvc\view\View;
use ride\library\queue\dispatcher\QueueDispatcher;
use ride\library\queue\QueueManager;
use ride\library\system\file\File;
use ride\library\validation\exception\ValidationException;
use ride\library\validation\factory\ValidationFactory;
use ride\web\base\task\Task;
/**
 * Controller to select and invoke a timely task
 */
class TaskController extends AbstractController {

    /**
     * Action to select the task to invoke
     * @return null
     */
    public function selectAction() {
        $translator = $this->getTranslator();

        // Build the select options from all registered Task dependencies;
        // the dependency id doubles as the task id in the URLs.
        $taskOptions = array();
        $tasks = $this->dependencyInjector->getAll('ride\\web\\base\\task\\Task');
        foreach ($tasks as $taskId => $task) {
            $taskOptions[$taskId] = $task->getDisplayName($translator);
        }

        $form = $this->createFormBuilder();
        $form->addRow('task', 'option', array(
            'label' => $translator->translate('label.task'),
            'description' => $translator->translate('label.task.select.description'),
            'options' => $taskOptions,
            'validators' => array(
                'required' => array(),
            ),
        ));
        $form = $form->build();

        if ($form->isSubmitted()) {
            try {
                $form->validate();

                $data = $form->getData();

                // Chain to the argument form of the chosen task
                $url = $this->getUrl('admin.task.invoke', array(
                    'task' => $data['task'],
                ));

                $this->response->setRedirect($url);

                return;
            } catch (ValidationException $exception) {
                $this->setValidationException($exception, $form);
            }
        }

        $this->setTemplateView('task/select', array(
            'form' => $form->getView(),
            'tasks' => $tasks,
        ));
    }

    /**
     * Action to ask extra task arguments before queueing the task for
     * invocation
     * @param string $task Id of the task
     * @return null
     */
    public function invokeAction(QueueDispatcher $queueDispatcher, $task) {
        $taskId = $task;
        try {
            $task = $this->dependencyInjector->get('ride\\web\\base\\task\\Task', $taskId);
        } catch (DependencyException $exception) {
            // Unknown task id
            $this->response->setNotFound();

            return;
        }

        $translator = $this->getTranslator();

        // Let the task add its own argument rows to the form
        $form = $this->createFormBuilder();
        $task->prepareForm($form, $translator);
        $form->build();

        if ($form->isSubmitted()) {
            try {
                $form->validate();

                $data = $form->getData();

                // Queue the job and track its progress by the job status id
                $job = $task->getQueueJob($data);
                $jobStatus = $queueDispatcher->queue($job);

                $url = $this->getUrl('admin.task.progress', array(
                    'task' => $taskId,
                    'job' => $jobStatus->getId(),
                ));

                $this->response->setRedirect($url);

                return;
            } catch (ValidationException $exception) {
                $this->setValidationException($exception, $form);
            }
        }

        $this->setTemplateView('task/invoke', array(
            'name' => $task->getDisplayName($translator),
            'task' => $task,
            'form' => $form->getView(),
        ));
    }

    /**
     * Action to show a progress indicator of the task
     * @param \ride\library\queue\QueueManager $queueManager
     * @param string $task Id of the task
     * @param string $job Id of the queue job
     * @return null
     */
    public function progressAction(QueueManager $queueManager, $task, $job) {
        $taskId = $task;
        try {
            $task = $this->dependencyInjector->get('ride\\web\\base\\task\\Task', $taskId);
        } catch (DependencyException $exception) {
            $this->response->setNotFound();

            return;
        }

        $job = $queueManager->getQueueJobStatus($job);
        if (!$job) {
            // No status means the job does not exist (anymore)
            $this->response->setNotFound();

            return;
        }

        $this->setTemplateView('task/progress', array(
            'name' => $task->getDisplayName($this->getTranslator()),
            'taskId' => $taskId,
            'task' => $task,
            'queueJobStatus' => $job,
        ));
    }

    /**
     * Action to finish up an invocation of a task
     * @param \ride\library\queue\QueueManager $queueManager
     * @param string $task Id of the task
     * @param string $job Id of the queue job
     * @return null
     */
    public function finishAction(QueueManager $queueManager, ValidationFactory $validationFactory, $task, $job) {
        $taskId = $task;
        $jobId = $job;

        try {
            $task = $this->dependencyInjector->get('ride\\web\\base\\task\\Task', $taskId);
        } catch (DependencyException $exception) {
            $this->response->setNotFound();

            return;
        }

        // NOTE(review): a queue job status that still exists here is treated
        // as a not-finished (or failed) job — confirm against the queue
        // manager's lifecycle, which appears to drop the status on success.
        $job = $queueManager->getQueueJobStatus($jobId);
        if ($job) {
            $this->response->setStatusCode(Response::STATUS_CODE_SERVER_ERROR);
            $this->setTemplateView('task/error', array(
                'name' => $task->getDisplayName($this->getTranslator()),
                'taskId' => $taskId,
                'task' => $task,
                'queueJobStatus' => $job,
            ));

            return;
        }

        $websiteValidator = $validationFactory->createValidator('website', array());

        // Dispatch on the result type: file download, view, redirect or page
        $result = $task->getResult($jobId);
        if ($result instanceof File) {
            $this->setDownloadView($result, $result->getName(), true);
        } elseif ($result instanceof View) {
            $this->response->setView($result);
        } elseif ($websiteValidator->isValid($result)) {
            $this->response->setRedirect($result);
        } else {
            $this->setTemplateView('task/finish', array(
                'name' => $task->getDisplayName($this->getTranslator()),
                'taskId' => $taskId,
                'task' => $task,
                'result' => $result,
            ));
        }
    }

}
<file_sep><?php
namespace ride\web\base\task;
use ride\library\i18n\translator\Translator;
use ride\library\form\FormBuilder;
/**
 * Abstract implementation to invoke a timely task
 */
abstract class AbstractTask implements Task {

    /**
     * Gets the display name of this task
     * @param \ride\library\i18n\translator\Translator $translator
     * @return string
     */
    public function getDisplayName(Translator $translator) {
        // NOTE(review): relies on a NAME constant declared by the concrete
        // subclass; the Task interface itself does not enforce its presence.
        return $translator->translate('task.' . static::NAME);
    }

    /**
     * Hook to prepare the form to ask for extra arguments; no-op by default
     * @param \ride\library\form\FormBuilder $form
     * @param \ride\library\i18n\translator\Translator $translator
     * @return null
     */
    public function prepareForm(FormBuilder $form, Translator $translator) {

    }

    /**
     * Gets the result of this task; null by default (plain finish page)
     * @param string $queueJobId Id of the invoked queue job
     * @return mixed
     */
    public function getResult($queueJobId) {
        return null;
    }

}
| c960657242037f11e99781c0f5fc6fc819da6cc7 | [
"Markdown",
"PHP"
] | 4 | Markdown | all-ride/ride-wba-task | a3f02a3ae59dd7fe518b4521a099da14cf290ca0 | 74469b5ecac6e1c342d50e95660104be540fbdec |
refs/heads/master | <file_sep>import {Component, OnInit } from '@angular/core';
import {ActivatedRoute, Params} from "@angular/router"
import { OfertasServico } from '../../ofertas.service';
import {Oferta} from "../../shared/oferta.model"
@Component({
  selector: 'app-onde-fica',
  templateUrl: './onde-fica.component.html',
  styleUrls: ['./onde-fica.component.css'],
  providers: [OfertasServico]
})
export class OndeFicaComponent implements OnInit {

  // "Where it is" details of the offer currently being viewed.
  public ondeFica: Oferta

  constructor(private route: ActivatedRoute, private ofertaServico: OfertasServico) { }

  ngOnInit() {
    // The offer id lives on the PARENT route, hence route.parent.
    this.route.parent.params.subscribe((parametros: Params) => {
      this.ofertaServico.getOndeFicaOfertaPorId(parametros.id)
        .then((oferta) => {
          this.ondeFica = oferta
        })
    })
  }

}
<file_sep>import {Http, Response} from "@angular/http"
import {Injectable} from "@angular/core"
import { Oferta } from "./shared/oferta.model";
import {URL_API, URL_API_USAR, URL_API_ONDE} from "./app.api"
import { Observable} from 'rxjs';
import { map } from 'rxjs/operators';
import { retry } from 'rxjs/operators';
@Injectable()
// Data service for offers, backed by a local json-server style REST API.
export class OfertasServico {

  constructor(private http: Http) {}

  // Featured offers for the home page (destaque=true).
  public getOfertas(): Promise<Oferta[]> {
    return this.http.get(`${URL_API}?destaque=true`)
      .toPromise()
      .then((resposta: Response) => resposta.json())
  }

  // Offers of one category (e.g. "diversao", "restaurante").
  public getOfertaPorCategorias(categorias: string): Promise<Oferta[]> {
    return this.http.get(`${URL_API}?categoria=${categorias}`)
      .toPromise()
      .then((resposta: Response) => resposta.json())
  }

  // Single offer by id; the API answers an array, so take element 0.
  public getOfertaPorId(id: number): Promise<Oferta> {
    return this.http.get(`${URL_API}?id=${id}`)
      .toPromise()
      .then((resposta: Response) => resposta.json()[0])
  }

  // "How to use" details of an offer.
  public getComoUsarOfertaPorId(id: number): Promise<Oferta> {
    return this.http.get(`${URL_API_USAR}?id=${id}`)
      .toPromise()
      .then((resposta: Response) => resposta.json()[0])
  }

  // "Where it is" details of an offer.
  public getOndeFicaOfertaPorId(id: number): Promise<Oferta> {
    return this.http.get(`${URL_API_ONDE}?id=${id}`)
      .toPromise()
      .then((resposta: Response) => resposta.json()[0])
  }

  // Search offers by description substring; retries up to 10 times.
  public pesquisaOferta(termo: string): Observable<Oferta[]> {
    return this.http.get(`${URL_API}?descricao_oferta_like=${termo}`)
      .pipe(map((resposta: Response) => resposta.json()), retry(10))
  }
}<file_sep>import { Component, OnInit } from '@angular/core';
import {ActivatedRoute, Params} from "@angular/router";
import {OfertasServico} from '../../ofertas.service';
import {Oferta} from "../../shared/oferta.model"
@Component({
  selector: 'app-como-usar',
  templateUrl: './como-usar.component.html',
  styleUrls: ['./como-usar.component.css'],
  providers: [OfertasServico]
})
export class ComoUsarComponent implements OnInit {

  // "How to use" details of the offer currently being viewed.
  public comoUsar: Oferta

  constructor(private route: ActivatedRoute, private ofertaServico: OfertasServico) { }

  ngOnInit() {
    // The offer id lives on the PARENT route, hence route.parent.
    this.route.parent.params.subscribe((parametros: Params) => {
      this.ofertaServico.getComoUsarOfertaPorId(parametros.id)
        .then((oferta) => {
          this.comoUsar = oferta
        })
    })
  }

}
<file_sep>import { Component, OnInit } from '@angular/core';
import {Oferta} from "../shared/oferta.model" //Modelo de objeto (interface)
import {OfertasServico} from "../ofertas.service" //O seriviço
@Component({
  selector: 'app-diversao',
  templateUrl: './diversao.component.html',
  styleUrls: ['./diversao.component.css'],
  providers: [OfertasServico]
})
export class DiversaoComponent implements OnInit {

  // Offers in the "diversao" (entertainment) category.
  public ofertas: Oferta[]

  constructor(private ofertasServico: OfertasServico) { }

  ngOnInit() {
    this.ofertasServico.getOfertaPorCategorias("diversao")
      .then((ofertas: Oferta[]) => {
        this.ofertas = ofertas
      })
  }

}
<file_sep>export const URL_API_RAIZ = "http://localhost:3000"
// REST endpoint roots served by the local backend (json-server style).
export const URL_API = "http://localhost:3000/ofertas"
export const URL_API_USAR = "http://localhost:3000/como-usar"
export const URL_API_ONDE = "http://localhost:3000/onde-fica"<file_sep>import { Component, OnInit } from '@angular/core';
import {OfertasServico} from "../ofertas.service"
import { Observable, Subject, of } from 'rxjs';
import {Oferta} from "../shared/oferta.model"
import { switchMap, debounceTime, distinctUntilChanged, catchError } from 'rxjs/operators';
@Component({
  selector: 'app-topo',
  templateUrl: './topo.component.html',
  styleUrls: ['./topo.component.css'],
  providers: [OfertasServico]
})
export class TopoComponent implements OnInit {

  // Stream of search results consumed by the template (async pipe).
  public oferta: Observable<Oferta[]>
  // Subject fed by the search box; acts as the source observable.
  private subjectPesquisa: Subject<string> = new Subject<string>()

  constructor(private ofertaServico: OfertasServico) { }

  ngOnInit() {
    // `oferta` emits Oferta[] built from each typed search term.
    this.oferta = this.subjectPesquisa
      .pipe(
        debounceTime(1000), // wait 1s of silence before querying
        distinctUntilChanged(), // skip repeats of the previous term
        switchMap((termo: string) => {
          if (termo.trim() === "") {
            // Empty term: emit an empty result list without hitting the API.
            return of<Oferta[]>([])
          }
          // Delegate the HTTP search to the service.
          return this.ofertaServico.pesquisaOferta(termo)
        }),
        catchError((err: any) => {
          // Swallow errors and fall back to an empty result list.
          return of<Oferta[]>([])
        })
      )
  }

  public pesquisa(termoDaBusca: string): void {
    // Push the typed term into the Subject's stream.
    this.subjectPesquisa.next(termoDaBusca)
  }

  limparPesquisa(): void {
    // Emitting "" clears the result list (see the switchMap above).
    this.subjectPesquisa.next("")
  }

}
/*Trabalhando o retorno da requisição a API
this.oferta.subscribe((ofertas: Oferta[])=> {
console.log(ofertas)
this.oferta2 = ofertas
})*/
/*
this.oferta = this.ofertaServico.pesquisaOferta(termoDaBusca)
this.oferta.subscribe(
(ofertas:Oferta[])=>console.log(ofertas),
(erro:any)=>console.log("Aconteceu algum erro" + erro.status),
()=>console.log("Fluxo ocorrido com sucesso!")
)
*/<file_sep>import { Component, OnInit, OnDestroy } from '@angular/core';
import {ActivatedRoute, Params} from "@angular/router"
import {OfertasServico} from "../ofertas.service"
import {Oferta} from "../shared/oferta.model"
import { interval } from 'rxjs';
import {ItemCarrinhoServico} from '../ordem.service'
@Component({
selector: 'app-oferta',
templateUrl: './oferta.component.html',
styleUrls: ['./oferta.component.css'],
providers:[OfertasServico]
})
export class OfertaComponent implements OnInit, OnDestroy {
public ofertas:Oferta
constructor(
private router: ActivatedRoute,
private ofertaServico:OfertasServico,
private itemCarrinhoServico: ItemCarrinhoServico
) { }
ngOnInit() {
this.router.params.subscribe((parametro: Params)=>{
this.ofertaServico.getOfertaPorId(parametro.id)
.then((oferta)=>{
this.ofertas = oferta
})
})
this.itemCarrinhoServico.exibirItens()
/*let tempo = interval(1500)
tempo.subscribe(n=>console.log)*/
}
ngOnDestroy(){
}
public adicionarItemCarrinho(): void{
this.itemCarrinhoServico.incluirItem(this.ofertas)
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { OrdemDeCompraService } from '../ordem-compra.service'
import { Pedido } from '../shared/pedido.model'
import {FormGroup, FormControl, Validators} from '@angular/forms'
import {ItemCarrinhoServico} from '../ordem.service'
import { ItemCarrinho } from '../shared/item-carrinho.model';
@Component({
selector: 'app-ordem-compra',
templateUrl: './ordem-compra.component.html',
styleUrls: ['./ordem-compra.component.css'],
providers: [ OrdemDeCompraService]
})
export class OrdemCompraComponent implements OnInit {
public idPedidoCompra:number
public itemCarrinho: ItemCarrinho[]
//Form
public formulario: FormGroup = new FormGroup({
'endereco': new FormControl(null, [Validators.required, Validators.minLength(3), Validators.maxLength(120)]),
'numero': new FormControl(null, [Validators.required, Validators.minLength(1), Validators.maxLength(20)]),
'complemento': new FormControl(null),
'formaPagamento': new FormControl(null, [Validators.required])
})
//and form
constructor(
private ordemCompraService: OrdemDeCompraService,
private itemCarrinhoServico: ItemCarrinhoServico
) { }
ngOnInit() {
this.itemCarrinho = this.itemCarrinhoServico.exibirItens()
console.log(this.itemCarrinho)
}
public confirmarCompra():void{
if(this.formulario.status === "INVALID"){
this.formulario.get('endereco').markAsTouched()
this.formulario.get('numero').markAsTouched()
this.formulario.get('complemento').markAsTouched()
this.formulario.get('formaPagamento').markAsTouched()
}else{
if(this.itemCarrinhoServico.exibirItens().length === 0){
alert("Vc ainda não adicionou nenhum item no carrinho!")
}else{
let pedido:Pedido = new Pedido(
this.formulario.value.endereco,
this.formulario.value.numero,
this.formulario.value.complemento,
this.formulario.value.formaPagamento,
this.itemCarrinhoServico.exibirItens()
)
this.ordemCompraService.efetivarCompra(pedido)
.subscribe((idPedido: any) => {
this.idPedidoCompra = idPedido
this.itemCarrinhoServico.limparCarrinho()
})
}
}
}
public adicionar(item: ItemCarrinho){
this.itemCarrinhoServico.adicionarQuantidade(item)
}
public diminuir(item: ItemCarrinho){
this.itemCarrinhoServico.diminuirQuantidade(item)
}
}//class
| e9f80089fadfee0052f834364498e07efbc0a2b8 | [
"TypeScript"
] | 8 | TypeScript | detarso7/angular-front-compra-coletiva | aa84f7f8755a5d724fcaf844347598610a309864 | aece5d8d56eb3846558f20271205565f20311f4f |
refs/heads/master | <file_sep>module.exports = (sequelize, Sequelize) => {
// Maps a push-notification device token to a user.
const users_device_token = sequelize.define('users_device_token', {
    device_id: {
        type: Sequelize.STRING
    },
    user_id: {
        type: Sequelize.BIGINT,
    },
}, {
    classMethods: {
        // NOTE(review): `classMethods` was removed in Sequelize v4; if this
        // project runs v4+, `associate` is silently ignored — verify version.
        associate: (models) => {
            users_device_token.belongsTo(models.users, { foreignKey: 'user_id' });
        }
    }
});
return users_device_token;
};<file_sep>var express = require('express');
var router = express.Router();

// TODO: stub — should fetch one resource by id; currently never responds,
// which leaves the client request hanging.
router.get("/:id", function (req, res, next) {

});
// router.get("/kitchens/:id/", )
// Placeholder handlers: echo a fixed string until implemented.
router.post('/save-single', function (req, res, next) {
    res.send('respond with a resource');
});

router.post('/save-cart', function (req, res, next) {
    res.send('respond with a resource');
});

module.exports = router;
<file_sep>const bcrypt = require('bcrypt');
const saltRounds = 10;
// Hash a plain-text password with bcrypt; resolves with the generated hash,
// rejects with the bcrypt error on failure.
let generateHash = (password) => {
    return new Promise((resolve, reject) => {
        bcrypt.hash(password, saltRounds, (err, hash) => {
            if (err) {
                reject(err);
            }
            resolve(hash);
        });
    });
}
// Compare a plain-text password against a bcrypt hash; resolves true on match.
// NOTE(review): on a mismatch this rejects with the bare STRING
// "invalid password" (not an Error); the subsequent resolve(false) is a no-op
// because the promise is already settled, but callers must expect a string
// rejection reason.
let compareHash = (password, hash) => {
    return new Promise((resolve, reject) => {
        bcrypt.compare(password, hash, function (err, res) {
            if (err) {
                reject(err);
            }
            if (res == false) {
                reject("invalid password")
            }
            resolve(res);
        });
    });
}
let makeId = (length) => {
var result = '';
var characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
var charactersLength = characters.length;
for (var i = 0; i < length; i++) {
result += characters.charAt(Math.floor(Math.random() * charactersLength));
}
return result;
}
// Public API of the password-hash / random-id helpers.
module.exports = { generateHash, compareHash, makeId };<file_sep>const sgMail = require('@sendgrid/mail');
sgMail.setApiKey(process.env.SENDGRID_API_KEY);
// E-mail the login OTP via SendGrid; resolves with SendGrid's send result.
// NOTE(review): '<EMAIL>' is a redacted placeholder — it must be replaced
// with a verified sender address before this can actually send.
var sendEmailOtp = async (otp, email) => {
    const msg = {
        to: email,
        from: '<EMAIL>',
        subject: 'Your Login OTP',
        text: `Your otp is: ${otp}`,
        html: `<strong>Your otp is: ${otp}</strong>`,
    };
    return sgMail.send(msg);
}
// TODO: not implemented — send a password-reset token/link by e-mail.
var sendPasswordReset = async (token, email) => {

}
module.exports = { sendEmailOtp }<file_sep>var {kitchenCreateValidation} = require("../../utils/validations");
var {createKitchen, updateKitchen} = require("./../../database/admin/kitchens")
let {validationErrorResponse, successResponse} = require("../../utils/response");
// Create a kitchen/restaurant from the posted fields.
let create = (req, res, next) => {
    let name = req.body.name;
    let dish_type = req.body.dish_type;
    let address_line_1 = req.body.address_line_1;
    let address_line_2 = req.body.address_line_2;
    let address_line_3 = req.body.address_line_3;
    let latitude = req.body.latitude;
    let longitude = req.body.longitude;
    // address_line_3 is optional, so it is excluded from validation.
    let validationResult = kitchenCreateValidation(req, {
        name: name,
        dish_type: dish_type,
        address_line_1: address_line_1,
        address_line_2: address_line_2,
        latitude: latitude,
        longitude: longitude
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    // NOTE(review): GeoJSON (RFC 7946) orders coordinates [longitude,
    // latitude]; this stores [latitude, longitude]. Verify the geo queries
    // use the same convention before changing either side.
    let point = { type: 'Point', coordinates: [latitude, longitude]};
    createKitchen({
        name: name,
        dish_type: dish_type,
        address_line_1: address_line_1,
        address_line_2: address_line_2,
        address_line_3: address_line_3 ? address_line_3 : "",
        latitude: latitude ? latitude : "",
        longitude: longitude ? longitude : "",
        location: point,
        status: 0 // closed by default
    }).then((kitchen) => {
        successResponse(res, kitchen);
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
// Update an existing kitchen/restaurant record by the id route param.
// BUG FIX: the original read `updateKitchen.then(req.body, id)`, which called
// .then on the FUNCTION object instead of invoking it — the update never ran
// and the request never got a response.
let update = (req, res, next) => {
    var id = req.params.id;
    if (!id) {
        validationErrorResponse(res, {message: "id is required"});
        return;
    }
    updateKitchen(req.body, id).then((kitchen) => {
        successResponse(res, kitchen);
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
module.exports = {create, update}<file_sep>module.exports = (sequelize, Sequelize) => {
// Records an order cancellation plus its refund/payment linkage.
const order_cancellation = sequelize.define('order_cancellation', {
    order_id: {
        type: Sequelize.STRING
    },
    cancel_type: {
        type: Sequelize.STRING,
    },
    cancel_reason: {
        type: Sequelize.STRING
    },
    refund_status: {
        type: Sequelize.STRING
    },
    payments_id: {
        type: Sequelize.BIGINT,
    }
}, {
    classMethods: {
        // NOTE(review): `classMethods` was removed in Sequelize v4; verify
        // the installed version actually registers this association.
        associate: (models) => {
            order_cancellation.belongsTo(models.orders, { foreignKey: 'order_id' });
        }
    }
});
return order_cancellation;
};<file_sep>let {registrationValidation, loginValidationEmail, deviceIdValidation, loginValidationMobile}
= require("../../utils/validations");
let {validationErrorResponse, successResponse} = require("../../utils/response");
let {
create, loginEmail, addToCart,
getForHome,
updateCartProduct, getFullCart, refreshJwt, refreshDeviceToken, updateOtpByUsername, verifyOtpByUsername
}
= require("../../database/common/users")
let {generateToken} = require("../../utils/token");
let {userTypes} = require("../../utils/constants")
let {generateHash, compareHash} = require("../../utils/bcrypt");
var {sendOtp} = require("../../utils/api");
// Home-screen payload: static banner images plus restaurants near the
// coordinates posted by the client.
// Changes: removed a leftover debug console.log and fixed the stray space in
// `req.body. latitude`.
let homePage = async (req, res, next) => {
    let bannerLinks = [
        { image: "https://dummyimage.com/600x400/000/fff" },
        { image: "https://dummyimage.com/600x400/000/fff" },
        { image: "https://dummyimage.com/600x400/000/fff" },
    ]
    let result = await getForHome(req.body.latitude, req.body.longitude);
    let response = {
        banners: bannerLinks,
        restaurants: result
    }
    successResponse(res, response);
}
// Register a new app user: validate the payload, bcrypt-hash the password
// and persist the account.
var register = (req, res, next) => {
    let email = req.body.email;
    let full_name = req.body.full_name;
    // Restored from a redacted placeholder (`<PASSWORD>`): the password comes
    // from the request body like every other field.
    let password = req.body.password;
    let mobile = req.body.mobile;
    let validationResult = registrationValidation(req, {
        email: email,
        full_name: full_name,
        password: password,
        mobile: mobile
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    generateHash(password).then((hash) => {
        return create(full_name, email, hash, mobile, userTypes.app_user)
    }).then((result) => {
        successResponse(res, result);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    })
}
// Email + password login; answers the user record with a fresh JWT attached.
var login = (req, res, next) => {
    let email = req.body.email;
    let password = req.body.password;
    let validationResult = loginValidationEmail(req, {
        email: email,
        password: password,
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    // BUG FIX: `user` was never declared, making it an implicit global shared
    // between concurrent requests; scope it to this handler.
    let user;
    loginEmail(email).then((result) => {
        user = result;
        return compareHash(password, user.password)
    }).then((hash) => {
        return generateToken(user.id);
    }).then((token) => {
        user.token = token;
        successResponse(res, user);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    });
}
// Mobile-number login variant of `login`.
var loginMobile = (req, res, next) => {
    let mobile = req.body.mobile;
    // Restored from a redacted placeholder (`<PASSWORD>`).
    let password = req.body.password;
    let validationResult = loginValidationMobile(req, {
        mobile: mobile
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    // BUG FIX: the original called THIS handler recursively
    // (`loginMobile(mobile)`), which never reached the database and recursed
    // forever. Delegate to the database layer instead.
    // NOTE(review): confirm database/common/users exports `loginMobile`.
    const usersDb = require("../../database/common/users");
    let user;
    usersDb.loginMobile(mobile).then((result) => {
        user = result;
        return compareHash(password, user.password)
    }).then((hash) => {
        return generateToken(user.id);
    }).then((token) => {
        user.token = token;
        successResponse(res, user);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    });
}
// Re-issue a JWT after re-checking the user's email/password credentials.
// Changes: restored the redacted `<PASSWORD>` placeholder, declared `user`
// locally (it was an implicit global) and removed the debug log of the email.
var refreshToken = (req, res, next) => {
    let email = req.body.email;
    let password = req.body.password;
    let validationResult = loginValidationEmail(req, {
        email: email,
        password: password,
    })
    if (validationResult.error) {
        return validationErrorResponse(res, validationResult.error.details);
    }
    let user;
    refreshJwt(email, password).then((result) => {
        user = result;
        return generateToken(result.id);
    }).then((token) => {
        user.token = token;
        successResponse(res, user);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    });
}
// Store/refresh the push-notification device token for a user.
var refreshDeviceId = (req, res, next) => {
    let validation = deviceIdValidation(req.body)
    if (validation.error) {
        return validationErrorResponse(res, validation.error.details);
    }
    refreshDeviceToken(req.body).then((deviceId) => {
        successResponse(res, deviceId);
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
// TODO: not implemented — fetch the authenticated user's profile.
var profile = (req, res, next) => {

}
// TODO: not implemented — list the authenticated user's orders.
var getOrders = (req, res, next) => {

}
// Return the full cart for the user id in the route params.
var getCart = (req, res, next) => {
    var userId = req.params.userId;
    getFullCart(userId).then((cart) => {
        successResponse(res, cart)
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
// Add a product to the user's cart.
// Changes: removed a leftover debug console.log and validated userId for
// consistency with the other required fields (previously a missing userId
// surfaced later as an opaque database error).
var saveCart = (req, res, next) => {
    var userId = req.body.userId;
    var productId = req.body.productId;
    var quantity = req.body.quantity;
    if (!userId) {
        return validationErrorResponse(res, "userId is required");
    }
    if (!productId) {
        return validationErrorResponse(res, "productId is required");
    }
    if (!quantity) {
        return validationErrorResponse(res, "quantity is required");
    }
    addToCart(userId, productId, quantity).then((cart) => {
        successResponse(res, cart)
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
// Update the quantity of a product already in the user's cart.
// Change: validate userId like the sibling handlers do for the other fields.
var updateCart = (req, res, next) => {
    var userId = req.body.userId;
    var productId = req.body.productId;
    var quantity = req.body.quantity;
    if (!userId) {
        return validationErrorResponse(res, "userId is required");
    }
    if (!productId) {
        return validationErrorResponse(res, "productId is required");
    }
    if (!quantity) {
        return validationErrorResponse(res, "quantity is required");
    }
    updateCartProduct(userId, productId, quantity).then((cart) => {
        successResponse(res, cart)
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
// Generate a fresh OTP for `username` and send it by SMS/e-mail.
// SECURITY NOTE(review): the OTP is echoed back in the HTTP response and the
// whole user record is console.log'ed — both leak the secret and defeat the
// purpose of the OTP; remove before production.
let getOtp = async (req, res, next) => {
    let username = req.body.username;
    try {
        let user = await updateOtpByUsername(username);
        console.log(user)
        let send = await sendOtp(user.mobile,
            user.otp, user.email);
        return successResponse(res, {otp: user.otp})
    } catch (error) {
        console.log(error);
        return validationErrorResponse(res, error)
    }
}
// Check the OTP supplied for `username`; on success answer a fresh JWT.
// Changes: fixed the message typo ("succesfully") and removed debug logs that
// printed the user id and the freshly minted token.
let verifyOtp = async (req, res, next) => {
    let otp = req.body.otp;
    let username = req.body.username;
    try {
        let user = await verifyOtpByUsername(username, otp);
        let token = await generateToken(user.id);
        return successResponse(res, {
            message: "otp verified successfully",
            token: token
        })
    } catch (error) {
        console.log(error)
        return validationErrorResponse(res, {message: "Otp validation failed"})
    }
}
module.exports = {
homePage,
loginMobile,
register,
getOtp,
verifyOtp,
login,
refreshToken,
refreshDeviceId,
getCart, saveCart, profile, getOrders, updateCart
}<file_sep>module.exports = (sequelize, Sequelize) => {
// Discount definition: type, minimum order value to qualify, and value.
const discounts = sequelize.define('discounts', {
    discount_type: {
        type: Sequelize.STRING
    },
    minimum_value: {
        type: Sequelize.INTEGER,
    },
    status: {
        type: Sequelize.INTEGER
    },
    discount_value: {
        type: Sequelize.STRING
    }
}, {
    classMethods: {
        // NOTE(review): `classMethods` was removed in Sequelize v4; verify
        // the installed version actually registers these associations.
        associate: (models) => {
            discounts.belongsTo(models.orders, { foreignKey: 'order_id' });
            discounts.belongsTo(models.restaurants, { foreignKey: 'discount_id' });
        }
    }
});
return discounts;
};<file_sep>var models = require("../../models")
var restaurants = models.restaurants;
// Insert a restaurant row; resolves with the created record.
// Changes: fixed option typo `plane` -> `plain`, removed debug log banners,
// and guarded `error.original` (it is only present for low-level DB errors,
// so validation errors previously threw a TypeError while being reported).
let createKitchen = async (body) => {
    try {
        let createRst = await restaurants.create(body, {
            plain: true
        });
        return createRst;
    } catch (error) {
        console.log(error);
        return Promise.reject({
            message: error.original ? error.original.sql : error.message,
            stack: "database/kitchens.js/19"
        });
    }
}
// Update a restaurant row by id; resolves with Sequelize's update result.
// BUG FIX: the promise was returned without `await`, so rejections escaped
// this try/catch instead of being mapped into the rejection object below.
let updateKitchen = async (body, id) => {
    try {
        let updateRst = await restaurants.update(
            body,
            {
                where: {
                    id: id
                },
                returning: true,
                plain: true
            }
        )
        return updateRst;
    } catch (error) {
        console.log(error);
        return Promise.reject({
            message: error,
            stack: "database/kitchens.js/29"
        });
    }
}
// List all open restaurants (status === 1). The `body` argument is unused.
// NOTE(review): on failure this RETURNS the error instead of rethrowing, so
// callers receive it as if it were a result list — confirm that is intended.
let getList = async (body) => {
    try {
        var kitchens = await restaurants.findAll({
            where: {
                status: 1
            }
        })
        return kitchens;
    } catch (error) {
        console.error(error);
        return error;
    }
}
module.exports = { createKitchen, updateKitchen, getList }<file_sep>let jwt = require("jsonwebtoken");
const { JWT_SECRET } = require("./constants")
// Signs a JWT whose payload carries only the user's id.
// NOTE: no expiry is set on the token.
let generateToken = async (userId) => {
    const payload = { userId: userId };
    return jwt.sign(payload, JWT_SECRET);
}
// Verifies a JWT against JWT_SECRET and returns the decoded payload.
// NOTE(review): on an invalid/expired token this RETURNS the error object
// instead of rejecting, so callers must type-check the result — confirm
// this contract is intentional.
let validateToken = async (token) => {
    try {
        var decoded = jwt.verify(token, JWT_SECRET);
        return decoded;
    }
    catch (error) {
        console.log(error);
        return error;
    }
}
module.exports = { generateToken, validateToken }<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `payments` table.
    const payments = sequelize.define('payments', {
        payment_type: {
            type: Sequelize.STRING
        },
        amount: {
            type: Sequelize.FLOAT,
        },
        transaction_id: {
            type: Sequelize.BIGINT
        },
        status: {
            type: Sequelize.INTEGER //0=failed, 1= done
        },
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — this association is likely never registered.
        classMethods: {
            associate: (models) => {
                payments.hasMany(models.discounts, { foreignKey: 'discount_id' });
            }
        }
    });
    return payments;
};<file_sep>'use strict';
module.exports = {
up: (queryInterface, Sequelize) => {
/* Add altering commands here.
Return a promise to correctly handle asynchronicity.
Example: */
return queryInterface.createTable('users', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
email: {
type: Sequelize.STRING,
unique: true,
allowNull: true
},
mobile: {
type: Sequelize.STRING,
unique: true
},
password: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
last_login: {
type: Sequelize.DATE
},
login_status: {
type: Sequelize.INTEGER
},
user_type: {
type: Sequelize.STRING
},
otp: {
type: Sequelize.INTEGER
},
otp_verified: {
type: Sequelize.INTEGER,
default: 0
},
createdAt: {
allowNull: true,
type: Sequelize.DATE,
default: Sequelize.fn("NOW")
},
updatedAt: {
allowNull: true,
type: Sequelize.DATE,
default: Sequelize.fn("NOW")
},
});
},
down: (queryInterface, Sequelize) => {
/* Add reverting commands here.
Return a promise to correctly handle asynchronicity.
Example: */
return queryInterface.dropTable('users');
}
};<file_sep>var models = require("../../models");
var payments = models.payments;
// Records a payment attempt row.
let savePayment = async (body) => {
    try {
        let payment = await payments.create(body);
        return payment;
    } catch (error) {
        // Reject so callers' .catch() handlers fire; previously the error
        // object was returned as if it were a successful result.
        return Promise.reject(error);
    }
}
module.exports = { savePayment }<file_sep>var Joi = require("@hapi/joi");
// Validates registration payloads: name and password are mandatory, email is
// optional (two-part domain), and mobile must be a 10-digit number.
let registrationValidation = (req, body) => {
    const schema = Joi.object({
        full_name: Joi.string().required(),
        email: Joi.string().email({ minDomainSegments: 2 }),
        password: Joi.string().required(),
        mobile: Joi.number().integer().min(1000000000).max(9999999999)
    });
    const { full_name, email, mobile, password } = body;
    const result = schema.validate({ full_name, email, mobile, password });
    return { error: result.error, value: result.value };
}
// Validates email-login payloads: required email plus required password.
// The password rule line was a redaction artifact ("<PASSWORD>()") and
// is restored here as a required string.
let loginValidationEmail = (req, body) => {
    const schema = Joi.object({
        email: Joi.string().email({minDomainSegments: 2}).required(),
        password: Joi.string().required(),
    });
    const {error, value} = schema.validate({
        email: body.email,
        password: body.password
    });
    return {error, value};
}
// Validates a mobile-login payload.
// Bug fixed: `.min(10).max(10)` constrained the numeric VALUE to exactly 10,
// rejecting every real phone number. Use the same 10-digit range as
// registrationValidation, and require the field to be present.
let loginValidationMobile = (body) => {
    const schema = Joi.object({
        mobile: Joi.number().integer().min(1000000000).max(9999999999).required()
    });
    const {error, value} = schema.validate({
        mobile: body.mobile
    });
    return {error, value};
}
// Validates an OTP payload.
// Bug fixed: `schema.validate({})` checked an EMPTY object, so the supplied
// otp was never inspected and the required rule always failed.
let loginValidationOtp = (body) => {
    const schema = Joi.object({
        otp: Joi.number().required()
    });
    const {error, value} = schema.validate({ otp: body.otp });
    return {error, value};
}
// Validates kitchen-creation payloads: every field is a mandatory string.
let kitchenCreateValidation = (res, body) => {
    const fields = [
        "name", "dish_type", "address_line_1",
        "address_line_2", "latitude", "longitude"
    ];
    const shape = {};
    for (const field of fields) {
        shape[field] = Joi.string().required();
    }
    const result = Joi.object(shape).validate(body);
    return { error: result.error, value: result.value };
}
// Validates discount payloads: type/value strings plus a numeric threshold.
var discountValidation = (body) => {
    const rules = {
        discount_type: Joi.string().required(),
        minimum_value: Joi.number().required(),
        discount_value: Joi.string().required(),
    };
    const result = Joi.object(rules).validate(body);
    return { error: result.error, value: result.value };
}
// Validates payment payloads before they are persisted.
var paymentValidation = (body) => {
    const rules = {
        payment_type: Joi.string().required(),
        amount: Joi.number().required(),
        transaction_id: Joi.string().required()
    };
    const result = Joi.object(rules).validate(body);
    return { error: result.error, value: result.value };
}
// Validates a full-cart checkout request: both ids are mandatory numbers.
var cartCheckoutValidation = (body) => {
    const result = Joi.object({
        user_id: Joi.number().required(),
        payment_id: Joi.number().required(),
    }).validate(body);
    return { error: result.error, value: result.value };
}
// Validates a single-product checkout payload.
// Bug fixed: `.require()` is not a Joi method (typo for `.required()`) and
// threw a TypeError while the schema was being built.
var singleCheckoutValidation = (body) => {
    const schema = Joi.object({
        payment_id: Joi.number().required(),
        product_id: Joi.number().required(),
        user_id: Joi.number().required(),
        quantity: Joi.number().required(),
        discount_id: Joi.number().required()
    });
    const {error, value} = schema.validate(body);
    return {error, value};
}
// Validates a device-token refresh payload: device id string + numeric user id.
var deviceIdValidation = (body) => {
    const result = Joi.object({
        user_id: Joi.number().required(),
        device_id: Joi.string().required(),
    }).validate(body);
    return { error: result.error, value: result.value };
}
// Validates kitchen-list requests (device + user identity).
// NOTE(review): schema is identical to deviceIdValidation; also confirm
// whether this function should be included in the module exports — it is
// defined but not referenced elsewhere in this file.
let getKitchenListValidation = (body) => {
    const schema = Joi.object({
        device_id: Joi.string().required(),
        user_id: Joi.number().required(),
    });
    const {error, value} = schema.validate(body);
    return {error, value};
}
module.exports = {
singleCheckoutValidation,
deviceIdValidation,
cartCheckoutValidation,
registrationValidation, loginValidationEmail,
loginValidationMobile, loginValidationOtp, kitchenCreateValidation, discountValidation, paymentValidation
}
<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `address_types` lookup table.
    const address_types = sequelize.define('address_types', {
        type: {
            type: Sequelize.STRING
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — this association is likely never registered.
        classMethods: {
            associate: (models) => {
                address_types.belongsTo(models.users_address, { foreignKey: 'type' });
            }
        }
    });
    return address_types;
};<file_sep>let getOrderByRestaurantId = (req, res, next) => {
}
// TODO(review): the handlers below are unimplemented stubs (they send no
// response). changeOrderStatus and getOrderInvoice are also not part of the
// module exports on the next line — confirm whether they should be.
let getOrderById = (req, res, next) => {
}
let changeOrderStatus = (req, res, next) => {
}
let getOrderInvoice = (req, res, next) => {
}
module.exports = { getOrderByRestaurantId, getOrderById }<file_sep>let { registrationValidation, loginValidationEmail } = require("../../utils/validations");
let { validationErrorResponse, successResponse } = require("../../utils/response");
let { create, loginEmail } = require("../../database/common/users");
let { generateToken } = require("../../utils/token");
let { userTypes } = require("../../utils/constants");
let { generateHash, compareHash } = require("../../utils/bcrypt");
// POST handler: registers an admin-type account. Validates input, hashes the
// password, and persists the user.
var register = (req, res, next) => {
    let email = req.body.email;
    let full_name = req.body.full_name;
    // Restored from a redaction artifact ("let password = <PASSWORD>;"):
    // read the raw password from the request body before hashing below.
    let password = req.body.password;
    let mobile = req.body.mobile;
    let validationResult = registrationValidation(req, {
        email: email,
        full_name: full_name,
        password: password,
        mobile: mobile
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    // Hash first, then persist as an admin-type user.
    generateHash(password).then((hash) => {
        return create(full_name, email, hash, mobile, userTypes.admin);
    }).then((result) => {
        successResponse(res, result);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    })
}
// POST handler: email/password login. Looks the user up, verifies the
// password against the stored hash, then issues a JWT.
var login = (req, res, next) => {
    let email = req.body.email;
    let password = req.body.password;
    let validationResult = loginValidationEmail(req, {
        email: email,
        // restored from a redaction artifact ("password: <PASSWORD>,")
        password: password,
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    var user = {}
    loginEmail(email).then((result) => {
        user = result;
        // NOTE(review): the boolean result of compareHash is ignored below —
        // if the bcrypt helper RESOLVES false on mismatch (instead of
        // rejecting), a wrong password would still be issued a token. Confirm
        // compareHash rejects on mismatch.
        return compareHash(password, user.password)
    }).then((hash) => {
        return generateToken(user.id);
    }).then((token) => {
        user.token = token;
        successResponse(res, user);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    });
}
// POST handler: mobile-number login.
// Bugs fixed: the handler previously called ITSELF (`loginMobile(mobile)`)
// recursively instead of the DB lookup; neither loginValidationMobile nor the
// DB function was imported; `user` was an undeclared implicit global; and the
// validator was invoked with (req, body) although it accepts only (body).
var loginMobile = (req, res, next) => {
    let { loginValidationMobile } = require("../../utils/validations");
    let { loginMobile: findUserByMobile } = require("../../database/common/users");
    let mobile = req.body.mobile;
    let password = req.body.password;
    let validationResult = loginValidationMobile({
        mobile: mobile
    })
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    var user = {}
    findUserByMobile(mobile).then((result) => {
        user = result;
        return compareHash(password, user.password)
    }).then((hash) => {
        return generateToken(user.id);
    }).then((token) => {
        user.token = token;
        successResponse(res, user);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    });
}
// POST handler: re-authenticates with email/password and returns a fresh JWT.
// Fixes: `refreshJwt` was never imported in this file; `req.body.<PASSWORD>`
// was a redaction artifact; `user` was an undeclared implicit global.
var refreshToken = (req, res, next) => {
    let { refreshJwt } = require("../../database/common/users");
    let email = req.body.email;
    let password = req.body.password;
    let validationResult = loginValidationEmail(req, {
        email: email,
        password: password,
    })
    if (validationResult.error) {
        return validationErrorResponse(res, validationResult.error.details);
    }
    var user = {}
    refreshJwt(email, password).then((result) => {
        user = result;
        return generateToken(result.id);
    }).then((token) => {
        user.token = token;
        successResponse(res, user);
    }).catch((error) => {
        console.log(error);
        validationErrorResponse(res, error);
    });
}
// POST handler: stores/refreshes a user's push-notification device id.
// Fix: neither deviceIdValidation nor refreshDeviceToken was imported at the
// top of this file, so this handler threw a ReferenceError.
var refreshDeviceId = (req, res, next) => {
    let { deviceIdValidation } = require("../../utils/validations");
    let { refreshDeviceToken } = require("../../database/common/users");
    let validation = deviceIdValidation(req.body)
    if (validation.error) {
        return validationErrorResponse(res, validation.error.details);
    }
    refreshDeviceToken(req.body).then((deviceId) => {
        successResponse(res, deviceId);
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
// TODO(review): unimplemented handlers — exported on the next line but they
// send no response, so requests to them will hang.
var profile = (req, res, next) => {
}
var getOrders = (req, res, next) => {
}
module.exports = { register, login, refreshToken, refreshDeviceId, profile, getOrders, loginMobile }<file_sep>'use strict';
module.exports = {
up: (queryInterface, Sequelize) => {
/* Add altering commands here.
Return a promise to correctly handle asynchronicity.
Example: */
return queryInterface.createTable('orders', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
user_id: {
type: Sequelize.STRING
},
address_id: {
type: Sequelize.BIGINT,
references: {
model: 'users_address',
key: 'id'
},
},
product_id: {
type: Sequelize.BIGINT,
references: {
model: 'products',
key: 'id'
},
},
quantity: {
type: Sequelize.INTEGER
},
discount_id: {
type: Sequelize.BIGINT,
references: {
model: 'discounts',
key: 'id'
},
},
payment_id: {
type: Sequelize.BIGINT,
references: {
model: 'payments',
key: 'id'
},
},
order_status: {
type: Sequelize.STRING
},
order_pick_time: {
type: Sequelize.DATE
},
total_price: {
type: Sequelize.FLOAT
},
createdAt: {
allowNull: true,
type: Sequelize.DATE,
default: Sequelize.fn("NOW")
},
updatedAt: {
allowNull: true,
type: Sequelize.DATE,
default: Sequelize.fn("NOW")
}
});
},
down: (queryInterface, Sequelize) => {
/* Add reverting commands here.
Return a promise to correctly handle asynchronicity.
Example: */
return queryInterface.dropTable('orders');
}
};
<file_sep>let request = require("request");
let { sendEmailOtp } = require("./mailer");
let msgUrl = process.env.MSG91_URL;
// Sends an OTP by SMS (MSG91 gateway) and by email.
// Fixes: the original mutated the MODULE-LEVEL `msgUrl`, so the {placeholder}
// tokens only existed on the first call — every later OTP hit a URL with the
// first call's values baked in. It also logged the raw OTP and hard-coded a
// redacted recipient ("<EMAIL>") while ignoring the `email` parameter.
let sendOtp = async (mobile, otp, email) => {
    // Build the gateway URL from a fresh copy of the template each call.
    let url = process.env.MSG91_URL;
    url = url.replace("{senderid}", "ButtiOOTA");
    url = url.replace("{message}", "Your otp is: " + otp);
    url = url.replace("{mobile_no}", mobile);
    url = url.replace("{authkey}", process.env.MSG91_KEY);
    url = url.replace("{otp}", otp);
    request.post({
        headers: { 'content-type': 'application/x-www-form-urlencoded' },
        url: url,
    }, function (error, response, body) {
        // NOTE(review): `request` discards this callback's return value, so a
        // failed SMS is only logged here, never propagated to the caller.
        if (error) {
            console.log(error);
            return;
        }
        console.log(body);
    });
    return await sendEmailOtp(otp, email);
}
module.exports = { sendOtp }
<file_sep>var express = require('express');
// Admin kitchen routes: POST /create adds a kitchen, PUT /:id updates one.
var router = express.Router();
var { create, update } = require("../../../../controllers/admin/kitchens");
// router.get("/list", );  // TODO(review): list route is commented out — no handler wired
router.post('/create', create);
router.put('/:id', update);
module.exports = router;
<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `restaurants` (kitchens) table.
    // `location` is a GEOGRAPHY column used for distance queries
    // (see getForHome's ST_Distance_Sphere in database/common/users.js).
    const restaurants = sequelize.define('restaurants', {
        name: {
            type: Sequelize.STRING
        },
        address_line_1: {
            type: Sequelize.STRING
        },
        address_line_2: {
            type: Sequelize.STRING
        },
        address_line_3: {
            type: Sequelize.STRING
        },
        latitude: {
            type: Sequelize.FLOAT
        },
        longitude: {
            type: Sequelize.FLOAT
        },
        location: {
            type: Sequelize.GEOGRAPHY
        },
        status: {
            type: Sequelize.INTEGER //closed, open, inactive,
        },
        gst: {
            type: Sequelize.STRING
        },
        cgst: {
            type: Sequelize.STRING
        },
        is_discount_available: {
            type: Sequelize.INTEGER
        },
        discount_id: {
            type: Sequelize.BIGINT,
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — this association is likely never registered.
        classMethods: {
            associate: (models) => {
                restaurants.hasMany(models.discounts, {foreignKey: 'discount_id'});
            }
        }
    });
    return restaurants;
};
<file_sep>var express = require("express");
var router = express.Router();
// Sub-routers composing the admin API surface.
var users = require("./users");
var orders = require("./orders");
router.use("/users", users);
router.use("/orders", orders);
module.exports = router;<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `users_token` table (issued auth tokens).
    const users_token = sequelize.define('users_token', {
        token: {
            type: Sequelize.STRING
        },
        status: {
            type: Sequelize.INTEGER
        },
        user_id: {
            type: Sequelize.BIGINT,
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — this association is likely never registered.
        classMethods: {
            associate: (models) => {
                users_token.belongsTo(models.users, { foreignKey: 'user_id' });
            }
        }
    });
    return users_token;
};<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `users_address` table (delivery addresses).
    // `type` references the address_types lookup table.
    const users_address = sequelize.define('users_address', {
        type: {
            type: Sequelize.BIGINT,
        },
        user_id: {
            type: Sequelize.BIGINT,
        },
        address_line_1: {
            type: Sequelize.STRING
        },
        address_line_2: {
            type: Sequelize.STRING
        },
        address_line_3: {
            type: Sequelize.STRING
        },
        pincode: {
            type: Sequelize.INTEGER
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+. Also `hasOne` here looks inverted — the FKs (user_id, type) live
        // on THIS table, which is normally a `belongsTo`. Confirm intent.
        classMethods: {
            associate: (models) => {
                users_address.hasOne(models.users, { foreignKey: 'user_id' });
                users_address.hasOne(models.address_types, { foreignKey: 'type' });
            }
        }
    });
    return users_address;
};<file_sep>let models = require("../../models");
// Model handles used throughout this data-access module.
let users = models.users;
let restaurants = models.restaurants;
let users_device_token = models.users_device_token;
let users_cart = models.users_cart;
const Op = models.Sequelize.Op;
// NOTE(review): module-load console.log of the users model — debug leftover.
console.log(users);
let sequelize = models.sequelize;
// Returns restaurants ordered by distance from the given point, limited to a
// radius. Fix: `$lte` string operators were removed in Sequelize 5 — use
// Op.lte, which this module already imports.
let getForHome = async (latitude, longitude) => {
    // SECURITY(review): latitude/longitude are interpolated into a raw SQL
    // literal — ensure they are validated as numbers upstream, or bind them.
    const location = sequelize.literal(`ST_GeomFromText('POINT(${latitude} ${longitude})')`)
    const distance = sequelize.fn('ST_Distance_Sphere', sequelize.col('location'), location)
    // NOTE(review): ST_Distance_Sphere returns meters — a radius of 5 looks
    // far too small for "nearby restaurants"; confirm the intended unit.
    const inRadius = await restaurants.findAll({
        order: distance,
        where: sequelize.where(distance, { [Op.lte]: 5 }),
        logging: console.log
    })
    return inRadius
}
// Creates a user after checking that neither email nor mobile is taken.
// `password` is expected to be pre-hashed by the caller (controllers call
// generateHash first). Fixes: the `password:` line was a redaction artifact;
// `plane` was a typo for `plain`; the admin controller passes a 5th argument
// (userTypes.admin) that was silently ignored — accept it with a
// backward-compatible default of "app"; and the catch guarded against
// `error.original` being absent on non-DB errors.
let create = async (full_name, email, password, mobile, user_type = "app") => {
    try {
        // Reject duplicate accounts on either email or mobile.
        let getUser = await users.findAll({
            where: {
                [Op.or]: {
                    email: email,
                    mobile: mobile
                }
            }
        })
        if (getUser && getUser.length > 0) {
            return Promise.reject({
                message: "user exists"
            });
        }
        let createUser = await users.create({
            name: full_name,
            email: email,
            password: password,
            mobile: mobile,
            status: 1,
            user_type: user_type,
            login_status: 0 //
        }, {
            plain: true
        });
        return createUser;
    } catch (error) {
        console.log("----------error-----------");
        console.log(error);
        console.log("----------error-----------");
        return Promise.reject({
            message: (error.original && error.original.sql) || error.message || String(error),
            stack: "database/user.js/39"
        });
    }
}
// Fetches a user by email; rejects when no account matches.
// The password is verified by the CALLER against the stored bcrypt hash
// (controllers invoke compareHash, and call this with a single argument), so
// it must NOT be part of the WHERE clause — a hash never equals the raw
// password. The `password` parameter is kept for signature compatibility
// (refreshJwt forwards it).
let loginEmail = async (email, password) => {
    try {
        let getUser = await users.findOne({
            where: {
                email: email
            }
        })
        if (getUser === null) {
            return Promise.reject({
                message: "No user found"
            });
        }
        return getUser;
    } catch (error) {
        console.log(error)
        return Promise.reject(error)
    }
}
// Fetches a user by mobile number; rejects when no account matches.
// Was an empty stub although the app users controller already calls it
// during mobile login; implemented to mirror loginEmail.
let loginMobile = async (mobile) => {
    try {
        let getUser = await users.findOne({
            where: {
                mobile: mobile
            }
        })
        if (getUser === null) {
            return Promise.reject({
                message: "No user found"
            });
        }
        return getUser;
    } catch (error) {
        console.log(error)
        return Promise.reject(error)
    }
}
// Re-authenticates a user (by forwarding to loginEmail) before the controller
// issues a fresh JWT.
let refreshJwt = async (email, password) => {
    return loginEmail(email, password)
}
// Upserts the device-token row for a user: update when one exists, otherwise
// create it.
let refreshDeviceToken = async (body) => {
    try {
        var user = await users_device_token.findOne({
            where: {
                user_id: body.user_id
            },
            raw: true
        })
        var updateDeviceId = {}
        if (user) {
            updateDeviceId = await users_device_token.update(body, {
                where: {
                    user_id: body.user_id
                }
            })
        } else {
            updateDeviceId = await users_device_token.create(body);
        }
        return updateDeviceId;
    } catch (error) {
        console.log(error)
        // Reject instead of returning the error as a value, so the
        // controller's .catch() path actually runs.
        return Promise.reject(error);
    }
}
// TODO(review): unimplemented profile accessors — exported below but return
// undefined.
let getUserInfo = () => {
}
let updateUserInfo = () => {
}
// Returns the cart rows belonging to ONE user.
// Bug fixed: the original called findAll() with no filter, ignoring `userId`
// and returning every user's cart rows.
let getFullCart = async (userId) => {
    try {
        var cart = await users_cart.findAll({
            where: {
                user_id: userId
            }
        });
        return cart
    } catch (error) {
        // Reject rather than returning the error object as a result.
        return Promise.reject(error);
    }
}
// Inserts a cart row linking user, product, and quantity.
let addToCart = async (userId, productId, quantity) => {
    try {
        let cart = await users_cart.create({
            product_id: productId,
            quantity: quantity,
            user_id: userId
        });
        return cart;
    } catch (error) {
        // Reject rather than returning the error object as a result.
        return Promise.reject(error);
    }
}
// Updates the quantity of one product in a user's cart.
// Fixes: the update was not awaited, so the catch block could never fire;
// `new: true` was a Mongoose option, not Sequelize, and was silently ignored.
let updateCartProduct = async (userId, productId, quantity) => {
    try {
        let cart = await users_cart.update({
            quantity: quantity
        }, {
            where: {
                user_id: userId,
                product_id: productId
            }
        })
        return cart;
    } catch (error) {
        return Promise.reject(error);
    }
}
// Removes one product from a user's cart.
// Fix: await the destroy so rejections are caught here, and reject instead of
// returning the error object as a value.
let deleteCartProduct = async (userId, productId) => {
    try {
        let cart = await users_cart.destroy({
            where: {
                user_id: userId,
                product_id: productId
            }
        })
        return cart;
    } catch (error) {
        return Promise.reject(error);
    }
}
// Empties a user's cart entirely.
// Fix: await the destroy so rejections are caught here, and reject instead of
// returning the error object as a value.
let deleteAllCart = async (userId) => {
    try {
        let cart = await users_cart.destroy({
            where: {
                user_id: userId,
            }
        })
        return cart;
    } catch (error) {
        return Promise.reject(error);
    }
}
// Public API of the shared users data-access layer.
module.exports = {
    getForHome,
    refreshDeviceToken,
    refreshJwt,
    create, loginEmail, loginMobile,
    getUserInfo, updateUserInfo, getFullCart, addToCart, updateCartProduct, deleteCartProduct, deleteAllCart
}<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `users_cart` table (one row per product in a
    // user's cart).
    const users_cart = sequelize.define('users_cart', {
        product_id: {
            type: Sequelize.BIGINT
        },
        quantity: {
            type: Sequelize.INTEGER
        },
        user_id: {
            type: Sequelize.BIGINT
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — these associations are likely never registered.
        classMethods: {
            associate: (models) => {
                users_cart.belongsTo(models.users, { foreignKey: 'user_id' });
                users_cart.belongsTo(models.products, {foreignKey: 'product_id'});
            }
        }
    });
    return users_cart;
};<file_sep>var models = require("../../models");
var orders = models.orders;
// Persists a single-product order row.
let checkoutSingle = async (body) => {
    try {
        var order = await orders.create(body);
        return order;
    } catch (error) {
        // Reject so failures reach the caller's .catch(); returning the error
        // object made it indistinguishable from a created order.
        return Promise.reject(error);
    }
}
// TODO(review): unimplemented stub — presumably meant to create order rows
// for every cart item of user_id; currently returns undefined.
let checkoutCart = (user_id) => {
}
module.exports = { checkoutSingle, checkoutCart }<file_sep>let { validationErrorResponse, successResponse } = require("../../utils/response");
let { getDiscountList, createDiscount } = require("../../database/admin/discounts")
let { discountValidation } = require("../../utils/validations");
// GET handler: fetches all active discounts and sends them back.
let getList = (req, res, next) => {
    getDiscountList()
        .then((discountList) => successResponse(res, discountList))
        .catch((error) => validationErrorResponse(res, error));
}
// POST handler: validates and creates a discount (status forced to active).
// Bug fixed: `discount_value: discount_value` referenced an undefined
// variable, throwing a ReferenceError on every request — read it from the
// request body like the other fields.
let create = (req, res, next) => {
    var body = {
        discount_type: req.body.discount_type,
        minimum_value: req.body.minimum_value,
        discount_value: req.body.discount_value,
    }
    var validationResult = discountValidation(body);
    if (validationResult.error) {
        validationErrorResponse(res, validationResult.error.details);
        return;
    }
    body.status = 1
    createDiscount(body).then((discount) => {
        successResponse(res, discount);
    }).catch((error) => {
        validationErrorResponse(res, error);
    })
}
module.exports = { getList, create }<file_sep>const JWT_SECRET = "123456";
// Role identifiers used by controllers when creating accounts.
// SECURITY(review): JWT_SECRET (defined on the line above) is a hardcoded,
// trivially guessable value committed to source — it should come from an
// environment variable.
const userTypes = {
    app_user:"app_user",
    admin:"admin",
    delivery:"delivery"
}
module.exports = {JWT_SECRET, userTypes}<file_sep>var models = require("../../models")
var discounts = models.discounts;
// Persists a discount row.
var createDiscount = async (body) => {
    try {
        var discount = await discounts.create(body);
        return discount;
    } catch (error) {
        // The catch block was empty, so failures silently resolved to
        // `undefined`; reject so the controller's .catch() can report them.
        return Promise.reject(error);
    }
}
// Returns all active discounts (status 1).
// Bug fixed: `var discounts = await discounts.findAll(...)` shadowed the
// module-level `discounts` model with its own hoisted (undefined) local, so
// findAll was called on undefined and every call failed.
var getDiscountList = async () => {
    try {
        var activeDiscounts = await discounts.findAll({
            where: {
                status: 1
            }
        })
        return activeDiscounts;
    } catch (error) {
        // Reject rather than returning the error object as a result.
        return Promise.reject(error);
    }
}
module.exports = { createDiscount, getDiscountList }<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `orders` table (one row per ordered product).
    const orders = sequelize.define('orders', {
        user_id: {
            type: Sequelize.STRING
        },
        address_id: {
            type: Sequelize.BIGINT,
        },
        product_id: {
            type: Sequelize.BIGINT,
        },
        quantity: {
            type: Sequelize.INTEGER
        },
        discount_id: {
            type: Sequelize.BIGINT,
        },
        payment_id: {
            type: Sequelize.BIGINT,
        },
        order_status: {
            type: Sequelize.STRING
        },
        order_pick_time: {
            type: Sequelize.DATE
        },
        total_price: {
            type: Sequelize.FLOAT
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — these associations are likely never registered. Also check the
        // association directions: the FKs live on THIS table.
        classMethods: {
            associate: (models) => {
                orders.hasOne(models.discounts, { foreignKey: 'discount_id' });
                orders.hasOne(models.users_address, { foreignKey: 'address_id' });
                orders.hasMany(models.products, { foreignKey: 'product_id' });
            }
        }
    });
    return orders;
};<file_sep>var express = require('express');
// App payment routes: POST /save records a payment attempt.
var router = express.Router();
var { save } = require("../../../../controllers/app/payments")
// router.get("/:id");  // TODO(review): read endpoint commented out — not implemented
router.post('/save', save);
module.exports = router;
<file_sep>#APP CONFIG
APP_NAME=""
PORT=
#database config
DB_DIALECT=
DB_HOST=
DB_NAME=
DB_USER=
DB_PASSWORD=
DB_PORT=<file_sep>module.exports = (sequelize, Sequelize) => {
    // Sequelize model for the `users` table.
    const users = sequelize.define('users', {
        name: {
            type: Sequelize.STRING
        },
        email: {
            type: Sequelize.STRING,
            unique: true
        },
        mobile: {
            type: Sequelize.STRING,
        },
        password: {
            // bcrypt hash (controllers hash via generateHash before create)
            type: Sequelize.STRING
        },
        status: {
            type: Sequelize.INTEGER
        },
        last_login: {
            type: Sequelize.DATE
        },
        user_type: {
            type: Sequelize.STRING
        },
        login_status: {
            type: Sequelize.INTEGER
        },
        otp: {
            type: Sequelize.INTEGER
        },
        otp_verified: {
            type: Sequelize.INTEGER,
            // NOTE(review): Sequelize expects `defaultValue`; `default` here is
            // ignored, so no default of 0 is actually applied.
            default: 0
        }
    }, {
        // NOTE(review): `classMethods` is a Sequelize v3-era option, removed in
        // v4+ — these associations are likely never registered.
        classMethods: {
            associate: (models) => {
                users.hasMany(models.users_address, { foreignKey: 'user_id' });
                users.hasMany(models.users_cart, { foreignKey: 'user_id' });
                users.hasMany(models.users_token, { foreignKey: 'user_id' });
                users.hasMany(models.orders, { foreignKey: 'user_id' });
            }
        }
    });
    return users;
};<file_sep>var express = require('express');
// Admin discount routes: GET /list fetches active discounts, POST /create adds one.
var router = express.Router();
var { create, getList }
    = require("../../../../controllers/admin/discounts");
router.get("/list", getList);
router.post("/create", create);
module.exports = router;
| 54bac8887bed26d53c1ef1deef4d5fa0f58c9f0c | [
"JavaScript",
"Shell"
] | 35 | JavaScript | khushal123/Hyperlocal | 2a93ebf152b860b610e40bdbe8f34100aa26af13 | 5fc263154d8862018515a1713eb4de60f107f40b |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.