code
stringlengths 1
1.05M
| repo_name
stringlengths 6
83
| path
stringlengths 3
242
| language
stringclasses 222
values | license
stringclasses 20
values | size
int64 1
1.05M
|
|---|---|---|---|---|---|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//编写一个reverse()函数,该函数的参数是一个数组,
//在函数中会对数组中的元素顺序进行反转,并返回反转后的数组。
// Returns a new array whose elements are those of `arr` in reverse
// order; the input array is left untouched.
function reverse(arr) {
    const result = [];
    let i = arr.length;
    // Walk the source from its tail, appending front-to-back.
    while (i > 0) {
        i -= 1;
        result.push(arr[i]);
    }
    return result;
}
// 测试函数
let startArr = [1, 2, 3, 4, 5];
let reverArr = reverse(startArr);
console.log("原始数组:", startArr); // 输出原始数组
console.log("反转后的数组:", reverArr); // 输出反转后的数组
</script>
</html>
|
2302_79957586/JavaScript
|
3.html
|
HTML
|
unknown
| 880
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//编写一个isLeapYear()函数,该函数的参数是一个年份数字,
//利用isLeapYear()函数判断年份是否为闰年,
//如果年份是闰年,则返回值为true,否则返回值为false。
let year=prompt("请输入年份");
// Returns true when `y` is a leap year: divisible by 400, or divisible
// by 4 but not by 100. The original took no parameter and read the
// prompted global `year` directly; `y` defaults to that global so the
// existing zero-argument call keeps working, while the function is now
// usable with any year.
function isLeapYear(y = year){
    return y % 400 === 0 || (y % 100 !== 0 && y % 4 === 0);
}
console.log(isLeapYear());
</script>
</html>
|
2302_79957586/JavaScript
|
4.html
|
HTML
|
unknown
| 707
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//编写一个fn()函数,该函数调用后会弹出一个输入框,
//要求用户输入一个年份数字,当用户输入年份数字后,
//程序会提示用户该年份的2月份天数(闰年29天,平年28天)
// Prompts for a year and alerts how many days its February has
// (29 in a leap year, 28 otherwise).
function fn(){
    let year = prompt('请输入年份');
    // Leap year: divisible by 4 but not by 100, or divisible by 400.
    // The original condition `%4===0 || %100!==0 && %400===0` grouped
    // wrongly and reported years like 1900 as leap years.
    if(year % 4 === 0 && year % 100 !== 0 || year % 400 === 0){
        alert(year+'年的2月份有29天');
    }else{
        alert(year+'年的2月份有28天');
    }
}
fn();
</script>
</html>
|
2302_79957586/JavaScript
|
5.html
|
HTML
|
unknown
| 746
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//编写一个函数用于判断用户输入的n个值的最大值,
//Ps:每个用户输入的个数可能不一致。在做比较前需要用户确定输入几个值。
// Asks the user how many numbers they will enter, reads each one via
// prompt(), and returns the largest. Returns undefined (after an
// alert) when the count or any entry is not a valid number.
function findMaxOfUserInputs() {
    // How many values the user intends to enter (base-10).
    let n = parseInt(prompt("请输入你想输入的值的个数: "), 10);
    if (isNaN(n) || n <= 0) {
        alert("请输入一个正整数。");
        return;
    }
    let maxValue = -Infinity; // any real entry will exceed this
    for (let i = 0; i < n; i++) {
        let value = parseFloat(prompt(`请输入第${i + 1}个值: `));
        if (isNaN(value)) {
            alert("输入无效,请输入一个数字。");
            return;
        }
        // The original also pushed every entry into a `values` array
        // that nothing ever read; dropped as dead code.
        if (value > maxValue) {
            maxValue = value;
        }
    }
    return maxValue;
}
// 调用函数并打印结果
let maxValue = findMaxOfUserInputs();
if (maxValue !== -Infinity) {
console.log("最大值是: " + maxValue);
}
</script>
</html>
|
2302_79957586/JavaScript
|
6.html
|
HTML
|
unknown
| 1,661
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<p class="box1">飞流直下<strong>三千尺</strong></p>
<p class="box2">疑是银河落九天</p>
<div class="box3">蒹葭苍苍 白露为霜 <br>
所谓伊人 在水一方</div>
</body>
<script>
// Demo of innerHTML vs innerText vs textContent on prepared elements.
const em1 = document.querySelector('.box1');
console.log(em1.innerHTML); // read: markup including child tags
// Set markup. The original used the malformed closing tag `<em/>`,
// which leaves the <em> element unclosed; corrected to `</em>`.
em1.innerHTML = '大数据,<em>大智慧</em>,大未来';
console.log(em1.innerHTML);
// innerText assigns literal text: the tags below are displayed, not parsed.
em1.innerText = '飞流直下<strong>三千尺</strong>';
console.log(em1.textContent);
em1.textContent = '大数据,<em>大智慧</em>,大未来';
const em2 = document.querySelector('.box3');
console.log(em2.innerHTML);   // markup, including the <br>
console.log(em2.innerText);   // rendered text
console.log(em2.textContent); // raw text content
</script>
</html>
|
2302_79957586/JavaScript
|
DOMDemo02.html
|
HTML
|
unknown
| 964
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<style>
.box{
color: aqua;
font-weight: 80px;
}
</style>
<body>
<a href="#" class="lj" data-index="123">这是一个链接</a>
<img src="" alt="">
</body>
<script>
// Demonstrates reading/writing built-in attributes, custom data-*
// attributes, inline styles and the class attribute on an anchor.
const em1 = document.querySelector('.lj');
console.log(em1.href);
em1.href = 'https://www.baidu.com'; // set a built-in attribute via the property
em1.textContent = '百度一下';
em1.setAttribute('href','https://www.mi.com'); // set a built-in attribute via setAttribute
em1.setAttribute('data-index','456'); // set a custom data-* attribute
console.log(em1.getAttribute('data-index')); // read the custom attribute
// element.dataset.<name> maps to data-<name>: only the part after "data-" is used.
em1.dataset.index = '789';
console.log(em1.dataset.index); // read via dataset property access
console.log(em1.dataset['index']); // read via dataset bracket access
em1.removeAttribute('href'); // removeAttribute works for built-in and custom attributes alike
console.log(em1.getAttribute('data-index'));
em1.style.color = 'red';
// CSS properties containing "-" become camelCase here, e.g. font-size -> fontSize.
em1.style.fontSize = '34px';
em1.className = 'lj box'; // replaces the whole class attribute
</script>
</html>
|
2302_79957586/JavaScript
|
DOMDemo03.html
|
HTML
|
unknown
| 1,452
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<!-- DOM:文档对象模型 -->
<!-- 准备标签 -->
<div class="box">hello</div>
<div class="box1">world</div>
<div class="box2">前端真好玩</div>
<div class="box3">跟着静姐学前端</div>
<p>使用DOM操作元素</p>
<p class="box">这是有类名的p标签</p>
<div id="box">
<h1>这是div中的一级标题</h1>
</div>
</body>
<script>
//获取页面元素:querySelector()和querySelectorAll()
//做认识:getElementById()、getElementByTagName()、getElementByName()、getElementByClassName()、
//let const
//querySelector()只会获取第一个元素:选择器可以是标签选择器、类选择器(.类名)和ID选择器(#ID名)
// Element lookup demo: querySelector()/querySelectorAll() with tag,
// class and compound CSS selectors.
const elm1 = document.querySelector('div'); // first <div> in the document
console.log(elm1);
const elm2 = document.querySelector('.box1'); // first element with class "box1"
console.log(elm2);
const elm3 = document.querySelector('#box h1 li:nth-child(5)'); // compound selector; no match here, logs null
console.log(elm3);
console.log('-----------');
const em1 = document.querySelectorAll('div');
console.log(em1); // a NodeList: array-like, not a true array
for (let i=0;i<em1.length;i++){
    console.log(em1[i]);
}
em1[2].style.color='red';
</script>
</html>
|
2302_79957586/JavaScript
|
DOMdemo01.html
|
HTML
|
unknown
| 1,498
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//函数的定义
function say(){
//函数体
alert('hello js!');
}
//函数调用;可以调用多次
say();
say();
console.log('------------');
//函数的参数:形参、实参
// Print a console greeting for the given name.
function say2(name){
    console.log(`hello:${name}`);
}
//调用
say2('阿黎');
say2('星星');
say2('阿林');
//找出两个最大的值
// Print the larger of the two arguments (no return value). Like the
// original ternary, num2 wins unless num1 is strictly greater — this
// also preserves the result for non-numeric inputs.
function getMax(num1,num2){
    let max = num2;
    if (num1 > num2) {
        max = num1;
    }
    console.log('最大值是'+max);
}
getMax(); //
getMax(12,78); //
getMax(0,'hello',true); //
getMax(-67,-58,0,0,6); //
console.log('---------');
// Returns the smallest of all arguments passed in, or undefined when
// called with none. The original initialised `min` to undefined (the
// loop comparison was always false) and then overwrote it with the
// smaller of only the FIRST TWO arguments, so e.g. getMin(-1,-2,-3)
// returned -2.
function getMin(){
    if (arguments.length === 0) {
        return undefined;
    }
    let min = arguments[0];
    // Scan the remaining arguments for anything smaller.
    for (let i = 1; i < arguments.length; i++){
        if (arguments[i] < min){
            min = arguments[i];
        }
    }
    return min;
}
console.log(getMin());
console.log(getMin(0));
console.log(getMin(1,2));
console.log(getMin(-1,-2,-3));
console.log(('hello','min','max'));//参数不合法:typeof... number
// getMin();
// getMin(0);
// getMin(1,2);
// getMin(1,2,3);
// getMin('hello','min','max');
//返回值:return
//return语句一般写在函数的结束位置,return语句后的代码不会执行
//作业:1.编写一个getArrMay()函数,利用该函数求数组[13,68,79,92,83]中的函数值
</script>
</html>
|
2302_79957586/JavaScript
|
MethodDemo.html
|
HTML
|
unknown
| 1,957
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//转字符型:数值、布尔、null、未定义
let num1 = 123,flag1 = true, flag2 = false,num2 = null,num3;
//String(),toString()
let str1= String(num1);
console.log(str1); //123
console.log(typeof(str1));
str1 = num1.toString();
console.log(str1); //123
console.log(typeof(str1));
console.log('-------');
console.log(String(flag1));
console.log(flag1.toString());
console.log(String(flag2));
console.log(flag2.toString());
console.log('-----------');
console.log(String(num2));
// console.log(num2.toString()); //null没有tostring()方法
console.log(String(num3));
//
</script>
</html>
|
2302_79957586/JavaScript
|
ToString.html
|
HTML
|
unknown
| 922
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
// day9.30
//数组名【索引】:索引从0开始
// let arr = [12,4,'dgf',false,null,34,,,,,124.54332];
// console.log(arr[5]);
// console.log(arr[6]);
// console.log(arr);
// console.log('---------');
// console.log(arr.length);
// console.log('---------');
// for(let i = 0;i<arr.length;i++){
// console.log(arr[i]);
// }
//day10.11
//数组:一组数据
/*
let arr = [45,78,21,456,-78,0];
console.log(arr[2]); //21
console.log(arr); //打印对象object
*/
//遍历数组
/*
for(let i = 0;i<arr.length;i++){
console.log(arr[i]);
}
*/
//找出成绩score中的最高分,并求出平均数。score[45,78,89,94,63,55,80,74];
let score = [45,78,89,94,63,55,80,74];
let max=score[0],avg,sum=0;
for(let i=0;i<score.length;i++){
if(score[i]>max){
max=score[i];
}
sum += score[i];
}
console.log('最高分是'+max);
avg=sum / score.length;
console.log('平均分是'+avg);
console.log(score.length);
score[47]=90;
console.log(score.length);
console.log(score);
delete score[4];
console.log(score);
</script>
</html>
|
2302_79957586/JavaScript
|
arrDemo.html
|
HTML
|
unknown
| 1,490
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
// continue: skip the rest of this iteration and move to the next one.
for(let i=0;i<10;i++){
    if(i%2==0){
        continue; // even numbers skipped, so only odd i prints
    }
    console.log(i); // 1 3 5 7 9
}
console.log('-------');
// break: leave the loop entirely.
for(let i=0;i<10;i++){
    if(i%2==0){
        break; // i=0 breaks immediately, nothing prints
    }
    console.log(i);
}
// Labelled break: the outer loop counts 100 down, the inner 0 up;
// multiples of 3 are not printed; when the two counters collide we
// print 踩雷 and abort BOTH loops by breaking to the `tag:` label.
tag:
for(let a=100;a>=0;a--){
    for(let b=0;b<=100;b++){
        if(b%3==0){
            if(a==b){
                console.log(a+'=='+b);
                console.log('踩雷');
                break tag; // exits the loop labelled `tag` (label name is free-form)
            }
            continue; // multiples of 3 are skipped
        }
        if(a==b){
            console.log(a+'=='+b);
            console.log('踩雷');
            break tag;
        }
        console.log(a+':'+b);
    }
}
//语句:if,else,else if,switch,case,break,for,while,do...while,continue
//流程图:阅读代码
</script>
</html>
|
2302_79957586/JavaScript
|
breDemo.html
|
HTML
|
unknown
| 1,586
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
// Boolean()/Number()/parseInt conversion demo.
// undefined, null, '' and 0 convert to false; non-zero numbers and
// non-empty strings convert to true.
// Fix: the original first line read `letn1, n2 = ...` — the missing
// space made `letn1` an undeclared identifier (ReferenceError) and the
// rest implicit globals.
let n1, n2 = 123, n3 = null, n4 = true, n5 = 'hello', n6 = '', n7 = 0;
console.log(Boolean(n1)); // undefined -> false
console.log(Boolean(n2)); // non-zero number -> true
console.log(Boolean(n3)); // null -> false
console.log(Boolean(n5)); // non-empty string -> true
console.log(Boolean(n6)); // empty string -> false
console.log(Boolean(n7)); // 0 -> false
console.log('---------');
console.log(Number(n6));     // '' -> 0
console.log(parseInt(n6));   // '' -> NaN
console.log(parseFloat(n6)); // '' -> NaN
let num = parseInt(n6);
console.log(num);
console.log(typeof(n6));
console.log(Boolean(num));   // NaN -> false
</script>
</html>
|
2302_79957586/JavaScript
|
changedemo.html
|
HTML
|
unknown
| 1,033
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
for(let i = 0;i<10;i++){
console.log(i);
}
//计算1~100的和并打印结果
let sum=0;
for(let i = 1;i<=100;i++){
sum+=i;
}
console.log(sum);
//计算1~100之间偶数的和
let even=0;
let odd=0;
for(let i=1;i<=100;i++){
if(i%2==0){
even +=i;
}
else {
odd +=i;
}
}
console.log('1~100之间所有偶数的和为'+even);
console.log('1~100之间所有奇数的和为'+odd);
//while循环和do...while循环
let a = 10;
while(a>10){
console.log('执行while循环');
}
let b = 10;
do{
console.log('执行do...while循环');
}while(b>10)
//嵌套循环
for(let i = 0;i<5;i++){
for (let j = 0;j<5;j++){
console.log(i+':'+j);
}
}
/*
1.打印效果:
*****
*****
*****
*****
2.打印九九乘法表并画出流程图
*/
</script>
</html>
|
2302_79957586/JavaScript
|
forDemo.html
|
HTML
|
unknown
| 1,239
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
/*
三元运算符
条件表达式?条件为真的代码块:条件为假的代码块
找两个数中的最大值
*/
// let a,b,max;
// a = prompt();
// b = prompt();
// max = a>b?a:b;
// console.log(max);
/*
判断闰年、平年
闰年:能被400整除,或能被4整除但不能被100整除
*/
let year;
year = prompt();
(year%400===0)||(year%4===0)&&(year%100!==0)?console.log(year+'是闰年'):console.log(year+'是平年');
</script>
</html>
|
2302_79957586/JavaScript
|
getMax.html
|
HTML
|
unknown
| 761
|
console.log('这是外部的js代码')
|
2302_79957586/JavaScript
|
helllo.js
|
JavaScript
|
unknown
| 38
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
alert('hello javascript!');
console.log('succed!')
</script>
</html>
|
2302_79957586/JavaScript
|
hello.html
|
HTML
|
unknown
| 297
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<!-- <script src="hello3.js"></script> -->
<script>
let num1=100;
let str1,str2 = 'hello' ;
const Username = 'admin';
</script>
</html>
|
2302_79957586/JavaScript
|
hello3.html
|
HTML
|
unknown
| 369
|
alert('hello');
|
2302_79957586/JavaScript
|
hello3.js
|
JavaScript
|
unknown
| 15
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//流程:顺序、分支
//判断:if,if...else...,if...else... if...else...
//比较两个数大小,大小相等的情况打印num1和num2相等
let num1,num2;
num1 = prompt();
num2 = prompt();
//判断两个值的大小
if(num1>num2){
console.log('num1大于num2');
}
else if(num1==num2){
console.log('num1等于num2');
}else{
console.log('num1小于num2');
}
// Grade bands: score>=90 甲等, 80-89 乙等, 60-79 丙等, otherwise 不合格.
let score = prompt();
if(score>=90){
    console.log('甲等');
}else if(score>=80){
    // The original wrote `90>score>=80`, which JavaScript parses as
    // `(90>score)>=80` — a boolean compared to 80, always false — so
    // 乙等/丙等 were unreachable. Each band only needs the lower bound
    // because earlier branches already excluded higher scores.
    console.log('乙等');
}else if(score>=60){
    console.log('丙等');
}else{
    console.log('不合格');
}
//swich...case
//对输入的值(0~5)进行判定,将输入的值转换为二进制
let num = 3;
switch(num){
case 0:
console.log('转化为二进制是0000');
break;
case 1:
console.log('0001');
break;
case 2:
console.log('0010');
break;
case 3:
console.log('0011');
break;
case 4:
console.log('0100');
break;
case 5:
console.log('0101');
break;
default:
console.log('数据不合法');
}
</script>
</html>
|
2302_79957586/JavaScript
|
ifDemo.html
|
HTML
|
unknown
| 1,695
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
// Build the 9x9 multiplication table: row i holds i cells
// "col x row = product", each followed by a tab, rows separated by
// newlines; print the whole table in one console.log.
let str = '';
for (let row = 1; row <= 9; row++) {
    const cells = [];
    for (let col = 1; col <= row; col++) {
        cells.push(col + 'x' + row + '=' + row * col);
    }
    str += cells.join('\t') + '\t\n';
}
console.log(str);
</script>
</html>
|
2302_79957586/JavaScript
|
jiujiu.html
|
HTML
|
unknown
| 421
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//函数声明、参数(形参、实参)(arguments)、返回值return
//函数表达式、匿名函数、立即执行函数
//回调函数、递归函数
//函数可以作为其他函数的返回值进行返回(返回的函数要进行调用,如果不调用返回的将是一个函数声明的整个函数)
function fn1(a,b){
console.log('fn1函数调用');
return a+b;
}
let fn2 = function(f){
console.log('fn2函数调用');
return f;
}
console.log(fn2(10));
console.log(fn2(fn1)); //返回整个函数
console.log('--------');
console.log(fn2(fn1(3,2))); //想要调用函数需传参数值
console.log(fn1(1,2));
//作用域:作用的范围
/*
js的运行分两个部分:
1.解析(编译):语法检查、声明变量和函数
2.执行阶段:对变量进行赋值、按照执行顺序进行执行
var和let的区别:
1.var和let声明变量:var可以先使用再声明,let不允许。var有声明提升;let声明提升后执行时会出现短暂性死区,因此会报错
2.var在同一个作用区域内声明的变量名可以相同,而let不允许
3.var和let都全局作用域和局部作用域(函数作用域)。
推荐使用let(ES6),使我们的代码更严谨!!
*/
//console.log(num1);
console.log(num2);
let num1;
var num2=10;
var num2='hello';
console.log(num2);
console.log('---------');
//全局作用域:全局变量
let a;
let b;
function myf(){
//局部作用域(函数作用域):局部变量
let c=3;
var d=4;
console.log(a);
console.log(b);
console.log(c);
console.log(d);
}
myf();
console.log(a);
console.log(b);
//console.log(c); //报错
//console.log(d); //报错
console.log('----块作用域----');
//在let中增加了一个块作用域{}
for(let i=0;i<3;i++){
console.log('i:'+i);
}
//console.log(i); //报错:离开块作用域
for(var j=0;j<3;j++){
console.log('j:'+j);
}
console.log(j);
</script>
</html>
|
2302_79957586/JavaScript
|
met2Demo.html
|
HTML
|
unknown
| 2,473
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<!-- 点击按钮弹出警告‘你别点我’ -->
<input type="button" value="click" onclick="(function(userName){alert(userName+'你别点我');})('阿黎')"/>
</body>
<script>
//函数表达式:把函数定义直接赋值给一个变量
let f1 = function add(){
console.log('hello world');
}
//调用:变量名();
f1();
//带参数的函数表达式
let f2 = function add(a,b){
console.log(a+b); //11
}
f2(1,2); //3
//有返回值的函数表达式
let f3 = function add(a,b){
return a+b;
}
console.log(f3(3,4)); //7
console.log('--------------');
//console.log(f2(5,6)); //未定义
//匿名函数:没有函数名字
let myf1 = function(){
console.log('hello myf1');
}
myf1();
let myf2 = function(a,b){
return a<b?a:b; //三元运算符
}
console.log(myf2(-8,0));
//函数表达式注意的问题:一定要先声明,再调用
//把函数的声明看做一个整体,声明结束立即调用。匿名自调用函数
(function(){
console.log('hello world');
})();
(function(a,b){
console.log(a-b);
})(10,6);
console.log('--------');
console.log((function(){
return 0;
})());
let a = function(){
console.log('hello');
};
console.log(typeof a); //function、数组:object
console.log('--------');
//回调函数:把函数作为参数传给另一个参数
//把函数A作为参数传递给函数B,并在函数B中调用函数A。
let arg1 = function(){
return 0;
}
console.log(arg1);
let fun = function(a,b){
console.log(b*a());
}
fun(arg1,8888);
console.log('----------');
//递归函数:把函数作为参数传给参数本身。调用函数本身。
//注意:递归函数容易造成死循环
//阶乘:10!=10*9*8*7*6*...*1 > 10*9! > 10*9*(8!) >...10*9*8*7*6*5*4*3*2*(1!)
// let arg2 = function(a){
// return 0;
// }
// let arg3 = function(fn){
// return fn();
// }
// arg3(arg3);
// Recursive factorial. The base case is n <= 1; the original tested
// n === 1 only, so jc(0) (or any value below 1) recursed until the
// stack overflowed.
let jc = function(n){
    if(n <= 1){
        return 1;
    }
    return n * jc(n - 1);
}
console.log(jc(10));
</script>
</html>
|
2302_79957586/JavaScript
|
metDemo.html
|
HTML
|
unknown
| 2,597
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//console.log(Math.random()*45);
// function getRandomIntInclusive(min,max){
// const minCeiled = Math.ceil();
// const maxCeiled = Math.ceil();
// }
let arr = ['a','c','e'];
let array = new Array('a','f','ed','dgfg');
//arr[0]; //0~arr.length-1
let RanIndex = Math.floor(Math.random()*(arr.length));
console.log(arr[RanIndex]);
//随机点名,点过的名字不重复点,如果全部点完名推出的点名并提示用户
//猜数字,猜中交作业
let num;
// Returns a uniformly random integer in [min, max], both inclusive.
// The original formula Math.floor(Math.random()*(max-min)+1) ignored
// `min` entirely and could never produce `max` (for (1,10) it only
// yielded 1..9).
function cai(min,max){
    return Math.floor(Math.random() * (max - min + 1)) + min;
}
let boom = cai(1,10); // target number the user has to guess
// Guessing loop. NOTE(review): prompt() returns a string while `boom`
// is a number, so the strict `num === boom` can never be true; and
// every branch ends in `break`, so the loop exits after a single guess
// despite while(true). Flagged for a follow-up fix; behavior left as-is.
while(true){
    num = prompt('输入你猜的数字');
    if(num === boom){
        alert('恭喜喜提作业');
        break;
    }else if (num > boom){
        alert('你猜大了');
        break;
    }else if(num < boom){
        alert('你猜小了');
        break;
    }
}
</script>
</html>
|
2302_79957586/JavaScript
|
obj2Demo.html
|
HTML
|
unknown
| 1,253
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//对象:应该具体的事物,一种复杂的数据,包括属性和方法
//属性:对象的特征,用变量描述
//方法:对象的行为(功能),用函数(方法)描述
//创建对象:用字面量创建对象,在{}中描述对象
let obj1 = {}; //空对象
console.log(obj1); //
console.log(typeof obj1); //object对象
let obj2 = {
//属性 属性名:值,多个属性之间用逗号隔开
useNeme:'admin',
password:'123',
//方法
login:function(){
console.log('登录成功');
}
}
//访问对象
// 对象.变量名或对象['变量名'] 访问对象的属性
console.log(obj2.useNeme);
console.log(obj2['password']);
//对象.方法名()或对象.['方法']() 调用对象的方法
obj2.login();
obj2['login']();
console.log('------------');
/*创建对象:用构造函数创建对象
构造函数:一种特殊的函数,通过关键new调用这个函数能构造出一个对象。
step1:定义构造函数,一般讲构造函数名的首字母大写;
step2:使用关键字new构造函数
*/
// Constructor for a student record with name, sex and id fields plus a
// show() method that prints a short self-introduction. Call with `new`.
function Stu(name,sex,id){
    // Shorthand-copy the three fields onto the instance being built.
    Object.assign(this, { name, sex, id });
    this.show = function(){
        console.log('我叫:' + this.name);
    };
}
//关键字new
let student1 = new Stu('Lihua','男',123);
let student2 = new Stu('tom','女',456);
let student3 = new Stu('lily','女',99);
student1.show();
student2.show();
student3.show();
//创建对象:用object()创建对象
let obj3 = new Object(); //构造空对象
//添加属性和行为
obj3.name = 'root';
obj3.psd = '123';
obj3.login = function(){
//模板字符:`${变量名}`
console.log(`${obj3.name}登录成功`);
}
//访问对象
obj3.login();
//遍历对象for...in
//用字面量创建对象,对象中有title,date,author属性和read()方法,
let news = {
title:'xxx塌房',
date:'2024年10月28日',
author:'狗仔',
read:function(){
console.log('阅读新闻'+this.title);
}
};
news.read();
console.log('---------');
for(let temp in news){
console.log(temp); //遍历出对象中的属性名
if(typeof news[temp]==='function'){
news[temp]();
}else{
console.log(news[temp]); //获取对象中属性的值
}
}
//Math对象:不用实例化
let num = Math.floor(-2.12);//向下取整
console.log(num);
num = Math.random(); //取0~1之间的随机数
console.log(num);
</script>
</html>
|
2302_79957586/JavaScript
|
objDemo.html
|
HTML
|
unknown
| 3,045
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//算术运算符:
//自增x++:x=x+1; 自减x--:x=x-1;
// Arithmetic operators: pre/post increment-decrement, evaluated left
// to right.
let x = 1,y = 10,result;
// ++x (2) + ++y (11) + y (11) + x-- (2, x becomes 1) + x (1) + x (1) = 28.
// Afterwards x is 1 and y is 11.
result = ++x+(++y)+y+x--+x+x;
console.log(x); // 1
console.log(y); // 11
console.log(result); // 28
console.log('--------')
// With a string operand, + concatenates instead of adding.
let num1 = 123,num2 = 'hello';
let res = num1+num2;
console.log(res); // "123hello"
// Compound assignment: a += b means a = a + b.
let a=10,b=20;
console.log(a+=b); // 30
// Shifts on negative numbers operate on the two's-complement bits.
let n1 = -9,n2 = 2;
console.log(n1>>n2); // -3 (arithmetic right shift rounds toward -Infinity)
let m1 = 9,m2 = 2;
console.log(m1<<m2); // 36
// == coerces types before comparing; === also requires equal types.
let varb1 = 123, varb2 = '123';
console.log(varb1 == varb2); // true
console.log(varb1 === varb2); // false
</script>
</html>
|
2302_79957586/JavaScript
|
optDemo.html
|
HTML
|
unknown
| 1,059
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//冒泡排序
// Bubble sort: sort `arr` ascending in place by repeatedly swapping
// adjacent out-of-order pairs; after each pass the tail is settled.
let arr = [1, 3, 2, 6, 5, 4];
let temp;
for (let pass = 1; pass < arr.length; pass++) {
    for (let k = 0; k < arr.length - pass; k++) {
        if (arr[k] > arr[k + 1]) {
            // Swap the adjacent pair via the scratch variable.
            temp = arr[k + 1];
            arr[k + 1] = arr[k];
            arr[k] = temp;
        }
    }
}
console.log(arr);
// Two-dimensional arrays are indexed arr2[row][col].
let arr2 = [[1, 2, 3], [4, 5, 6], [7, 8, 9]];
console.log(arr2[0][0]); // 1
console.log(arr2[0][1]); // 2
console.log(arr2[0][2]); // 3
// Indexing into another 2-D array.
let arr3 = [[12, 34, 45], [23, 34, 2], [67, 56, 54]];
console.log(arr3[2][1]); // 56
</script>
</html>
|
2302_79957586/JavaScript
|
sortDemo.html
|
HTML
|
unknown
| 866
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
// let num;
// num = 10;
// console.log(num);
// let str;
// console.log(str); //未定义
// let n1,n2,n3;
// let a = 20,c = 'sss'; //创建变量并赋值
// const userName = 'admin'; //常量
// console.log(userName);
// // userName = 'xiaoming'; //不能修改常量的值,执行后会报错
// console.log(userName);
// Primitive type demo: boolean, number (decimal and hex literals),
// string (with escaped quotes), null and undefined.
let flag = true;
console.log(flag); // true
console.log(typeof(flag)); // "boolean"
let num1 = 123, num2 = 0x23; // 0x23 === 35
console.log(num1);
console.log(num2);
console.log(typeof(num1));
console.log(typeof(num2));
console.log('------------')
// Fix: the original read `let str1 = '';str2 = 'hello';str3 = ...` —
// the semicolons made str2/str3 undeclared assignments (implicit
// globals; a ReferenceError in strict/module code). Declare all three.
let str1 = '', str2 = 'hello', str3 = 'javascript';
console.log(str1);
console.log(str2);
console.log(str3);
console.log(typeof(str1));
console.log(typeof(str2));
console.log(typeof(str3));
// Double quotes inside a double-quoted string must be escaped with \.
let str = "子曰(\"老子\"):'学而时习之,不亦说乎'";
console.log(str);
console.log('-------');
let n1 = null, n2; // null vs undefined
console.log(n1);
console.log(n2);
</script>
</html>
|
2302_79957586/JavaScript
|
varDemo.html
|
HTML
|
unknown
| 1,469
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<!-- 行内式 -->
<input type="button" value="点我一下" onclick="alert('这是点击后的效果')"/>
</body>
<!-- 如何在HTML中引入JavaScript代码
1.嵌入式
2.外链式
3.行内式
-->
<script>
alert('hello javascript');
</script>
<!-- 外链式 -->
<script src="helllo.js">
//使用外链式(scr)方式引入js代码后,不能在script标签中继续写js代码
//这里的js代码不会报错,但同时也不会被解释(执行)
console.log('这是嵌入式的js代码');
</script>
</html>
|
2302_79957586/JavaScript
|
yrDemo.html
|
HTML
|
unknown
| 757
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
</body>
<script>
//作用域链
//执行上下文:执行JS代码环境
// Scope-chain demo: inner functions can read bindings from every
// enclosing scope, but not the other way around.
let a = 10; // global variable
function fn1(){
    let b = 'hello';
    function fn2(){
        let c = true;
        // fn2 resolves a and b through the scope chain.
        console.log(a); // 10
        console.log(b); // hello
        console.log(c); // true
    }
    fn2();
}
fn1();
// Fix: the original executed `console.log('c:'+c)` here, which throws
// ReferenceError — c only exists inside fn2. Kept commented out, like
// the other out-of-scope examples in this repo.
// console.log('c:'+c); // ReferenceError
</script>
</html>
|
2302_79957586/JavaScript
|
作用域链.html
|
HTML
|
unknown
| 611
|
#include <bits/stdc++.h>
using namespace std;
// For each of t test cases: read n values, bucket-count them by value,
// then scan values upward keeping the running sum of value*count.
// Prints "Cool!" when the prefix sum covers all n items before a gap,
// otherwise ans+1 (the first total that cannot be formed — presumably;
// matches the original output logic, kept unchanged).
int main()
{
    int t;
    cin>>t;
    while(t--)
    {
        // Bucket counts indexed by value. The scan below runs i up to
        // 1e6 and inputs index a[tmp] directly, so the buffer must
        // cover that range — the original `int a[114514]` was read AND
        // written out of bounds (undefined behavior). `static` keeps
        // the 4 MB array off the stack.
        static int a[1000005];
        memset(a,0,sizeof(a));
        int n;
        bool f=true;
        cin>>n;
        for(int i=1;i<=n;i++)
        {
            int tmp;
            cin>>tmp;
            a[tmp]++;
        }
        int ans=0;
        for(int i=1;i<=1e6;i++)
        {
            ans+=a[i]*i;
            if(ans>=n) break;          // every item accounted for
            if(ans<i) {f=false;break;} // gap: i cannot be reached
        }
        if(f) cout<<"Cool!\n";
        else cout<<ans+1<<"\n";
    }
}
|
2301_81652413/wqyhh
|
untitled1.cpp
|
C++
|
unknown
| 464
|
#include <bits/stdc++.h>
#include <bits/extc++.h>
#define int long long
#define Genshin return
#define Impact 0
using namespace __gnu_pbds;
using namespace std;
int n,m,fang[4][2]={{1,0},{0,1},{-1,0},{0,-1}},prx,pry,book[110][110],flag=0;
char a[110][110];
// Flood-fill BFS over the global grid `a` starting at (prx, pry),
// marking visited cells in the global `book`. Sets the global `flag`
// to 1 as soon as a cell containing '*' is dequeued; '#' cells are
// treated as walls and never entered.
void bfs(int prx,int pry)
{
    queue<pair<int,int> > q;
    book[prx][pry]=1;
    q.push({prx,pry});
    while(!q.empty())
    {
        int i=q.front().first,j=q.front().second;
        q.pop();
        // Dequeuing any '*' cell ends the search successfully.
        if(a[i][j]=='*') {flag=1;break;}
        for(int k=0;k<4;k++)
        {
            // Skip neighbours outside the 1..n x 1..m grid.
            if(i+fang[k][0]<=0||i+fang[k][0]>n||j+fang[k][1]<=0||j+fang[k][1]>m) continue;
            // Skip visited cells and impassable '#' walls.
            if(book[i+fang[k][0]][j+fang[k][1]]||a[i+fang[k][0]][j+fang[k][1]]=='#') continue;
            book[i+fang[k][0]][j+fang[k][1]]=1;
            q.push({i+fang[k][0],j+fang[k][1]});
        }
    }
}
// Reads an n x m grid; every '#' cell with a '#' or '*' neighbour is
// turned into '*', then a BFS runs from the 'M' start cell. Reaching
// any '*' prints "yeah!" plus a score (5 per remaining '#', 1 per '*');
// otherwise prints "Game over!".
signed main()
{
    cin>>n>>m;
    // Rows are stored 1-indexed; a[i]+1 leaves column 0 unused.
    for(int i=1;i<=n;i++) scanf("%s",a[i]+1);
    for(int i=1;i<=n;i++)
    {
        for(int j=1;j<=m;j++)
        {
            if(a[i][j]=='#')
            {
                // NOTE(review): this conversion runs in scan order over
                // the same grid, so a cell just turned into '*' can
                // trigger conversions of later cells within this same
                // pass — confirm the cascade is intended.
                for(int k=0;k<4;k++)
                {
                    if(i+fang[k][0]<=0||i+fang[k][0]>n||j+fang[k][1]<=0||j+fang[k][1]>m) continue;
                    if(a[i+fang[k][0]][j+fang[k][1]]=='#'||a[i+fang[k][0]][j+fang[k][1]]=='*') a[i][j]='*';
                }
            }
            if(a[i][j]=='M') prx=i,pry=j; // remember the start cell
        }
    }
    bfs(prx,pry);
    if(flag)
    {
        // A '*' cell was reachable: print "yeah!" then the score.
        cout<<"yeah!";
        int sum=0;
        for(int i=1;i<=n;i++)
        {
            for(int j=1;j<=m;j++)
            if(a[i][j]=='#') sum+=5;
            else if(a[i][j]=='*') sum++;
        }
        cout<<"\n"<<sum;
    }
    else cout<<"Game over!";
    //for(int i=1;i<=n;i++) {for(int j=1;j<=m;j++) cout<<a[i][j]<<" ";cout<<"\n";}
    Genshin Impact; // expands to `return 0;` via the macros at the top of the file
}
|
2301_81652413/wqyhh
|
untitled2.cpp
|
C++
|
unknown
| 1,542
|
#include <bits/stdc++.h>
using namespace std;
// Checks the "good number" digit rule: numbering digit positions from
// the least-significant digit starting at 1, odd positions must hold
// odd digits and even positions even digits. Returns 1 for a good
// number, 0 otherwise.
// Fix: the original returned 1 on EVERY path (violations included), so
// main() counted all numbers up to n instead of only the good ones.
int vis(int n){
    int flag=1; // 1-based digit position, counted from the right
    while(n){
        int t=n%10;
        if(flag%2==0){
            if(t%2==1){
                return 0; // even position holds an odd digit: not good
            }
        }else{
            if(t%2==0){
                return 0; // odd position holds an even digit: not good
            }
        }
        n/=10;
        flag++;
    }
    return 1; // all positions satisfied the parity rule
}
// Reads n and prints how many integers in [1, n] pass the vis() check.
int main(){
    int n,z=0;
    cin>>n;
    for(int i=1;i<=n;i++){
        if(vis(i)==1){
            z++; // i satisfied the digit rule
        }
    }
    cout<<z;
    return 0;
}
|
2301_81652413/wqyhh
|
好数.cpp
|
C++
|
unknown
| 379
|
VERSION = "5.0.0-dev"
PROJECT_NAME = "frappe-bench"
FRAPPE_VERSION = None
current_path = None
updated_path = None
LOG_BUFFER = []
def set_frappe_version(bench_path="."):
	"""Lazily populate the module-level ``FRAPPE_VERSION`` constant.

	Looks up the Frappe version for the bench at ``bench_path`` and
	caches it in the ``FRAPPE_VERSION`` global; once set, subsequent
	calls return immediately without re-querying.
	"""
	from .utils.app import get_current_frappe_version

	global FRAPPE_VERSION
	if FRAPPE_VERSION:
		return
	FRAPPE_VERSION = get_current_frappe_version(bench_path=bench_path)
|
2302_79757062/bench
|
bench/__init__.py
|
Python
|
agpl-3.0
| 340
|
# imports - standard imports
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import uuid
import tarfile
import typing
from collections import OrderedDict
from datetime import date
from functools import lru_cache
from pathlib import Path
from typing import Optional
from urllib.parse import urlparse
# imports - third party imports
import click
import git
import semantic_version as sv
# imports - module imports
import bench
from bench.exceptions import NotInBenchDirectoryError
from bench.utils import (
UNSET_ARG,
fetch_details_from_tag,
get_app_cache_extract_filter,
get_available_folder_name,
get_bench_cache_path,
is_bench_directory,
is_git_url,
is_valid_frappe_branch,
log,
run_frappe_cmd,
get_file_md5,
)
from bench.utils.bench import build_assets, install_python_dev_dependencies
from bench.utils.render import step
if typing.TYPE_CHECKING:
from bench.bench import Bench
logger = logging.getLogger(bench.PROJECT_NAME)
class AppMeta:
	# Resolves an app reference (URL, org/repo tag, bare name, or a path
	# on disk) into remote_server / org / repo / tag / app_name fields.
	# NOTE(review): self.bench and self.cache_key are assigned by the App
	# subclass before super().__init__ runs — AppMeta alone would raise
	# AttributeError in setup_details()/_setup_details_from_name_tag().
	def __init__(self, name: str, branch: str = None, to_clone: bool = True):
		"""
		name (str): This could look something like
		1. https://github.com/frappe/healthcare.git
		2. git@github.com:frappe/healthcare.git
		3. frappe/healthcare@develop
		4. healthcare
		5. healthcare@develop, healthcare@v13.12.1
		References for Version Identifiers:
		* https://www.python.org/dev/peps/pep-0440/#version-specifiers
		* https://docs.npmjs.com/about-semantic-versioning
		class Healthcare(AppConfig):
		dependencies = [{"frappe/erpnext": "~13.17.0"}]
		"""
		self.name = name.rstrip("/")
		self.remote_server = "github.com"
		self.to_clone = to_clone
		self.on_disk = False
		self.use_ssh = False
		self.from_apps = False
		self.is_url = False
		self.branch = branch
		self.app_name = None
		self.git_repo = None
		# --no-git support: a path that exists but is not a git repo gets
		# is_repo=False; a non-existent path is assumed to be a repo ref.
		# (is_git_repo / get_repo_dir are defined elsewhere in this module.)
		self.is_repo = (
			is_git_repo(app_path=get_repo_dir(self.name))
			if os.path.exists(get_repo_dir(self.name))
			else True
		)
		self.mount_path = os.path.abspath(
			os.path.join(urlparse(self.name).netloc, urlparse(self.name).path)
		)
		self.setup_details()

	def setup_details(self):
		"""Resolve org/repo/tag/app_name from whichever source `name` points at."""
		# support for --no-git
		if not self.is_repo:
			self.repo = self.app_name = self.name
			return
		# fetch meta from installed apps
		if self.bench and os.path.exists(os.path.join(self.bench.name, "apps", self.name)):
			self.mount_path = os.path.join(self.bench.name, "apps", self.name)
			self.from_apps = True
			self._setup_details_from_mounted_disk()
		# fetch meta for repo on mounted disk
		elif os.path.exists(self.mount_path):
			self.on_disk = True
			self._setup_details_from_mounted_disk()
		# fetch meta for repo from remote git server - traditional get-app url
		elif is_git_url(self.name):
			self.is_url = True
			self.__setup_details_from_git()
		# fetch meta from new styled name tags & first party apps on github
		else:
			self._setup_details_from_name_tag()
		# Prefer the checkout's directory name when a repo is available.
		if self.git_repo:
			self.app_name = os.path.basename(os.path.normpath(self.git_repo.working_tree_dir))
		else:
			self.app_name = self.repo

	def _setup_details_from_mounted_disk(self):
		# If app is a git repo
		self.git_repo = git.Repo(self.mount_path)
		try:
			self.__setup_details_from_git(self.git_repo.remotes[0].url)
			if not (self.branch or self.tag):
				self.tag = self.branch = self.git_repo.active_branch.name
		except IndexError:
			# No remotes configured: derive org/repo from the path itself.
			self.org, self.repo, self.tag = os.path.split(self.mount_path)[-2:] + (self.branch,)
		except TypeError:
			# faced a "a detached symbolic reference as it points" in case you're in the middle of
			# some git shenanigans
			self.tag = self.branch = None

	def _setup_details_from_name_tag(self):
		using_cached = bool(self.cache_key)
		self.org, self.repo, self.tag = fetch_details_from_tag(self.name, using_cached)
		self.tag = self.tag or self.branch

	def __setup_details_from_git(self, url=None):
		# Parse server/org/repo out of an ssh-style ("git@host:org/repo.git")
		# or http(s)-style git URL.
		name = url if url else self.name
		if name.startswith("git@") or name.startswith("ssh://"):
			self.use_ssh = True
			_first_part, _second_part = name.rsplit(":", 1)
			self.remote_server = _first_part.split("@")[-1]
			self.org, _repo = _second_part.rsplit("/", 1)
		else:
			protocal = "https://" if "https://" in name else "http://"
			self.remote_server, self.org, _repo = name.replace(protocal, "").rsplit("/", 2)
		self.tag = self.branch
		self.repo = _repo.split(".")[0]

	@property
	def url(self):
		# URL/app-dir/on-disk sources: `name` is already a usable location.
		if self.is_url or self.from_apps or self.on_disk:
			return self.name
		if self.use_ssh:
			return self.get_ssh_url()
		return self.get_http_url()

	def get_http_url(self):
		return f"https://{self.remote_server}/{self.org}/{self.repo}.git"

	def get_ssh_url(self):
		return f"git@{self.remote_server}:{self.org}/{self.repo}.git"
@lru_cache(maxsize=None)
class App(AppMeta):
	"""An app tied to a bench: fetch (clone), install, uninstall, archive,
	dependency resolution and the get-app tarball cache.

	NOTE(review): @lru_cache on the class memoizes instances per argument
	tuple, so identical constructor calls share one App object.
	"""

	def __init__(
		self,
		name: str,
		branch: str = None,
		bench: "Bench" = None,
		soft_link: bool = False,
		cache_key=None,
		*args,
		**kwargs,
	):
		self.bench = bench
		self.soft_link = soft_link
		self.required_by = None
		self.local_resolution = []
		self.cache_key = cache_key
		self.pyproject = None
		super().__init__(name, branch, *args, **kwargs)

	@step(title="Fetching App {repo}", success="App {repo} Fetched")
	def get(self):
		"""Clone the app into <bench>/apps (or symlink it when soft_link)."""
		branch = f"--branch {self.tag}" if self.tag else ""
		shallow = "--depth 1" if self.bench.shallow_clone else ""

		if not self.soft_link:
			cmd = "git clone"
			args = f"{self.url} {branch} {shallow} --origin upstream"
		else:
			cmd = "ln -s"
			args = f"{self.name}"

		fetch_txt = f"Getting {self.repo}"
		click.secho(fetch_txt, fg="yellow")
		logger.log(fetch_txt)

		self.bench.run(
			f"{cmd} {args}",
			cwd=os.path.join(self.bench.name, "apps"),
		)

	@step(title="Archiving App {repo}", success="App {repo} Archived")
	def remove(self, no_backup: bool = False):
		"""Delete the checkout outright (no_backup) or move it to archived/apps.

		NOTE(review): uses relative paths — assumes cwd is the bench root.
		"""
		active_app_path = os.path.join("apps", self.app_name)

		if no_backup:
			# Symlinked apps (soft_link) must be removed, not rmtree'd.
			if not os.path.islink(active_app_path):
				shutil.rmtree(active_app_path)
			else:
				os.remove(active_app_path)
			log(f"App deleted from {active_app_path}")
		else:
			archived_path = os.path.join("archived", "apps")
			archived_name = get_available_folder_name(
				f"{self.app_name}-{date.today()}", archived_path
			)
			archived_app_path = os.path.join(archived_path, archived_name)
			shutil.move(active_app_path, archived_app_path)
			log(f"App moved from {active_app_path} to {archived_app_path}")

		self.from_apps = False
		self.on_disk = False

	@step(title="Installing App {repo}", success="App {repo} Installed")
	def install(
		self,
		skip_assets=False,
		verbose=False,
		resolved=False,
		restart_bench=True,
		ignore_resolution=False,
		using_cached=False,
	):
		"""pip-install the (already fetched) app onto this bench."""
		import bench.cli
		from bench.utils.app import get_app_name

		self.validate_app_dependencies()

		verbose = bench.cli.verbose or verbose
		app_name = get_app_name(self.bench.name, self.app_name)

		# Warn when installing without dependency resolution.
		if not resolved and self.app_name != "frappe" and not ignore_resolution:
			click.secho(
				f"Ignoring dependencies of {self.name}. To install dependencies use --resolve-deps",
				fg="yellow",
			)

		install_app(
			app=app_name,
			tag=self.tag,
			bench_path=self.bench.name,
			verbose=verbose,
			skip_assets=skip_assets,
			restart_bench=restart_bench,
			resolution=self.local_resolution,
			using_cached=using_cached,
		)

	@step(title="Cloning and installing {repo}", success="App {repo} Installed")
	def install_resolved_apps(self, *args, **kwargs):
		"""Fetch then install, marking the install as dependency-resolved."""
		self.get()
		self.install(*args, **kwargs, resolved=True)

	@step(title="Uninstalling App {repo}", success="App {repo} Uninstalled")
	def uninstall(self):
		"""pip-uninstall the app from the bench virtualenv."""
		self.bench.run(f"{self.bench.python} -m pip uninstall -y {self.name}")

	def _get_dependencies(self):
		"""required_apps from hooks.py — local file when on disk, remote otherwise.

		Best-effort: any failure resolves to an empty dependency list.
		"""
		from bench.utils.app import get_required_deps, required_apps_from_hooks

		if self.on_disk:
			required_deps = os.path.join(self.mount_path, self.app_name, "hooks.py")
			try:
				return required_apps_from_hooks(required_deps, local=True)
			except IndexError:
				return []

		try:
			required_deps = get_required_deps(self.org, self.repo, self.tag or self.branch)
			return required_apps_from_hooks(required_deps)
		except Exception:
			return []

	def update_app_state(self):
		"""Record this app's branch/resolution in the bench apps state file."""
		from bench.bench import Bench

		bench = Bench(self.bench.name)
		bench.apps.sync(
			app_dir=self.app_name,
			app_name=self.name,
			branch=self.tag,
			required=self.local_resolution,
		)

	def get_pyproject(self) -> Optional[dict]:
		"""Parse (and memoize) the app's pyproject.toml; falsy when absent."""
		from bench.utils.app import get_pyproject

		if self.pyproject:
			return self.pyproject

		apps_path = os.path.join(os.path.abspath(self.bench.name), "apps")
		pyproject_path = os.path.join(apps_path, self.app_name, "pyproject.toml")
		self.pyproject = get_pyproject(pyproject_path)
		return self.pyproject

	def validate_app_dependencies(self, throw=False) -> None:
		"""Validate [tool.bench.frappe-dependencies] against installed apps."""
		pyproject = self.get_pyproject() or {}
		deps: Optional[dict] = (
			pyproject.get("tool", {}).get("bench", {}).get("frappe-dependencies")
		)
		if not deps:
			return

		for dep, version in deps.items():
			validate_dependency(self, dep, version, throw=throw)

	"""
	Get App Cache
	Since get-app affects only the `apps`, `env`, and `sites`
	bench sub directories. If we assume deterministic builds
	when get-app is called, the `apps/app_name` sub dir can be
	cached.
	In subsequent builds this would save time by not having to:
	- clone repository
	- install frontend dependencies
	- building frontend assets
	as all of this is contained in the `apps/app_name` sub dir.
	Code that updates the `env` and `sites` subdirs still need
	to be run.
	"""

	def get_app_path(self) -> Path:
		"""Path of this app's checkout under the bench."""
		return Path(self.bench.name) / "apps" / self.app_name

	def get_app_cache_temp_path(self, is_compressed=False) -> Path:
		"""Unique temp tarball path in the bench cache dir (.tgz when compressed)."""
		cache_path = get_bench_cache_path("apps")
		ext = "tgz" if is_compressed else "tar"
		tarfile_name = f"{self.app_name}.{uuid.uuid4().hex}.{ext}"
		return cache_path / tarfile_name

	def get_app_cache_hashed_path(self, temp_path: Path) -> Path:
		"""Final cache filename: <app>.<cache_key>.md5-<hash>.<ext>."""
		assert self.cache_key is not None

		ext = temp_path.suffix[1:]
		md5 = get_file_md5(temp_path)
		tarfile_name = f"{self.app_name}.{self.cache_key}.md5-{md5}.{ext}"
		return temp_path.with_name(tarfile_name)

	def get_cached(self) -> bool:
		"""Extract this app from a validated cache tarball; True on success."""
		if not self.cache_key:
			return False

		if not (cache_path := validate_cache_and_get_path(self.app_name, self.cache_key)):
			return False

		# Replace any existing checkout with the cached contents.
		app_path = self.get_app_path()
		if app_path.is_dir():
			shutil.rmtree(app_path)

		click.secho(
			f"Bench app-cache: extracting {self.app_name} from {cache_path.as_posix()}",
		)

		mode = "r:gz" if cache_path.suffix.endswith(".tgz") else "r"
		with tarfile.open(cache_path, mode) as tar:
			extraction_filter = get_app_cache_extract_filter(count_threshold=150_000)
			try:
				tar.extractall(app_path.parent, filter=extraction_filter)
				click.secho(
					f"Bench app-cache: extraction succeeded for {self.app_name}",
					fg="green",
				)
			except Exception:
				message = f"Bench app-cache: extraction failed for {self.app_name}"
				click.secho(
					message,
					fg="yellow",
				)
				logger.exception(message)
				# Leave no partially-extracted checkout behind.
				shutil.rmtree(app_path)
				return False

		return True

	def set_cache(self, compress_artifacts=False) -> bool:
		"""Tar this app's checkout into the bench cache; True on success."""
		if not self.cache_key:
			return False

		app_path = self.get_app_path()
		if not app_path.is_dir():
			return False

		cwd = os.getcwd()
		cache_path = self.get_app_cache_temp_path(compress_artifacts)
		mode = "w:gz" if compress_artifacts else "w"

		message = f"Bench app-cache: caching {self.app_name}"
		if compress_artifacts:
			message += " (compressed)"
		click.secho(message)

		self.prune_app_directory()

		success = False
		# chdir so the tar archive contains the app dir as a relative entry.
		os.chdir(app_path.parent)
		try:
			with tarfile.open(cache_path, mode) as tar:
				tar.add(app_path.name)

			hashed_path = self.get_app_cache_hashed_path(cache_path)
			unlink_no_throw(hashed_path)
			cache_path.rename(hashed_path)

			click.secho(
				f"Bench app-cache: caching succeeded for {self.app_name} as {hashed_path.as_posix()}",
				fg="green",
			)

			success = True
		except Exception as exc:
			log(f"Bench app-cache: caching failed for {self.app_name} {exc}", level=3)
			success = False
		finally:
			os.chdir(cwd)

		return success

	def prune_app_directory(self):
		"""Drop Vite-only node_modules before caching (only when the required
		frappe version supports cached installs)."""
		app_path = self.get_app_path()
		if can_frappe_use_cached(self):
			remove_unused_node_modules(app_path)
def coerce_url_to_name_if_possible(git_url: str, cache_key: str) -> str:
	"""Return the bare app name when a cached build exists for it, else the URL."""
	app_name = os.path.basename(git_url)
	return app_name if can_get_cached(app_name, cache_key) else git_url
def can_get_cached(app_name: str, cache_key: str) -> bool:
	"""
	Used before App is initialized if passed `git_url` is a
	file URL as opposed to the app name.
	If True then `git_url` can be coerced into the `app_name` and
	checking local remote and fetching can be skipped while keeping
	get-app command params the same.
	"""
	cache_path = get_app_cache_path(app_name, cache_key)
	if cache_path is None:
		return False
	return cache_path.exists()
def can_frappe_use_cached(app: App) -> bool:
	"""True when the app's required frappe version supports cached get-app.

	15.12.0 is the first supporting version; the requirement may be either a
	plain version string or a semver expression.
	"""
	min_frappe = get_required_frappe_version(app)
	if not min_frappe:
		return False

	try:
		# Plain version string: supported iff it is at least 15.12.0.
		return sv.Version(min_frappe) in sv.SimpleSpec(">=15.12.0")
	except ValueError:
		# Passed value is not a version string, it's an expression
		pass

	try:
		"""
		15.12.0 is the first version to support USING_CACHED,
		but there is no way to check the last version without
		support. So it's not possible to have a ">" filter.
		Hence this excludes the first supported version.
		"""
		return sv.Version("15.12.0") not in sv.SimpleSpec(min_frappe)
	except ValueError:
		click.secho(
			f"Bench app-cache: invalid value found for frappe version '{min_frappe}'",
			fg="yellow",
		)
		# Invalid expression
		return False
def validate_dependency(app: App, dep: str, req_version: str, throw=False) -> None:
	"""Warn (or exit when `throw`) if `dep` is missing from the bench or its
	installed version violates `req_version` (a semver spec)."""
	dep_path = Path(app.bench.name) / "apps" / dep
	if not dep_path.is_dir():
		click.secho(f"Required frappe-dependency '{dep}' not found.", fg="yellow")
		if throw:
			sys.exit(1)
		return

	dep_version = get_dep_version(dep, dep_path)
	# Version undeterminable — cannot validate, silently accept.
	if not dep_version:
		return

	if sv.Version(dep_version) not in sv.SimpleSpec(req_version):
		click.secho(
			f"Installed frappe-dependency '{dep}' version '{dep_version}' "
			f"does not satisfy required version '{req_version}'. "
			f"App '{app.name}' might not work as expected.",
			fg="yellow",
		)
		if throw:
			click.secho(f"Please install '{dep}{req_version}' first and retry", fg="red")
			sys.exit(1)
def get_dep_version(dep: str, dep_path: Path) -> Optional[str]:
	"""Return the installed version of `dep`: pyproject.toml's project.version
	first, falling back to __version__/VERSION in the package __init__.py."""
	from bench.utils.app import get_pyproject

	pyproject = get_pyproject(str(dep_path / "pyproject.toml"))
	declared = pyproject.get("project", {}).get("version")
	if declared:
		return declared

	init_file = dep_path / dep / "__init__.py"
	if not init_file.is_file():
		return None

	with init_file.open("r", encoding="utf-8") as fh:
		for line in fh:
			if not line.startswith(("__version__ =", "VERSION =")):
				continue
			# First matching assignment decides the outcome either way.
			found = line.split("=")[1].strip().strip("\"'")
			return found or None
	return None
def get_required_frappe_version(app: App) -> Optional[str]:
	"""Return the frappe version spec from [tool.bench.frappe-dependencies];
	warns and returns None when the app has not declared one."""
	pyproject = app.get_pyproject() or {}

	# Reference: https://github.com/frappe/bench/issues/1524
	req_frappe = (
		pyproject.get("tool", {})
		.get("bench", {})
		.get("frappe-dependencies", {})
		.get("frappe")
	)

	if not req_frappe:
		click.secho(
			"Required frappe version not set in pyproject.toml, "
			"please refer: https://github.com/frappe/bench/issues/1524",
			fg="yellow",
		)

	return req_frappe
def remove_unused_node_modules(app_path: Path) -> None:
	"""
	Erring a bit the side of caution; since there is no explicit way
	to check if node_modules are utilized, this function checks if Vite
	is being used to build the frontend code.
	Since most popular Frappe apps use Vite to build their frontends,
	this method should suffice.
	Note: root package.json is ignored cause those usually belong to
	apps that do not have a build step and so their node_modules are
	utilized during runtime.
	"""
	for subdir in app_path.iterdir():
		if not subdir.is_dir():
			continue

		manifest_path = subdir / "package.json"
		if not manifest_path.is_file():
			continue

		node_modules = subdir / "node_modules"
		if not node_modules.is_dir():
			continue

		# Only prune when the build step uses Vite; otherwise the
		# node_modules may be needed at runtime.
		with manifest_path.open("r", encoding="utf-8") as fh:
			manifest = json.loads(fh.read())
		build_script = manifest.get("scripts", {}).get("build", "")

		if "vite build" in build_script:
			click.secho(
				f"Bench app-cache: removing {node_modules.as_posix()}",
				fg="yellow",
			)
			shutil.rmtree(node_modules)
def make_resolution_plan(app: App, bench: "Bench"):
	"""
	decide what apps and versions to install and in what order

	Returns an OrderedDict mapping app_name -> App, root app first; also
	records the (reversed) install order on app.local_resolution.
	"""
	resolution = OrderedDict()
	resolution[app.app_name] = app

	for app_name in app._get_dependencies():
		dep_app = App(app_name, bench=bench)
		is_valid_frappe_branch(dep_app.url, dep_app.branch)
		dep_app.required_by = app.name
		if dep_app.app_name in resolution:
			click.secho(f"{dep_app.app_name} is already resolved skipping", fg="yellow")
			continue
		resolution[dep_app.app_name] = dep_app
		# Recurse into transitive dependencies.
		resolution.update(make_resolution_plan(dep_app, bench))
		app.local_resolution = [repo_name for repo_name, _ in reversed(resolution.items())]
	return resolution
def get_excluded_apps(bench_path="."):
	"""Return apps listed in sites/excluded_apps.txt; [] when unreadable."""
	excluded_apps_path = os.path.join(bench_path, "sites", "excluded_apps.txt")
	try:
		with open(excluded_apps_path) as f:
			contents = f.read()
	except OSError:
		return []
	return contents.strip().split("\n")
def add_to_excluded_apps_txt(app, bench_path="."):
	"""Add `app` to sites/excluded_apps.txt of the bench at `bench_path`.

	Raises ValueError if `app` is "frappe" (cannot be excluded) or if the app
	does not exist under the bench's apps directory.
	"""
	if app == "frappe":
		raise ValueError("Frappe app cannot be excluded from update")
	# Fix: look in the bench's own apps directory — previously this checked
	# the relative "apps" dir, silently ignoring bench_path.
	if app not in os.listdir(os.path.join(bench_path, "apps")):
		raise ValueError(f"The app {app} does not exist")
	apps = get_excluded_apps(bench_path=bench_path)
	if app not in apps:
		apps.append(app)
		return write_excluded_apps_txt(apps, bench_path=bench_path)
def write_excluded_apps_txt(apps, bench_path="."):
	"""Overwrite sites/excluded_apps.txt with `apps`; returns chars written."""
	target = os.path.join(bench_path, "sites", "excluded_apps.txt")
	with open(target, "w") as f:
		return f.write("\n".join(apps))
def remove_from_excluded_apps_txt(app, bench_path="."):
	"""Drop `app` from excluded_apps.txt if present; returns chars written."""
	excluded = get_excluded_apps(bench_path=bench_path)
	if app not in excluded:
		return
	excluded.remove(app)
	return write_excluded_apps_txt(excluded, bench_path=bench_path)
def get_app(
	git_url,
	branch=None,
	bench_path=".",
	skip_assets=False,
	verbose=False,
	overwrite=False,
	soft_link=False,
	init_bench=False,
	resolve_deps=False,
	cache_key=None,
	compress_artifacts=False,
):
	"""bench get-app clones a Frappe App from remote (GitHub or any other git server),
	and installs it on the current bench. This also resolves dependencies based on the
	apps' required_apps defined in the hooks.py file.
	If the bench_path is not a bench directory, a new bench is created named using the
	git_url parameter.
	"""
	import bench as _bench
	import bench.cli as bench_cli
	from bench.bench import Bench
	from bench.utils.app import check_existing_dir

	# A file:// URL with a cache key may be swapped for the bare app name when
	# a cached tarball exists, letting us skip cloning entirely.
	if urlparse(git_url).scheme == "file" and cache_key:
		git_url = coerce_url_to_name_if_possible(git_url, cache_key)

	bench = Bench(bench_path)
	app = App(
		git_url, branch=branch, bench=bench, soft_link=soft_link, cache_key=cache_key
	)
	git_url = app.url
	repo_name = app.repo
	branch = app.tag
	bench_setup = False
	restart_bench = not init_bench
	frappe_path, frappe_branch = None, None

	if resolve_deps:
		resolution = make_resolution_plan(app, bench)
		click.secho("Following apps will be installed", fg="bright_blue")
		# NOTE(review): this loop rebinds `app` to the last printed dependency;
		# later uses of `app` (e.g. f"{app.repo}-bench" below) see that binding
		# when resolve_deps is set — verify intended.
		for idx, app in enumerate(reversed(resolution.values()), start=1):
			print(
				f"{idx}. {app.name} {f'(required by {app.required_by})' if app.required_by else ''}"
			)

		if "frappe" in resolution:
			# Todo: Make frappe a terminal dependency for all frappe apps.
			frappe_path, frappe_branch = resolution["frappe"].url, resolution["frappe"].tag

	if not is_bench_directory(bench_path):
		if not init_bench:
			raise NotInBenchDirectoryError(
				f"{os.path.realpath(bench_path)} is not a valid bench directory. "
				"Run with --init-bench if you'd like to create a Bench too."
			)

		from bench.utils.system import init

		bench_path = get_available_folder_name(f"{app.repo}-bench", bench_path)
		init(
			path=bench_path,
			frappe_path=frappe_path,
			frappe_branch=frappe_branch or branch,
		)
		os.chdir(bench_path)
		bench_setup = True

	if bench_setup and bench_cli.from_command_line and bench_cli.dynamic_feed:
		_bench.LOG_BUFFER.append(
			{
				"message": f"Fetching App {repo_name}",
				"prefix": click.style("⏼", fg="bright_yellow"),
				"is_parent": True,
				"color": None,
			}
		)

	if resolve_deps:
		install_resolved_deps(
			bench,
			resolution,
			bench_path=bench_path,
			skip_assets=skip_assets,
			verbose=verbose,
		)
		return

	# Cached tarball short-circuit: extract instead of clone + frontend build.
	if app.get_cached():
		app.install(
			verbose=verbose,
			skip_assets=skip_assets,
			restart_bench=restart_bench,
			using_cached=True,
		)
		return

	dir_already_exists, cloned_path = check_existing_dir(bench_path, repo_name)
	to_clone = not dir_already_exists

	# application directory already exists
	# prompt user to overwrite it
	if dir_already_exists and (
		overwrite
		or click.confirm(
			f"A directory for the application '{repo_name}' already exists. "
			"Do you want to continue and overwrite it?"
		)
	):
		app.remove()
		to_clone = True

	if to_clone:
		app.get()

	if (
		to_clone
		or overwrite
		or click.confirm("Do you want to reinstall the existing application?")
	):
		app.install(verbose=verbose, skip_assets=skip_assets, restart_bench=restart_bench)

	app.set_cache(compress_artifacts)
def install_resolved_deps(
	bench,
	resolution,
	bench_path=".",
	skip_assets=False,
	verbose=False,
):
	"""Clone and install each app in `resolution` in reverse (deps-first) order.

	Existing checkouts are compared against the planned tag (or the remote's
	default branch when untagged) and only re-cloned on user confirmation.
	"""
	from bench.utils.app import check_existing_dir

	if "frappe" in resolution:
		# Terminal dependency
		del resolution["frappe"]

	for repo_name, app in reversed(resolution.items()):
		existing_dir, path_to_app = check_existing_dir(bench_path, repo_name)
		if existing_dir:
			is_compatible = False

			# Branch recorded in apps.json, else whatever is checked out on disk.
			try:
				installed_branch = bench.apps.states[repo_name]["resolution"]["branch"].strip()
			except Exception:
				installed_branch = (
					subprocess.check_output(
						"git rev-parse --abbrev-ref HEAD", shell=True, cwd=path_to_app
					)
					.decode("utf-8")
					.rstrip()
				)
			try:
				if app.tag is None:
					# Untagged plan: compatible iff the checkout tracks the
					# remote's default branch.
					current_remote = (
						subprocess.check_output(
							f"git config branch.{installed_branch}.remote", shell=True, cwd=path_to_app
						)
						.decode("utf-8")
						.rstrip()
					)

					default_branch = (
						subprocess.check_output(
							f"git symbolic-ref refs/remotes/{current_remote}/HEAD",
							shell=True,
							cwd=path_to_app,
						)
						.decode("utf-8")
						.rsplit("/")[-1]
						.strip()
					)
					is_compatible = default_branch == installed_branch
				else:
					is_compatible = installed_branch == app.tag
			except Exception:
				is_compatible = False

			prefix = "C" if is_compatible else "Inc"
			click.secho(
				f"{prefix}ompatible version of {repo_name} is already installed",
				fg="green" if is_compatible else "red",
			)
			app.update_app_state()
			if click.confirm(
				f"Do you wish to clone and install the already installed {prefix}ompatible app"
			):
				click.secho(f"Removing installed app {app.name}", fg="yellow")
				shutil.rmtree(path_to_app)
			else:
				continue
		app.install_resolved_apps(skip_assets=skip_assets, verbose=verbose)
def new_app(app, no_git=None, bench_path="."):
	"""Scaffold a new app via `bench make-app` and install it on the bench.

	Names are normalized to snake_case; names starting with a digit or
	containing a dot are rejected. `--no-git` requires Frappe v14+.
	"""
	if bench.FRAPPE_VERSION in (0, None):
		raise NotInBenchDirectoryError(
			f"{os.path.realpath(bench_path)} is not a valid bench directory."
		)

	# For backwards compatibility
	app = app.lower().replace(" ", "_").replace("-", "_")

	if app[0].isdigit() or "." in app:
		click.secho(
			"App names cannot start with numbers(digits) or have dot(.) in them", fg="red"
		)
		return

	apps = os.path.abspath(os.path.join(bench_path, "apps"))
	args = ["make-app", apps, app]

	if no_git:
		if bench.FRAPPE_VERSION < 14:
			click.secho("Frappe v14 or greater is needed for '--no-git' flag", fg="red")
			return
		args.append(no_git)

	logger.log(f"creating new app {app}")
	run_frappe_cmd(*args, bench_path=bench_path)
	install_app(app, bench_path=bench_path)
def install_app(
	app,
	tag=None,
	bench_path=".",
	verbose=False,
	no_cache=False,
	restart_bench=True,
	skip_assets=False,
	resolution=UNSET_ARG,
	using_cached=False,
):
	"""pip-install an already-fetched app, run yarn for its frontend deps,
	sync the bench apps state, build assets and reload processes."""
	import bench.cli as bench_cli
	from bench.bench import Bench

	install_text = f"Installing {app}"
	click.secho(install_text, fg="yellow")
	logger.log(install_text)

	if resolution == UNSET_ARG:
		resolution = []

	bench = Bench(bench_path)
	conf = bench.conf

	verbose = bench_cli.verbose or verbose
	quiet_flag = "" if verbose else "--quiet"
	cache_flag = "--no-cache-dir" if no_cache else ""

	app_path = os.path.realpath(os.path.join(bench_path, "apps", app))

	bench.run(
		f"{bench.python} -m pip install {quiet_flag} --upgrade -e {app_path} {cache_flag}"
	)

	if conf.get("developer_mode"):
		install_python_dev_dependencies(apps=app, bench_path=bench_path, verbose=verbose)

	# Cached installs already ship node dependencies; skip yarn for them.
	if not using_cached and os.path.exists(os.path.join(app_path, "package.json")):
		yarn_install = "yarn install --check-files"
		if verbose:
			yarn_install += " --verbose"
		bench.run(yarn_install, cwd=app_path)

	bench.apps.sync(app_name=app, required=resolution, branch=tag, app_dir=app_path)

	if not skip_assets:
		build_assets(bench_path=bench_path, app=app, using_cached=using_cached)

	if restart_bench:
		# Avoiding exceptions here as production might not be set-up
		# OR we might just be generating docker images.
		bench.reload(_raise=False)
def pull_apps(apps=None, bench_path=".", reset=False):
	"""Check all apps if there no local changes, pull

	With `reset`, local changes are discarded via `git reset --hard`;
	otherwise the update aborts if any app has uncommitted changes.
	"""
	from bench.bench import Bench
	from bench.utils.app import get_current_branch, get_remote

	bench = Bench(bench_path)
	rebase = "--rebase" if bench.conf.get("rebase_on_pull") else ""
	apps = apps or bench.apps
	excluded_apps = bench.excluded_apps

	# check for local changes
	if not reset:
		for app in apps:
			if app in excluded_apps:
				print(f"Skipping reset for app {app}")
				continue
			app_dir = get_repo_dir(app, bench_path=bench_path)
			if os.path.exists(os.path.join(app_dir, ".git")):
				out = subprocess.check_output("git status", shell=True, cwd=app_dir)
				out = out.decode("utf-8")
				if not re.search(r"nothing to commit, working (directory|tree) clean", out):
					print(
						f"""
Cannot proceed with update: You have local changes in app "{app}" that are not committed.
Here are your choices:
1. Merge the {app} app manually with "git pull" / "git pull --rebase" and fix conflicts.
2. Temporarily remove your changes with "git stash" or discard them completely
with "bench update --reset" or for individual repositries "git reset --hard"
3. If your changes are helpful for others, send in a pull request via GitHub and
wait for them to be merged in the core."""
					)
					sys.exit(1)

	for app in apps:
		if app in excluded_apps:
			print(f"Skipping pull for app {app}")
			continue
		app_dir = get_repo_dir(app, bench_path=bench_path)
		if os.path.exists(os.path.join(app_dir, ".git")):
			remote = get_remote(app)
			if not remote:
				# remote is False, i.e. remote doesn't exist, add the app to excluded_apps.txt
				add_to_excluded_apps_txt(app, bench_path=bench_path)
				print(
					f"Skipping pull for app {app}, since remote doesn't exist, and"
					" adding it to excluded apps"
				)
				continue

			# Shallow clones must be unshallowed before a safe pull.
			if not bench.conf.get("shallow_clone") or not reset:
				is_shallow = os.path.exists(os.path.join(app_dir, ".git", "shallow"))
				if is_shallow:
					s = " to safely pull remote changes." if not reset else ""
					print(f"Unshallowing {app}{s}")
					bench.run(f"git fetch {remote} --unshallow", cwd=app_dir)

			branch = get_current_branch(app, bench_path=bench_path)
			logger.log(f"pulling {app}")
			if reset:
				reset_cmd = f"git reset --hard {remote}/{branch}"
				if bench.conf.get("shallow_clone"):
					bench.run(f"git fetch --depth=1 --no-tags {remote} {branch}", cwd=app_dir)
					bench.run(reset_cmd, cwd=app_dir)
					# Shrink history kept by the shallow fetch.
					bench.run("git reflog expire --all", cwd=app_dir)
					bench.run("git gc --prune=all", cwd=app_dir)
				else:
					bench.run("git fetch --all", cwd=app_dir)
					bench.run(reset_cmd, cwd=app_dir)
			else:
				bench.run(f"git pull {rebase} {remote} {branch}", cwd=app_dir)
			bench.run('find . -name "*.pyc" -delete', cwd=app_dir)
def use_rq(bench_path):
	"""True when the bench's frappe has no celery_app.py (i.e. RQ era)."""
	celery_module = os.path.join(
		os.path.abspath(bench_path), "apps", "frappe", "frappe", "celery_app.py"
	)
	return not os.path.exists(celery_module)
def get_repo_dir(app, bench_path="."):
	"""Path of `app`'s checkout under the bench's apps directory."""
	parts = (bench_path, "apps", app)
	return os.path.join(*parts)
def is_git_repo(app_path):
	"""True when `app_path` itself is a git repo (parent dirs not searched)."""
	try:
		git.Repo(app_path, search_parent_directories=False)
		return True
	except git.exc.InvalidGitRepositoryError:
		return False
def install_apps_from_path(path, bench_path="."):
	"""get-app every entry of an apps.json file or URL; assets are skipped.

	Each entry is a dict with "url" (required) and "branch" (optional).
	"""
	apps = get_apps_json(path)
	for app in apps:
		get_app(
			app["url"],
			branch=app.get("branch"),
			bench_path=bench_path,
			skip_assets=True,
		)
def get_apps_json(path):
	"""Load an apps list (JSON) from an HTTP(S) URL or a local file path.

	Returns the parsed JSON payload (typically a list of {"url", "branch"}
	dicts consumed by install_apps_from_path).
	"""
	if path.startswith("http"):
		# Import requests lazily: local-file reads should not require (or pay
		# for) the third-party HTTP dependency.
		import requests

		r = requests.get(path)
		return r.json()

	with open(path) as f:
		return json.load(f)
def is_cache_hash_valid(cache_path: Path) -> bool:
	"""Verify the md5-<hash> filename segment matches the file's content hash.

	Cache filenames look like <app>.<cache_key>.md5-<hash>.<ext>.
	"""
	parts = cache_path.name.split(".")
	if len(parts) < 2 or not parts[-2].startswith("md5-"):
		return False

	md5 = parts[-2].split("-")[1]
	return get_file_md5(cache_path) == md5
def unlink_no_throw(path: Path):
	"""Delete `path` if it exists, swallowing any filesystem error."""
	if not path.exists():
		return
	try:
		path.unlink(missing_ok=True)
	except Exception:
		# Best-effort cleanup; failure to delete is non-fatal.
		pass
def get_app_cache_path(app_name: str, cache_key: str) -> "Optional[Path]":
	"""First cached tarball matching <app>.<cache_key>.md5-*, or None."""
	cache_dir = get_bench_cache_path("apps")
	pattern = f"{app_name}.{cache_key}.md5-*"
	return next(cache_dir.glob(pattern), None)
def validate_cache_and_get_path(app_name: str, cache_key: str) -> "Optional[Path]":
	"""Return a verified cache tarball path for (app_name, cache_key).

	Any entry that is not a regular file or fails the md5 filename check is
	deleted (best-effort) and None is returned.
	"""
	if not cache_key:
		return

	if not (cache_path := get_app_cache_path(app_name, cache_key)):
		return

	if not cache_path.is_file():
		click.secho(
			f"Bench app-cache: file check failed for {cache_path.as_posix()}, skipping cache",
			fg="yellow",
		)
		unlink_no_throw(cache_path)
		return

	if not is_cache_hash_valid(cache_path):
		click.secho(
			f"Bench app-cache: hash validation failed for {cache_path.as_posix()}, skipping cache",
			fg="yellow",
		)
		unlink_no_throw(cache_path)
		return

	return cache_path
|
2302_79757062/bench
|
bench/app.py
|
Python
|
agpl-3.0
| 30,393
|
# imports - standard imports
import subprocess
from functools import lru_cache
import os
import shutil
import json
import sys
import logging
from typing import List, MutableSequence, TYPE_CHECKING, Union
# imports - module imports
import bench
from bench.exceptions import AppNotInstalledError, InvalidRemoteException, ValidationError
from bench.config.common_site_config import setup_config
from bench.utils import (
UNSET_ARG,
paths_in_bench,
exec_cmd,
is_bench_directory,
is_frappe_app,
get_cmd_output,
get_git_version,
log,
run_frappe_cmd,
)
from bench.utils.bench import (
validate_app_installed_on_sites,
restart_supervisor_processes,
restart_systemd_processes,
restart_process_manager,
remove_backups_crontab,
get_venv_path,
get_env_cmd,
)
from bench.utils.render import job, step
from bench.utils.app import get_current_version
from bench.app import is_git_repo
if TYPE_CHECKING:
from bench.app import App
logger = logging.getLogger(bench.PROJECT_NAME)
class Base:
	"""Mixin providing shell-command execution rooted at the object's cwd."""

	def run(self, cmd, cwd=None, _raise=True):
		"""Run `cmd` via exec_cmd in `cwd` (defaults to self.cwd)."""
		target_cwd = cwd or self.cwd
		return exec_cmd(cmd, cwd=target_cwd, _raise=_raise)
class Validator:
	"""Validation helpers mixed into Bench."""

	def validate_app_uninstall(self, app):
		"""Raise if `app` is not installed, or is still installed on any site."""
		if app not in self.apps:
			raise AppNotInstalledError(f"No app named {app}")
		validate_app_installed_on_sites(app, bench_path=self.name)
@lru_cache(maxsize=None)
class Bench(Base, Validator):
	"""A bench directory: its paths, config, installed apps and lifecycle ops.

	NOTE(review): @lru_cache memoizes Bench per `path`, so Bench("x") yields
	one shared instance.
	"""

	def __init__(self, path):
		self.name = path
		self.cwd = os.path.abspath(path)
		self.exists = is_bench_directory(self.name)

		self.setup = BenchSetup(self)
		self.teardown = BenchTearDown(self)
		self.apps = BenchApps(self)

		self.apps_txt = os.path.join(self.name, "sites", "apps.txt")
		self.excluded_apps_txt = os.path.join(self.name, "sites", "excluded_apps.txt")

	@property
	def python(self) -> str:
		"""Path of the bench virtualenv's python executable."""
		return get_env_cmd("python", bench_path=self.name)

	@property
	def shallow_clone(self) -> bool:
		"""Whether clones may use --depth 1 (config allows it and git > 1.9)."""
		config = self.conf

		if config:
			if config.get("release_bench") or not config.get("shallow_clone"):
				return False

		return get_git_version() > 1.9

	@property
	def excluded_apps(self) -> List:
		"""Apps listed in sites/excluded_apps.txt; [] when unreadable."""
		try:
			with open(self.excluded_apps_txt) as f:
				return f.read().strip().split("\n")
		except Exception:
			return []

	@property
	def sites(self) -> List:
		"""Site folders under sites/ that contain a site_config.json."""
		# NOTE(review): the exists() check uses the relative "sites" dir, not
		# self.name — assumes cwd is the bench root; verify.
		return [
			path
			for path in os.listdir(os.path.join(self.name, "sites"))
			if os.path.exists(os.path.join("sites", path, "site_config.json"))
		]

	@property
	def conf(self):
		"""Parsed common_site_config.json for this bench."""
		from bench.config.common_site_config import get_config

		return get_config(self.name)

	def init(self):
		"""Create directories, the virtualenv and backup cron for a new bench."""
		self.setup.dirs()
		self.setup.env()
		self.setup.backups()

	def drop(self):
		"""Tear down backup cron and bench directories."""
		self.teardown.backups()
		self.teardown.dirs()

	def install(self, app, branch=None):
		"""Fetch + install `app` and sync the apps state."""
		from bench.app import App

		app = App(app, branch=branch)
		self.apps.append(app)
		self.apps.sync()

	def uninstall(self, app, no_backup=False, force=False):
		"""Remove `app` from the bench; frappe itself cannot be removed."""
		if app == "frappe":
			raise ValidationError("You cannot uninstall the app `frappe`")

		from bench.app import App

		if not force:
			self.validate_app_uninstall(app)
		try:
			self.apps.remove(App(app, bench=self, to_clone=False), no_backup=no_backup)
		except InvalidRemoteException:
			if not force:
				raise

		self.apps.sync()
		# self.build() - removed because it seems unnecessary
		self.reload(_raise=False)

	@step(title="Building Bench Assets", success="Bench Assets Built")
	def build(self):
		# build assets & stuff
		run_frappe_cmd("build", bench_path=self.name)

	@step(title="Reloading Bench Processes", success="Bench Processes Reloaded")
	def reload(self, web=False, supervisor=True, systemd=True, _raise=True):
		"""If web is True, only web workers are restarted"""
		conf = self.conf

		if conf.get("developer_mode"):
			restart_process_manager(bench_path=self.name, web_workers=web)
		if supervisor or conf.get("restart_supervisor_on_update"):
			restart_supervisor_processes(bench_path=self.name, web_workers=web, _raise=_raise)
		if systemd and conf.get("restart_systemd_on_update"):
			restart_systemd_processes(bench_path=self.name, web_workers=web, _raise=_raise)

	def get_installed_apps(self) -> List:
		"""Returns list of installed apps on bench, not in excluded_apps.txt"""
		try:
			installed_packages = get_cmd_output(f"{self.python} -m pip freeze", cwd=self.name)
		except Exception:
			installed_packages = []

		return [
			app
			for app in self.apps
			if app not in self.excluded_apps and app in installed_packages
		]
class BenchApps(MutableSequence):
	"""Sequence view over the apps on a bench, backed by sites/apps.txt and
	the sites/apps.json state file (branch/commit/version tracking)."""

	def __init__(self, bench: Bench):
		self.bench = bench
		self.states_path = os.path.join(self.bench.name, "sites", "apps.json")
		self.apps_path = os.path.join(self.bench.name, "apps")
		self.initialize_apps()
		self.set_states()

	def set_states(self):
		"""Load apps.json into self.states ({} when missing or empty)."""
		try:
			with open(self.states_path) as f:
				self.states = json.loads(f.read() or "{}")
		except FileNotFoundError:
			self.states = {}

	def update_apps_states(
		self,
		app_dir: str = None,
		app_name: Union[str, None] = None,
		branch: Union[str, None] = None,
		required: List = UNSET_ARG,
	):
		"""Refresh apps.json: seed it for legacy benches without one, drop
		entries for removed apps, and record resolution for `app_name`."""
		if required == UNSET_ARG:
			required = []
		if self.apps and not os.path.exists(self.states_path):
			# idx according to apps listed in apps.txt (backwards compatibility)
			# Keeping frappe as the first app.
			if "frappe" in self.apps:
				self.apps.remove("frappe")
				self.apps.insert(0, "frappe")
				with open(self.bench.apps_txt, "w") as f:
					f.write("\n".join(self.apps))
			print("Found existing apps updating states...")
			for idx, app in enumerate(self.apps, start=1):
				self.states[app] = {
					"resolution": {"commit_hash": None, "branch": None},
					"required": required,
					"idx": idx,
					"version": get_current_version(app, self.bench.name),
				}

		apps_to_remove = []
		for app in self.states:
			if app not in self.apps:
				apps_to_remove.append(app)

		for app in apps_to_remove:
			del self.states[app]

		if app_name and not app_dir:
			app_dir = app_name

		if app_name and app_name not in self.states:
			version = get_current_version(app_name, self.bench.name)

			app_dir = os.path.join(self.apps_path, app_dir)
			is_repo = is_git_repo(app_dir)
			if is_repo:
				if not branch:
					branch = (
						subprocess.check_output(
							"git rev-parse --abbrev-ref HEAD", shell=True, cwd=app_dir
						)
						.decode("utf-8")
						.rstrip()
					)

				commit_hash = (
					subprocess.check_output(f"git rev-parse {branch}", shell=True, cwd=app_dir)
					.decode("utf-8")
					.rstrip()
				)

			self.states[app_name] = {
				"is_repo": is_repo,
				"resolution": "not a repo"
				if not is_repo
				else {"commit_hash": commit_hash, "branch": branch},
				"required": required,
				"idx": len(self.states) + 1,
				"version": version,
			}

		with open(self.states_path, "w") as f:
			f.write(json.dumps(self.states, indent=4))

	def sync(
		self,
		app_name: Union[str, None] = None,
		app_dir: Union[str, None] = None,
		branch: Union[str, None] = None,
		required: List = UNSET_ARG,
	):
		"""Re-scan apps/, rewrite apps.txt and update apps.json."""
		if required == UNSET_ARG:
			required = []
		self.initialize_apps()

		with open(self.bench.apps_txt, "w") as f:
			f.write("\n".join(self.apps))

		self.update_apps_states(
			app_name=app_name, app_dir=app_dir, branch=branch, required=required
		)

	def initialize_apps(self):
		"""Populate self.apps from the apps/ directory, frappe first.

		NOTE(review): `remove("frappe")` raises ValueError when frappe is
		absent; only FileNotFoundError is caught here — verify intended.
		"""
		try:
			self.apps = [
				x
				for x in os.listdir(os.path.join(self.bench.name, "apps"))
				if is_frappe_app(os.path.join(self.bench.name, "apps", x))
			]
			self.apps.remove("frappe")
			self.apps.insert(0, "frappe")
		except FileNotFoundError:
			self.apps = []

	def __getitem__(self, key):
		"""retrieves an item by its index, key"""
		return self.apps[key]

	def __setitem__(self, key, value):
		"""set the item at index, key, to value"""
		# should probably not be allowed
		# self.apps[key] = value
		raise NotImplementedError

	def __delitem__(self, key):
		"""removes the item at index, key"""
		# TODO: uninstall and delete app from bench
		del self.apps[key]

	def __len__(self):
		return len(self.apps)

	def insert(self, key, value):
		"""add an item, value, at index, key."""
		# TODO: fetch and install app to bench
		self.apps.insert(key, value)

	def add(self, app: "App"):
		"""Fetch + install `app`, then track its name (sorted) in self.apps."""
		app.get()
		app.install()
		super().append(app.app_name)
		self.apps.sort()

	def remove(self, app: "App", no_backup: bool = False):
		"""Uninstall `app` and archive/delete its checkout."""
		app.uninstall()
		app.remove(no_backup=no_backup)
		super().remove(app.app_name)

	def append(self, app: "App"):
		"""Alias for add(); the list is kept sorted, position is ignored."""
		return self.add(app)

	def __repr__(self):
		return self.__str__()

	def __str__(self):
		return str([x for x in self.apps])
class BenchSetup(Base):
	def __init__(self, bench: Bench):
		self.bench = bench
		# Base.run() executes commands from here by default.
		self.cwd = self.bench.cwd
@step(title="Setting Up Directories", success="Directories Set Up")
def dirs(self):
os.makedirs(self.bench.name, exist_ok=True)
for dirname in paths_in_bench:
os.makedirs(os.path.join(self.bench.name, dirname), exist_ok=True)
	@step(title="Setting Up Environment", success="Environment Set Up")
	def env(self, python="python3"):
		"""Setup env folder
		- create env if not exists
		- upgrade env pip
		- install frappe python dependencies
		"""
		import bench.cli
		import click

		verbose = bench.cli.verbose
		click.secho("Setting Up Environment", fg="yellow")

		frappe = os.path.join(self.bench.name, "apps", "frappe")
		quiet_flag = "" if verbose else "--quiet"

		# Create the virtualenv only when its python binary is missing.
		if not os.path.exists(self.bench.python):
			venv = get_venv_path(verbose=verbose, python=python)
			self.run(f"{venv} env", cwd=self.bench.name)

		self.pip()
		self.wheel()

		# Editable-install frappe if it has already been fetched.
		if os.path.exists(frappe):
			self.run(
				f"{self.bench.python} -m pip install {quiet_flag} --upgrade -e {frappe}",
				cwd=self.bench.name,
			)
@step(title="Setting Up Bench Config", success="Bench Config Set Up")
def config(self, redis=True, procfile=True, additional_config=None):
"""Setup config folder
- create pids folder
- generate sites/common_site_config.json
"""
setup_config(self.bench.name, additional_config=additional_config)
if redis:
from bench.config.redis import generate_config
generate_config(self.bench.name)
if procfile:
from bench.config.procfile import setup_procfile
setup_procfile(self.bench.name, skip_redis=not redis)
@step(title="Updating pip", success="Updated pip")
def pip(self, verbose=False):
"""Updates env pip; assumes that env is setup"""
import bench.cli
verbose = bench.cli.verbose or verbose
quiet_flag = "" if verbose else "--quiet"
return self.run(
f"{self.bench.python} -m pip install {quiet_flag} --upgrade pip", cwd=self.bench.name
)
@step(title="Installing wheel", success="Installed wheel")
def wheel(self, verbose=False):
"""Wheel is required for building old setup.py packages.
ref: https://github.com/pypa/pip/issues/8559"""
import bench.cli
verbose = bench.cli.verbose or verbose
quiet_flag = "" if verbose else "--quiet"
return self.run(
f"{self.bench.python} -m pip install {quiet_flag} wheel", cwd=self.bench.name
)
def logging(self):
from bench.utils import setup_logging
return setup_logging(bench_path=self.bench.name)
@step(title="Setting Up Bench Patches", success="Bench Patches Set Up")
def patches(self):
shutil.copy(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "patches", "patches.txt"),
os.path.join(self.bench.name, "patches.txt"),
)
@step(title="Setting Up Backups Cronjob", success="Backups Cronjob Set Up")
def backups(self):
# TODO: to something better for logging data? - maybe a wrapper that auto-logs with more context
logger.log("setting up backups")
from crontab import CronTab
bench_dir = os.path.abspath(self.bench.name)
user = self.bench.conf.get("frappe_user")
logfile = os.path.join(bench_dir, "logs", "backup.log")
system_crontab = CronTab(user=user)
backup_command = f"cd {bench_dir} && {sys.argv[0]} --verbose --site all backup"
job_command = f"{backup_command} >> {logfile} 2>&1"
if job_command not in str(system_crontab):
job = system_crontab.new(
command=job_command, comment="bench auto backups set for every 6 hours"
)
job.every(6).hours()
system_crontab.write()
logger.log("backups were set up")
@job(title="Setting Up Bench Dependencies", success="Bench Dependencies Set Up")
def requirements(self, apps=None):
"""Install and upgrade specified / all installed apps on given Bench"""
from bench.app import App
apps = apps or self.bench.apps
self.pip()
print(f"Installing {len(apps)} applications...")
for app in apps:
path_to_app = os.path.join(self.bench.name, "apps", app)
app = App(path_to_app, bench=self.bench, to_clone=False).install(
skip_assets=True, restart_bench=False, ignore_resolution=True
)
def python(self, apps=None):
"""Install and upgrade Python dependencies for specified / all installed apps on given Bench"""
import bench.cli
apps = apps or self.bench.apps
quiet_flag = "" if bench.cli.verbose else "--quiet"
self.pip()
for app in apps:
app_path = os.path.join(self.bench.name, "apps", app)
log(f"\nInstalling python dependencies for {app}", level=3, no_log=True)
self.run(f"{self.bench.python} -m pip install {quiet_flag} --upgrade -e {app_path}")
def node(self, apps=None):
"""Install and upgrade Node dependencies for specified / all apps on given Bench"""
from bench.utils.bench import update_node_packages
return update_node_packages(bench_path=self.bench.name, apps=apps)
class BenchTearDown:
	"""Teardown helpers for removing a bench: cron cleanup and directory
	deletion. Destructive — ``dirs`` removes the whole bench tree."""
	def __init__(self, bench):
		self.bench = bench
	def backups(self):
		# Remove the auto-backup cron entry set up by BenchSetup.backups.
		remove_backups_crontab(self.bench.name)
	def dirs(self):
		# Delete the entire bench directory tree.
		shutil.rmtree(self.bench.name)
|
2302_79757062/bench
|
bench/bench.py
|
Python
|
agpl-3.0
| 13,559
|
# imports - standard imports
import atexit
from contextlib import contextmanager
from logging import Logger
import os
import pwd
import sys
# imports - third party imports
import click
# imports - module imports
import bench
from bench.bench import Bench
from bench.commands import bench_command
from bench.config.common_site_config import get_config
from bench.utils import (
check_latest_version,
drop_privileges,
find_parent_bench,
get_env_frappe_commands,
get_cmd_output,
is_bench_directory,
is_dist_editable,
is_root,
log,
setup_logging,
get_cmd_from_sysargv,
)
from bench.utils.bench import get_env_cmd
from importlib.util import find_spec
# these variables are used to show dynamic outputs on the terminal
dynamic_feed = False
verbose = False
# True unless BENCH_DEVELOPER or CI is set in the environment (set in cli()).
is_envvar_warn_set = None
from_command_line = False  # set when commands are executed via the CLI
# Buffer for log messages emitted before logging is configured.
bench.LOG_BUFFER = []
change_uid_msg = "You should not run this command as root"
# Directory containing this module (the bench package source).
src = os.path.dirname(__file__)
# Modules whose frames are stripped from CommandFailedError tracebacks.
SKIP_MODULE_TRACEBACK = ("click",)
@contextmanager
def execute_cmd(check_for_update=True, command: str = None, logger: Logger = None):
	"""Context manager wrapping a bench command execution.

	Optionally schedules a version check at interpreter exit, prints a red
	``ERROR:`` line for ordinary exceptions, logs the failing command's exit
	code, and always re-raises the exception.

	:param check_for_update: register ``check_latest_version`` with atexit
	:param command: full command line, used only for the log message
	:param logger: logger for the failure message; may be ``None``
	"""
	if check_for_update:
		atexit.register(check_latest_version)
	try:
		yield
	except BaseException as e:
		# SystemExit carries its status in .code; anything else counts as 1.
		return_code = getattr(e, "code", 1)
		if isinstance(e, Exception):
			click.secho(f"ERROR: {e}", fg="red")
		# Guard against logger=None — previously this raised AttributeError
		# and masked the original exception.
		if return_code and logger:
			logger.warning(f"{command} executed with exit code {return_code}")
		# Bare raise preserves the original traceback.
		raise
def cli():
	"""Entry point for the ``bench`` CLI.

	Bootstraps logging and config, drops root privileges where appropriate,
	then dispatches to either a bench-native command or (inside a bench)
	the framework's own command handler.
	"""
	setup_clear_cache()
	global from_command_line, bench_config, is_envvar_warn_set, verbose
	from_command_line = True
	command = " ".join(sys.argv)
	argv = set(sys.argv)
	is_envvar_warn_set = not (os.environ.get("BENCH_DEVELOPER") or os.environ.get("CI"))
	is_cli_command = len(sys.argv) > 1 and not argv.intersection({"src", "--version"})
	cmd_from_sys = get_cmd_from_sysargv()
	if "--verbose" in argv:
		verbose = True
	# Allow running from any subdirectory of a bench.
	change_working_directory()
	logger = setup_logging()
	logger.info(command)
	bench_config = get_config(".")
	if is_cli_command:
		check_uid()
		change_uid()
		change_dir()
	# Warn when bench itself is installed editable outside developer mode.
	if (
		is_envvar_warn_set
		and is_cli_command
		and not bench_config.get("developer_mode")
		and is_dist_editable(bench.PROJECT_NAME)
	):
		log(
			"bench is installed in editable mode!\n\nThis is not the recommended mode"
			" of installation for production. Instead, install the package from PyPI"
			" with: `pip install frappe-bench`\n",
			level=3,
		)
	in_bench = is_bench_directory()
	if (
		not in_bench
		and len(sys.argv) > 1
		and not argv.intersection(
			{"init", "find", "src", "drop", "get", "get-app", "--version"}
		)
		and not cmd_requires_root()
	):
		log("Command not being executed in bench directory", level=3)
	if len(sys.argv) == 1 or sys.argv[1] == "--help":
		print(click.Context(bench_command).get_help())
		if in_bench:
			print(get_frappe_help())
		return
	# Collect every option name (primary and secondary) bench_command accepts.
	_opts = [x.opts + x.secondary_opts for x in bench_command.params]
	opts = {item for sublist in _opts for item in sublist}
	setup_exception_handler()
	# handle usages like `--use-feature='feat-x'` and `--use-feature 'feat-x'`
	if cmd_from_sys and cmd_from_sys.split("=", 1)[0].strip() in opts:
		bench_command()
	if cmd_from_sys in bench_command.commands:
		with execute_cmd(check_for_update=is_cli_command, command=command, logger=logger):
			bench_command()
	# Unknown command inside a bench: delegate to the framework's CLI
	# (frappe_cmd exec()s and does not return).
	if in_bench:
		frappe_cmd()
	bench_command()
def check_uid():
	"""Exit with an error if a root-only command is run without root."""
	if cmd_requires_root() and not is_root():
		log("superuser privileges required for this command", level=3)
		sys.exit(1)
def cmd_requires_root():
	"""Return True when the command on ``sys.argv`` needs superuser
	privileges (production setup, system packages, etc.); False otherwise."""
	# `bench setup production` style: root-only subcommands.
	if len(sys.argv) > 2 and sys.argv[2] in (
		"production",
		"sudoers",
		"lets-encrypt",
		"fonts",
		"print",
		"firewall",
		"ssh-port",
		"role",
		"fail2ban",
		"wildcard-ssl",
	):
		return True
	# Root-only top-level commands.
	if len(sys.argv) >= 2 and sys.argv[1] in (
		"patch",
		"renew-lets-encrypt",
		"disable-production",
	):
		return True
	# `bench install <pkg>` installs system packages. NOTE: the original
	# used `in ("install")` — a substring test against the string "install",
	# which wrongly matched e.g. "stall" or "in"; use tuple membership.
	if len(sys.argv) > 2 and sys.argv[1] in ("install",):
		return True
	return False
def change_dir():
	"""chdir into the bench directory recorded in /etc/frappe_bench_dir,
	unless we are already in a bench (config.json exists) or running init."""
	if os.path.exists("config.json") or "init" in sys.argv:
		return
	dir_path_file = "/etc/frappe_bench_dir"
	if os.path.exists(dir_path_file):
		with open(dir_path_file) as f:
			dir_path = f.read().strip()
		if os.path.exists(dir_path):
			os.chdir(dir_path)
def change_uid():
	"""When running as root on a non-root command, drop privileges to the
	configured frappe_user, or abort if none is configured."""
	if is_root() and not cmd_requires_root():
		frappe_user = bench_config.get("frappe_user")
		if frappe_user:
			drop_privileges(uid_name=frappe_user, gid_name=frappe_user)
			# Fix $HOME so tools relying on it see the frappe user's home.
			os.environ["HOME"] = pwd.getpwnam(frappe_user).pw_dir
		else:
			log(change_uid_msg, level=3)
			sys.exit(1)
def app_cmd(bench_path="."):
	"""Replace this process with `env-python -m frappe.utils.bench_helper`,
	forwarding the remaining CLI args. Does not return."""
	f = get_env_cmd("python", bench_path=bench_path)
	os.chdir(os.path.join(bench_path, "sites"))
	os.execv(f, [f] + ["-m", "frappe.utils.bench_helper"] + sys.argv[1:])
def frappe_cmd(bench_path="."):
	"""Like app_cmd, but routes through the `frappe` sub-group of
	bench_helper. Does not return."""
	f = get_env_cmd("python", bench_path=bench_path)
	os.chdir(os.path.join(bench_path, "sites"))
	os.execv(f, [f] + ["-m", "frappe.utils.bench_helper", "frappe"] + sys.argv[1:])
def get_frappe_commands():
	"""Return the set of commands the installed frappe framework exposes,
	or an empty set outside a bench."""
	if not is_bench_directory():
		return set()
	return set(get_env_frappe_commands())
def get_frappe_help(bench_path="."):
	"""Return framework command help text for --help output; empty string
	if the helper cannot be invoked."""
	python = get_env_cmd("python", bench_path=bench_path)
	sites_path = os.path.join(bench_path, "sites")
	try:
		out = get_cmd_output(
			f"{python} -m frappe.utils.bench_helper get-frappe-help", cwd=sites_path
		)
		return "\n\nFramework commands:\n" + out.split("Commands:")[1]
	except Exception:
		return ""
def change_working_directory():
	"""Allows bench commands to be run from anywhere inside a bench directory"""
	cur_dir = os.path.abspath(".")
	bench_path = find_parent_bench(cur_dir)
	bench.current_path = os.getcwd()
	bench.updated_path = bench_path
	if bench_path:
		os.chdir(bench_path)
def setup_clear_cache():
	"""Monkey-patch os.chdir so cached Bench/env lookups are invalidated
	whenever the working directory changes."""
	from copy import copy
	f = copy(os.chdir)
	def _chdir(*args, **kwargs):
		Bench.cache_clear()
		get_env_cmd.cache_clear()
		return f(*args, **kwargs)
	os.chdir = _chdir
def setup_exception_handler():
	"""Install a sys.excepthook that, for CommandFailedError, prints the
	traceback with frames from SKIP_MODULE_TRACEBACK modules (e.g. click)
	filtered out; all other exceptions use the default hook."""
	from traceback import format_exception
	from bench.exceptions import CommandFailedError

	def handle_exception(exc_type, exc_value, tb):
		if exc_type == CommandFailedError:
			print("".join(generate_exc(exc_type, exc_value, tb)))
		else:
			sys.__excepthook__(exc_type, exc_value, tb)

	def generate_exc(exc_type, exc_value, tb):
		# Directories of modules whose frames should be hidden.
		TB_SKIP = [
			os.path.dirname(find_spec(module).origin) for module in SKIP_MODULE_TRACEBACK
		]
		# Yield each traceback line once, only if it mentions none of the
		# skipped module paths. (The previous nested-loop version yielded a
		# line once per non-matching module, duplicating output whenever
		# SKIP_MODULE_TRACEBACK had more than one entry.)
		for tb_line in format_exception(exc_type, exc_value, tb):
			if not any(skip_module in tb_line for skip_module in TB_SKIP):
				yield tb_line

	sys.excepthook = handle_exception
|
2302_79757062/bench
|
bench/cli.py
|
Python
|
agpl-3.0
| 6,483
|
# imports - third party imports
import click
# imports - module imports
from bench.utils.cli import (
MultiCommandGroup,
print_bench_version,
use_experimental_feature,
setup_verbosity,
)
@click.group(cls=MultiCommandGroup)
@click.option(
	"--version",
	is_flag=True,
	is_eager=True,
	callback=print_bench_version,
	expose_value=False,
)
@click.option(
	"--use-feature",
	is_eager=True,
	callback=use_experimental_feature,
	expose_value=False,
)
@click.option(
	"-v",
	"--verbose",
	is_flag=True,
	callback=setup_verbosity,
	expose_value=False,
)
def bench_command(bench_path="."):
	"""Root click group for the bench CLI; eager options (--version,
	--use-feature, -v) are handled by callbacks before dispatch."""
	import bench
	# Detect and cache the installed frappe version for this bench.
	bench.set_frappe_version(bench_path=bench_path)
# Register all subcommands on the root group. Imports are done here (after
# bench_command is defined) to avoid circular imports.
# -- app/bench lifecycle commands --
from bench.commands.make import (
	drop,
	exclude_app_for_update,
	get_app,
	include_app_for_update,
	init,
	new_app,
	pip,
	remove_app,
	validate_dependencies,
)
bench_command.add_command(init)
bench_command.add_command(drop)
bench_command.add_command(get_app)
bench_command.add_command(new_app)
bench_command.add_command(remove_app)
bench_command.add_command(exclude_app_for_update)
bench_command.add_command(include_app_for_update)
bench_command.add_command(pip)
bench_command.add_command(validate_dependencies)
# -- update/branch commands --
from bench.commands.update import (
	retry_upgrade,
	switch_to_branch,
	switch_to_develop,
	update,
)
bench_command.add_command(update)
bench_command.add_command(retry_upgrade)
bench_command.add_command(switch_to_branch)
bench_command.add_command(switch_to_develop)
# -- utility commands --
from bench.commands.utils import (
	app_cache_helper,
	backup_all_sites,
	bench_src,
	disable_production,
	download_translations,
	find_benches,
	migrate_env,
	renew_lets_encrypt,
	restart,
	set_mariadb_host,
	set_nginx_port,
	set_redis_cache_host,
	set_redis_queue_host,
	set_redis_socketio_host,
	set_ssl_certificate,
	set_ssl_certificate_key,
	set_url_root,
	start,
)
bench_command.add_command(start)
bench_command.add_command(restart)
bench_command.add_command(set_nginx_port)
bench_command.add_command(set_ssl_certificate)
bench_command.add_command(set_ssl_certificate_key)
bench_command.add_command(set_url_root)
bench_command.add_command(set_mariadb_host)
bench_command.add_command(set_redis_cache_host)
bench_command.add_command(set_redis_queue_host)
bench_command.add_command(set_redis_socketio_host)
bench_command.add_command(download_translations)
bench_command.add_command(backup_all_sites)
bench_command.add_command(renew_lets_encrypt)
bench_command.add_command(disable_production)
bench_command.add_command(bench_src)
bench_command.add_command(find_benches)
bench_command.add_command(migrate_env)
bench_command.add_command(app_cache_helper)
# -- command sub-groups --
from bench.commands.setup import setup
bench_command.add_command(setup)
from bench.commands.config import config
bench_command.add_command(config)
from bench.commands.git import remote_reset_url, remote_set_url, remote_urls
bench_command.add_command(remote_set_url)
bench_command.add_command(remote_reset_url)
bench_command.add_command(remote_urls)
from bench.commands.install import install
bench_command.add_command(install)
|
2302_79757062/bench
|
bench/commands/__init__.py
|
Python
|
agpl-3.0
| 3,033
|
# imports - module imports
from bench.config.common_site_config import update_config, put_config
# imports - third party imports
import click
@click.group(help="Change bench configuration")
def config():
	# Group container only; subcommands are attached below.
	pass
@click.command(
	"restart_supervisor_on_update",
	help="Enable/Disable auto restart of supervisor processes",
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_restart_supervisor_on_update(state):
	"""Toggle restart_supervisor_on_update in common_site_config.json."""
	update_config({"restart_supervisor_on_update": state == "on"})
@click.command(
	"restart_systemd_on_update", help="Enable/Disable auto restart of systemd units"
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_restart_systemd_on_update(state):
	"""Toggle restart_systemd_on_update in common_site_config.json."""
	update_config({"restart_systemd_on_update": state == "on"})
@click.command(
	"dns_multitenant", help="Enable/Disable bench multitenancy on running bench update"
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_dns_multitenant(state):
	"""Toggle DNS-based multitenancy in common_site_config.json."""
	update_config({"dns_multitenant": state == "on"})
@click.command(
	"serve_default_site", help="Configure nginx to serve the default site on port 80"
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_serve_default_site(state):
	"""Toggle serve_default_site in common_site_config.json."""
	update_config({"serve_default_site": state == "on"})
@click.command("rebase_on_pull", help="Rebase repositories on pulling")
@click.argument("state", type=click.Choice(["on", "off"]))
def config_rebase_on_pull(state):
	"""Toggle rebase (vs merge) behaviour for `bench update` pulls."""
	update_config({"rebase_on_pull": state == "on"})
@click.command("http_timeout", help="Set HTTP timeout")
@click.argument("seconds", type=int)
def config_http_timeout(seconds):
	"""Set the HTTP timeout (in seconds) in common_site_config.json."""
	update_config({"http_timeout": seconds})
@click.command("set-common-config", help="Set value in common config")
@click.option("configs", "-c", "--config", multiple=True, type=(str, str))
def set_common_config(configs):
	"""Set one or more key/value pairs in the bench's common site config.

	Values are parsed as Python literals where possible (numbers, lists,
	dicts; "true"/"false" are title-cased into booleans first); anything
	unparseable is stored as a plain string.
	"""
	import ast

	common_site_config = {}
	for key, value in configs:
		if value in ("true", "false"):
			value = value.title()
		try:
			value = ast.literal_eval(value)
		# literal_eval raises SyntaxError (not just ValueError) for inputs
		# like URLs or strings with spaces — treat those as plain strings
		# instead of crashing.
		except (ValueError, SyntaxError):
			pass
		common_site_config[key] = value

	update_config(common_site_config, bench_path=".")
@click.command(
	"remove-common-config", help="Remove specific keys from current bench's common config"
)
@click.argument("keys", nargs=-1)
def remove_common_config(keys):
	"""Delete the given keys from common_site_config.json (missing keys are
	ignored) and write the file back."""
	from bench.bench import Bench
	common_site_config = Bench(".").conf
	for key in keys:
		if key in common_site_config:
			del common_site_config[key]
	put_config(common_site_config)
# Attach all subcommands to the `bench config` group.
config.add_command(config_restart_supervisor_on_update)
config.add_command(config_restart_systemd_on_update)
config.add_command(config_dns_multitenant)
config.add_command(config_rebase_on_pull)
config.add_command(config_serve_default_site)
config.add_command(config_http_timeout)
config.add_command(set_common_config)
config.add_command(remove_common_config)
|
2302_79757062/bench
|
bench/commands/config.py
|
Python
|
agpl-3.0
| 2,838
|
# imports - standard imports
import os
import subprocess
# imports - module imports
from bench.bench import Bench
from bench.app import get_repo_dir
from bench.utils import set_git_remote_url
from bench.utils.app import get_remote
# imports - third party imports
import click
@click.command('remote-set-url', help="Set app remote url")
@click.argument('git-url')
def remote_set_url(git_url):
	"""Point the current app's git remote at the given URL."""
	set_git_remote_url(git_url)
@click.command('remote-reset-url', help="Reset app remote url to frappe official")
@click.argument('app')
def remote_reset_url(app):
	"""Reset an app's git remote to the official frappe GitHub repo."""
	git_url = f"https://github.com/frappe/{app}.git"
	set_git_remote_url(git_url)
@click.command('remote-urls', help="Show apps remote url")
def remote_urls():
	"""Print ``<app>	<remote url>`` for every app on this bench that is a
	git repository."""
	for app in Bench(".").apps:
		repo_dir = get_repo_dir(app)
		if os.path.exists(os.path.join(repo_dir, '.git')):
			remote = get_remote(app)
			# check_output returns bytes; decode so we print the URL itself
			# rather than its bytes repr (b'...').
			remote_url = subprocess.check_output(
				['git', 'config', '--get', f'remote.{remote}.url'], cwd=repo_dir
			).decode().strip()
			print(f"{app}\t{remote_url}")
|
2302_79757062/bench
|
bench/commands/git.py
|
Python
|
agpl-3.0
| 1,009
|
# imports - module imports
from bench.utils import run_playbook
from bench.utils.system import setup_sudoers
# imports - third party imports
import click
# Shared ansible extra-vars; individual commands merge their options in.
# NOTE(review): this dict is module-global and mutated by several commands.
extra_vars = {"production": True}
@click.group(help="Install system dependencies for setting up Frappe environment")
def install():
	# Group container only; subcommands are attached below.
	pass
@click.command(
	"prerequisites",
	help="Installs pre-requisite libraries, essential tools like b2zip, htop, screen, vim, x11-fonts, python libs, cups and Redis",
)
def install_prerequisites():
	"""Run the common + redis ansible roles."""
	run_playbook("site.yml", tag="common, redis")
@click.command(
	"mariadb", help="Install and setup MariaDB of specified version and root password"
)
@click.option("--mysql_root_password", "--mysql-root-password",
	"--mariadb_root_password", "--mariadb-root-password", default="")
@click.option("--version", default="10.3")
def install_mariadb(mysql_root_password, version):
	"""Install MariaDB via ansible, optionally setting the root password."""
	if mysql_root_password:
		extra_vars.update(
			{
				"mysql_root_password": mysql_root_password,
			}
		)
	extra_vars.update({"mariadb_version": version})
	run_playbook("site.yml", extra_vars=extra_vars, tag="mariadb")
@click.command("wkhtmltopdf", help="Installs wkhtmltopdf v0.12.3 for linux")
def install_wkhtmltopdf():
	"""Install wkhtmltopdf via ansible."""
	run_playbook("site.yml", extra_vars=extra_vars, tag="wkhtmltopdf")
@click.command("nodejs", help="Installs Node.js v8")
def install_nodejs():
	"""Install Node.js via ansible."""
	run_playbook("site.yml", extra_vars=extra_vars, tag="nodejs")
@click.command("psutil", help="Installs psutil via pip")
def install_psutil():
	"""Install psutil via ansible."""
	run_playbook("site.yml", extra_vars=extra_vars, tag="psutil")
@click.command(
	"supervisor",
	help="Installs supervisor. If user is specified, sudoers is setup for that user",
)
@click.option("--user")
def install_supervisor(user=None):
	"""Install supervisor; optionally grant the user passwordless sudo."""
	run_playbook("site.yml", extra_vars=extra_vars, tag="supervisor")
	if user:
		setup_sudoers(user)
@click.command(
	"nginx", help="Installs NGINX. If user is specified, sudoers is setup for that user"
)
@click.option("--user")
def install_nginx(user=None):
	"""Install NGINX; optionally grant the user passwordless sudo."""
	run_playbook("site.yml", extra_vars=extra_vars, tag="nginx")
	if user:
		setup_sudoers(user)
@click.command("virtualbox", help="Installs virtualbox")
def install_virtualbox():
	"""Install VirtualBox via the VM-build playbook."""
	run_playbook("vm_build.yml", tag="virtualbox")
@click.command("packer", help="Installs Oracle virtualbox and packer 1.2.1")
def install_packer():
	"""Install VirtualBox + Packer via the VM-build playbook."""
	run_playbook("vm_build.yml", tag="packer")
@click.command(
	"fail2ban",
	help="Install fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks",
)
@click.option(
	"--maxretry",
	default=6,
	help="Number of matches (i.e. value of the counter) which triggers ban action on the IP.",
)
@click.option(
	"--bantime",
	default=600,
	help="The counter is set to zero if no match is found within 'findtime' seconds.",
)
@click.option(
	"--findtime",
	default=600,
	help='Duration (in seconds) for IP to be banned for. Negative number for "permanent" ban.',
)
def install_failtoban(**kwargs):
	"""Install and configure fail2ban with the given thresholds."""
	extra_vars.update(kwargs)
	run_playbook("site.yml", extra_vars=extra_vars, tag="fail2ban")
# Attach all subcommands to the `bench install` group.
install.add_command(install_prerequisites)
install.add_command(install_mariadb)
install.add_command(install_wkhtmltopdf)
install.add_command(install_nodejs)
install.add_command(install_psutil)
install.add_command(install_supervisor)
install.add_command(install_nginx)
install.add_command(install_failtoban)
install.add_command(install_virtualbox)
install.add_command(install_packer)
|
2302_79757062/bench
|
bench/commands/install.py
|
Python
|
agpl-3.0
| 3,435
|
# imports - third party imports
import click
@click.command("init", help="Initialize a new bench instance in the specified path")
@click.argument("path")
@click.option(
	"--version",
	"--frappe-branch",
	"frappe_branch",
	default=None,
	help="Clone a particular branch of frappe",
)
@click.option(
	"--ignore-exist", is_flag=True, default=False, help="Ignore if Bench instance exists."
)
@click.option(
	"--python", type=str, default="python3", help="Path to Python Executable."
)
@click.option(
	"--apps_path", default=None, help="path to json files with apps to install after init"
)
@click.option("--frappe-path", default=None, help="path to frappe repo")
@click.option("--clone-from", default=None, help="copy repos from path")
@click.option(
	"--clone-without-update", is_flag=True, help="copy repos from path without update"
)
@click.option("--no-procfile", is_flag=True, help="Do not create a Procfile")
@click.option(
	"--no-backups",
	is_flag=True,
	help="Do not set up automatic periodic backups for all sites on this bench",
)
@click.option(
	"--skip-redis-config-generation",
	is_flag=True,
	help="Skip redis config generation if already specifying the common-site-config file",
)
@click.option("--skip-assets", is_flag=True, default=False, help="Do not build assets")
@click.option("--install-app", help="Install particular app after initialization")
@click.option("--verbose", is_flag=True, help="Verbose output during install")
@click.option(
	"--dev",
	is_flag=True,
	default=False,
	help="Enable developer mode and install development dependencies.",
)
def init(
	path,
	apps_path,
	frappe_path,
	frappe_branch,
	no_procfile,
	no_backups,
	clone_from,
	verbose,
	skip_redis_config_generation,
	clone_without_update,
	ignore_exist=False,
	skip_assets=False,
	python="python3",
	install_app=None,
	dev=False,
):
	"""Create a new bench at ``path``; on failure, offer to roll back by
	deleting the partially-created directory."""
	import os
	from bench.utils import log
	from bench.utils.system import init
	# Refuse to clobber an existing bench unless --ignore-exist is given.
	if not ignore_exist and os.path.exists(path):
		log(f"Bench instance already exists at {path}", level=2)
		return
	try:
		init(
			path,
			apps_path=apps_path,  # can be used from --config flag? Maybe config file could have more info?
			no_procfile=no_procfile,
			no_backups=no_backups,
			frappe_path=frappe_path,
			frappe_branch=frappe_branch,
			install_app=install_app,
			clone_from=clone_from,
			skip_redis_config_generation=skip_redis_config_generation,
			clone_without_update=clone_without_update,
			skip_assets=skip_assets,
			python=python,
			verbose=verbose,
			dev=dev,
		)
		log(f"Bench {path} initialized", level=1)
	except SystemExit:
		raise
	except Exception:
		import shutil
		import time
		from bench.utils import get_traceback
		# add a sleep here so that the traceback of other processes doesnt overlap with the prompts
		time.sleep(1)
		print(get_traceback())
		log(f"There was a problem while creating {path}", level=2)
		if click.confirm("Do you want to rollback these changes?", abort=True):
			log(f'Rolling back Bench "{path}"')
			if os.path.exists(path):
				shutil.rmtree(path)
@click.command("drop")
@click.argument("path")
def drop(path):
	"""Delete an existing, empty bench (refuses when it still has sites)."""
	from bench.bench import Bench
	from bench.exceptions import BenchNotFoundError, ValidationError
	bench = Bench(path)
	if not bench.exists:
		raise BenchNotFoundError(f"Bench {bench.name} does not exist")
	if bench.sites:
		raise ValidationError("Cannot remove non-empty bench directory")
	bench.drop()
	print("Bench dropped")
@click.command(
	["get", "get-app"],
	help="Clone an app from the internet or filesystem and set it up in your bench",
)
@click.argument("name", nargs=-1)  # Dummy argument for backward compatibility
@click.argument("git-url")
@click.option("--branch", default=None, help="branch to checkout")
@click.option("--overwrite", is_flag=True, default=False)
@click.option("--skip-assets", is_flag=True, default=False, help="Do not build assets")
@click.option(
	"--soft-link",
	is_flag=True,
	default=False,
	help="Create a soft link to git repo instead of clone.",
)
@click.option(
	"--init-bench", is_flag=True, default=False, help="Initialize Bench if not in one"
)
@click.option(
	"--resolve-deps",
	is_flag=True,
	default=False,
	help="Resolve dependencies before installing app",
)
@click.option(
	"--cache-key",
	type=str,
	default=None,
	help="Caches get-app artifacts if provided (only first 10 chars is used)",
)
@click.option(
	"--compress-artifacts",
	is_flag=True,
	default=False,
	help="Whether to gzip get-app artifacts that are to be cached",
)
def get_app(
	git_url,
	branch,
	name=None,
	overwrite=False,
	skip_assets=False,
	soft_link=False,
	init_bench=False,
	resolve_deps=False,
	cache_key=None,
	compress_artifacts=False,
):
	"clone an app from the internet and set it up in your bench"
	# Thin CLI wrapper: all the work happens in bench.app.get_app.
	from bench.app import get_app
	get_app(
		git_url,
		branch=branch,
		skip_assets=skip_assets,
		overwrite=overwrite,
		soft_link=soft_link,
		init_bench=init_bench,
		resolve_deps=resolve_deps,
		cache_key=cache_key,
		compress_artifacts=compress_artifacts,
	)
@click.command("new-app", help="Create a new Frappe application under apps folder")
@click.option(
	"--no-git",
	is_flag=True,
	flag_value="--no-git",
	help="Do not initialize git repository for the app (available in Frappe v14+)",
)
@click.argument("app-name")
def new_app(app_name, no_git=None):
	"""Scaffold a new Frappe app in this bench's apps folder."""
	from bench.app import new_app
	new_app(app_name, no_git)
@click.command(
	["remove", "rm", "remove-app"],
	help=(
		"Completely remove app from bench and re-build assets if not installed on any site"
	),
)
@click.option("--no-backup", is_flag=True, help="Do not backup app before removing")
@click.option("--force", is_flag=True, help="Force remove app")
@click.argument("app-name")
def remove_app(app_name, no_backup=False, force=False):
	"""Uninstall and delete an app from the current bench."""
	from bench.bench import Bench
	bench = Bench(".")
	bench.uninstall(app_name, no_backup=no_backup, force=force)
@click.command("exclude-app", help="Exclude app from updating")
@click.argument("app_name")
def exclude_app_for_update(app_name):
	"""Add the app to excluded_apps.txt so `bench update` skips it."""
	from bench.app import add_to_excluded_apps_txt
	add_to_excluded_apps_txt(app_name)
@click.command("include-app", help="Include app for updating")
@click.argument("app_name")
def include_app_for_update(app_name):
	"Include app from updating"
	# Remove the app from excluded_apps.txt so `bench update` processes it.
	from bench.app import remove_from_excluded_apps_txt
	remove_from_excluded_apps_txt(app_name)
@click.command(
	"pip",
	context_settings={"ignore_unknown_options": True, "help_option_names": []},
	help="For pip help use `bench pip help [COMMAND]` or `bench pip [COMMAND] -h`",
)
@click.argument("args", nargs=-1)
@click.pass_context
def pip(ctx, args):
	"Run pip commands in bench env"
	import os
	from bench.utils.bench import get_env_cmd
	env_py = get_env_cmd("python")
	# Replace the current process with `env-python -m pip <args>`.
	os.execv(env_py, (env_py, "-m", "pip") + args)
@click.command(
	"validate-dependencies",
	help="Validates that all requirements specified in frappe-dependencies are met curently.",
)
@click.pass_context
def validate_dependencies(ctx):
	"Validate all specified frappe-dependencies."
	# Raises (throw=True) on the first app whose declared frappe-dependency
	# constraints are not satisfied.
	from bench.bench import Bench
	from bench.app import App
	bench = Bench(".")
	for app_name in bench.apps:
		app = App(app_name, bench=bench)
		app.validate_app_dependencies(throw=True)
|
2302_79757062/bench
|
bench/commands/make.py
|
Python
|
agpl-3.0
| 7,178
|
# imports - standard imports
import os
import sys
# imports - third party imports
import click
# imports - module imports
from bench.utils import exec_cmd, run_playbook, which
from bench.utils.cli import SugaredOption
@click.group(help="Setup command group for enabling setting up a Frappe environment")
def setup():
	# Group container only; subcommands are attached elsewhere in this module.
	pass
@click.command(
	"sudoers", help="Add commands to sudoers list for execution without password"
)
@click.argument("user")
def setup_sudoers(user):
	"""Grant the given user passwordless sudo for bench-managed commands."""
	from bench.utils.system import setup_sudoers
	setup_sudoers(user)
@click.command("nginx", help="Generate configuration files for NGINX")
@click.option(
	"--logging", default="combined", type=click.Choice(["none", "site", "combined"])
)
@click.option(
	"--log_format",
	help="Specify the log_format for nginx. Use none or '' to not set a value.",
	only_if_set=["logging"],
	cls=SugaredOption,
	default="main",
)
@click.option(
	"--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True
)
def setup_nginx(yes=False, logging="combined", log_format=None):
	"""(Re)generate the bench's nginx config file."""
	from bench.config.nginx import make_nginx_conf
	make_nginx_conf(bench_path=".", yes=yes, logging=logging, log_format=log_format)
@click.command("reload-nginx", help="Checks NGINX config file and reloads service")
def reload_nginx():
	"""Validate the nginx config and reload the nginx service."""
	from bench.config.production_setup import reload_nginx
	reload_nginx()
@click.command("supervisor", help="Generate configuration for supervisor")
@click.option("--user", help="optional user argument")
@click.option(
	"--yes", help="Yes to regeneration of supervisor config", is_flag=True, default=False
)
@click.option(
	"--skip-redis", help="Skip redis configuration", is_flag=True, default=False
)
@click.option(
	"--skip-supervisord",
	help="Skip supervisord configuration",
	is_flag=True,
	default=False,
)
def setup_supervisor(user=None, yes=False, skip_redis=False, skip_supervisord=False):
	"""Generate the bench's supervisor config; requires supervisor installed."""
	from bench.utils import get_cmd_output
	from bench.config.supervisor import (
		check_supervisord_config,
		generate_supervisor_config,
	)
	if which("supervisorctl") is None:
		click.secho("Please install `supervisor` to proceed", fg="red")
		sys.exit(1)
	# If supervisorctl is not accessible to this user, fix supervisord config
	# permissions first (unless explicitly skipped).
	if not skip_supervisord and "Permission denied" in get_cmd_output(
		"supervisorctl status"
	):
		check_supervisord_config(user=user)
	generate_supervisor_config(bench_path=".", user=user, yes=yes, skip_redis=skip_redis)
@click.command("redis", help="Generates configuration for Redis")
def setup_redis():
	"""Generate redis config files for the current bench."""
	from bench.config.redis import generate_config
	generate_config(".")
@click.command("fonts", help="Add Frappe fonts to system")
def setup_fonts():
	"""Install the Frappe fonts system-wide."""
	from bench.utils.system import setup_fonts
	setup_fonts()
@click.command(
	"production", help="Setup Frappe production environment for specific user"
)
@click.argument("user")
@click.option("--yes", help="Yes to regeneration config", is_flag=True, default=False)
def setup_production(user, yes=False):
	"""Set up nginx + supervisor based production for the given user."""
	from bench.config.production_setup import setup_production
	setup_production(user=user, yes=yes)
@click.command("backups", help="Add cronjob for bench backups")
def setup_backups():
	"""Install the 6-hourly all-site backup cron job for this bench."""
	from bench.bench import Bench
	Bench(".").setup.backups()
@click.command("env", help="Setup Python environment for bench")
@click.option(
	"--python", type=str, default="python3", help="Path to Python Executable."
)
def setup_env(python="python3"):
	"""(Re)create the bench virtualenv using the given python."""
	from bench.bench import Bench
	return Bench(".").setup.env(python=python)
@click.command("firewall", help="Setup firewall for system")
@click.option("--ssh_port")
@click.option("--force")
def setup_firewall(ssh_port=None, force=False):
	"""Configure the system firewall to allow only 80, 443 and SSH."""
	if not force:
		click.confirm(
			f"Setting up the firewall will block all ports except 80, 443 and {ssh_port}\nDo you want to continue?",
			abort=True,
		)
	# Default to the standard SSH port when not specified.
	if not ssh_port:
		ssh_port = 22
	run_playbook("roles/bench/tasks/setup_firewall.yml", {"ssh_port": ssh_port})
@click.command("ssh-port", help="Set SSH Port for system")
@click.argument("port")
@click.option("--force")
def set_ssh_port(port, force=False):
	"""Change the system's SSH port via ansible (asks unless --force)."""
	if not force:
		click.confirm(
			f"This will change your SSH Port to {port}\nDo you want to continue?", abort=True
		)
	run_playbook("roles/bench/tasks/change_ssh_port.yml", {"ssh_port": port})
@click.command("lets-encrypt", help="Setup lets-encrypt SSL for site")
@click.argument("site")
@click.option("--custom-domain")
@click.option(
"-n",
"--non-interactive",
default=False,
is_flag=True,
help="Run command non-interactively. This flag restarts nginx and runs certbot non interactively. Shouldn't be used on 1'st attempt",
)
def setup_letsencrypt(site, custom_domain, non_interactive):
from bench.config.lets_encrypt import setup_letsencrypt
setup_letsencrypt(site, custom_domain, bench_path=".", interactive=not non_interactive)
# Obtain a wildcard certificate for `domain` (DNS challenge, manual certbot run).
@click.command(
	"wildcard-ssl", help="Setup wildcard SSL certificate for multi-tenant bench"
)
@click.argument("domain")
@click.option("--email")
@click.option(
	"--exclude-base-domain",
	default=False,
	is_flag=True,
	help="SSL Certificate not applicable for base domain",
)
def setup_wildcard_ssl(domain, email, exclude_base_domain):
	from bench.config.lets_encrypt import setup_wildcard_ssl
	setup_wildcard_ssl(
		domain, email, bench_path=".", exclude_base_domain=exclude_base_domain
	)
# Regenerate the Procfile used by `bench start`.
@click.command("procfile", help="Generate Procfile for bench start")
def setup_procfile():
	from bench.config.procfile import setup_procfile
	setup_procfile(".")
# Kept only for backward compatibility; intentionally a no-op.
@click.command(
	"socketio", help="[DEPRECATED] Setup node dependencies for socketio server"
)
def setup_socketio():
	return
@click.command("requirements")
@click.option("--node", help="Update only Node packages", default=False, is_flag=True)
@click.option(
	"--python", help="Update only Python packages", default=False, is_flag=True
)
@click.option(
	"--dev",
	help="Install optional python development dependencies",
	default=False,
	is_flag=True,
)
@click.argument("apps", nargs=-1)
def setup_requirements(node=False, python=False, dev=False, apps=None):
	"""
	Setup Python and Node dependencies.
	You can optionally specify one or more apps to setup dependencies for.
	"""
	from bench.bench import Bench
	bench = Bench(".")
	# No flags: install both Python and Node dependencies.
	if not (node or python or dev):
		bench.setup.requirements(apps=apps)
	# --python alone
	elif not node and not dev:
		bench.setup.python(apps=apps)
	# --node alone
	elif not python and not dev:
		bench.setup.node(apps=apps)
	# Any combination involving --dev lands here. NOTE(review): passing both
	# --node and --python *without* --dev also reaches this branch and only
	# installs dev dependencies — confirm that is intended.
	else:
		from bench.utils.bench import install_python_dev_dependencies
		install_python_dev_dependencies(apps=apps)
		if node:
			click.secho(
				"--dev flag only supports python dependencies. All node development dependencies are installed by default.",
				fg="yellow",
			)
# Create the bench-manager.local site, install the bench_manager app on it,
# and (for production benches) write an nginx config for it.
@click.command(
	"manager",
	help="Setup bench-manager.local site with the bench_manager app installed on it",
)
@click.option(
	"--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True
)
@click.option(
	"--port", help="Port on which you want to run bench manager", default=23624
)
@click.option("--domain", help="Domain on which you want to run bench manager")
def setup_manager(yes=False, port=23624, domain=None):
	from bench.bench import Bench
	from bench.config.nginx import make_bench_manager_nginx_conf
	create_new_site = True
	# Re-creating the site is destructive, so confirm if it already exists.
	if "bench-manager.local" in os.listdir("sites"):
		create_new_site = click.confirm("Site already exists. Overwrite existing site?")
	if create_new_site:
		exec_cmd("bench new-site --force bench-manager.local")
	if "bench_manager" in os.listdir("apps"):
		print("App already exists. Skipping app download.")
	else:
		exec_cmd("bench get-app bench_manager")
	exec_cmd("bench --site bench-manager.local install-app bench_manager")
	bench_path = "."
	bench = Bench(bench_path)
	if bench.conf.get("restart_supervisor_on_update") or bench.conf.get(
		"restart_systemd_on_update"
	):
		# implicates a production setup or so I presume
		if not domain:
			print(
				"Please specify the site name on which you want to host bench-manager using the 'domain' flag"
			)
			sys.exit(1)
		if domain not in bench.sites:
			raise Exception("No such site")
		make_bench_manager_nginx_conf(bench_path, yes=yes, port=port, domain=domain)
# Regenerate sites/common_site_config.json with default values.
@click.command("config", help="Generate or over-write sites/common_site_config.json")
def setup_config():
	from bench.config.common_site_config import setup_config
	setup_config(".")
# Attach an additional domain (optionally with its own SSL cert) to a site.
@click.command("add-domain", help="Add a custom domain to a particular site")
@click.argument("domain")
@click.option("--site", prompt=True)
@click.option("--ssl-certificate", help="Absolute path to SSL Certificate")
@click.option("--ssl-certificate-key", help="Absolute path to SSL Certificate Key")
def add_domain(domain, site=None, ssl_certificate=None, ssl_certificate_key=None):
	"""Add custom domain to site"""
	if not site:
		print("Please specify site")
		sys.exit(1)
	from bench.config.site_config import add_domain
	add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path=".")
# Detach a previously added custom domain from a site.
@click.command("remove-domain", help="Remove custom domain from a site")
@click.argument("domain")
@click.option("--site", prompt=True)
def remove_domain(domain, site=None):
	if not site:
		print("Please specify site")
		sys.exit(1)
	from bench.config.site_config import remove_domain
	remove_domain(site, domain, bench_path=".")
# Replace a site's domain list with the given --domain values.
# Exit code 0 means the list changed, 1 means no change (or bad input).
@click.command(
	"sync-domains",
	help="Check if there is a change in domains. If yes, updates the domains list.",
)
@click.option("--domain", multiple=True)
@click.option("--site", prompt=True)
def sync_domains(domain=None, site=None):
	if not site:
		print("Please specify site")
		sys.exit(1)
	try:
		domains = list(map(str, domain))
	except Exception:
		print("Domains should be a json list of strings or dictionaries")
		sys.exit(1)
	from bench.config.site_config import sync_domains
	changed = sync_domains(site, domains, bench_path=".")
	# if changed, success, else failure
	sys.exit(0 if changed else 1)
@click.command("role", help="Install dependencies via ansible roles")
@click.argument("role")
@click.option("--admin_emails", default="")
@click.option("--mysql_root_password", "--mariadb_root_password")
@click.option("--container", is_flag=True, default=False)
def setup_roles(role, **kwargs):
	# Run the ansible site playbook; all CLI options are forwarded verbatim
	# as extra vars, always in "production" mode. A non-empty role name
	# limits the run to that role's tag.
	extra_vars = {"production": True, **kwargs}
	playbook_kwargs = {"extra_vars": extra_vars}
	if role:
		playbook_kwargs["tag"] = role
	run_playbook("site.yml", **playbook_kwargs)
# Configure a fail2ban jail for nginx brute-force protection via ansible.
@click.command(
	"fail2ban",
	help="Setup fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks",
)
@click.option(
	"--maxretry",
	default=6,
	# Fixed help text: maxretry is a match count, not a duration in seconds.
	help="Number of matches (i.e. value of the counter) which triggers ban action on the IP. Default is 6",
)
@click.option(
	"--bantime",
	default=600,
	help="Duration (in seconds) for IP to be banned for. Negative number for 'permanent' ban. Default is 600 seconds",
)
@click.option(
	"--findtime",
	default=600,
	help="The counter is set to zero if match found within 'findtime' seconds doesn't exceed 'maxretry'. Default is 600 seconds",
)
def setup_nginx_proxy_jail(**kwargs):
	# All three options are passed straight through to the ansible task.
	run_playbook("roles/fail2ban/tasks/configure_nginx_jail.yml", extra_vars=kwargs)
# Generate systemd unit files for the bench processes; flags allow stopping
# services and creating/removing the unit symlinks.
@click.command("systemd", help="Generate configuration for systemd")
@click.option("--user", help="Optional user argument")
@click.option(
	"--yes",
	help="Yes to regeneration of systemd config files",
	is_flag=True,
	default=False,
)
@click.option("--stop", help="Stop bench services", is_flag=True, default=False)
@click.option("--create-symlinks", help="Create Symlinks", is_flag=True, default=False)
@click.option("--delete-symlinks", help="Delete Symlinks", is_flag=True, default=False)
def setup_systemd(
	user=None, yes=False, stop=False, create_symlinks=False, delete_symlinks=False
):
	from bench.config.systemd import generate_systemd_config
	generate_systemd_config(
		bench_path=".",
		user=user,
		yes=yes,
		stop=stop,
		create_symlinks=create_symlinks,
		delete_symlinks=delete_symlinks,
	)
# Register every sub-command on the `setup` click group (defined earlier in
# this module), making them available as `bench setup <command>`.
setup.add_command(setup_sudoers)
setup.add_command(setup_nginx)
setup.add_command(reload_nginx)
setup.add_command(setup_supervisor)
setup.add_command(setup_redis)
setup.add_command(setup_letsencrypt)
setup.add_command(setup_wildcard_ssl)
setup.add_command(setup_production)
setup.add_command(setup_backups)
setup.add_command(setup_env)
setup.add_command(setup_procfile)
setup.add_command(setup_socketio)
setup.add_command(setup_requirements)
setup.add_command(setup_manager)
setup.add_command(setup_config)
setup.add_command(setup_fonts)
setup.add_command(add_domain)
setup.add_command(remove_domain)
setup.add_command(sync_domains)
setup.add_command(setup_firewall)
setup.add_command(set_ssh_port)
setup.add_command(setup_roles)
setup.add_command(setup_nginx_proxy_jail)
setup.add_command(setup_systemd)
|
2302_79757062/bench
|
bench/commands/setup.py
|
Python
|
agpl-3.0
| 12,750
|
# imports - third party imports
import click
# imports - module imports
from bench.app import pull_apps
from bench.utils.bench import post_upgrade, patch_sites, build_assets
# Thin CLI wrapper: every flag maps 1:1 onto bench.utils.bench.update();
# the two "--no-*" flags are inverted into positive keyword arguments.
@click.command(
	"update",
	help="Performs an update operation on current bench. Without any flags will backup, pull, setup requirements, build, run patches and restart bench. Using specific flags will only do certain tasks instead of all",
)
@click.option("--pull", is_flag=True, help="Pull updates for all the apps in bench")
@click.option("--apps", type=str)
@click.option("--patch", is_flag=True, help="Run migrations for all sites in the bench")
@click.option("--build", is_flag=True, help="Build JS and CSS assets for the bench")
@click.option(
	"--requirements",
	is_flag=True,
	help="Update requirements. If run alone, equivalent to `bench setup requirements`",
)
@click.option(
	"--restart-supervisor", is_flag=True, help="Restart supervisor processes after update"
)
@click.option(
	"--restart-systemd", is_flag=True, help="Restart systemd units after update"
)
@click.option(
	"--no-backup",
	is_flag=True,
	help="If this flag is set, sites won't be backed up prior to updates. Note: This is not recommended in production.",
)
@click.option(
	"--no-compile",
	is_flag=True,
	help="[DEPRECATED] This flag doesn't do anything now.",
)
@click.option("--force", is_flag=True, help="Forces major version upgrades")
@click.option(
	"--reset",
	is_flag=True,
	help="Hard resets git branch's to their new states overriding any changes and overriding rebase on pull",
)
def update(
	pull,
	apps,
	patch,
	build,
	requirements,
	restart_supervisor,
	restart_systemd,
	no_backup,
	no_compile,
	force,
	reset,
):
	from bench.utils.bench import update
	update(
		pull=pull,
		apps=apps,
		patch=patch,
		build=build,
		requirements=requirements,
		restart_supervisor=restart_supervisor,
		restart_systemd=restart_systemd,
		backup=not no_backup,
		compile=not no_compile,
		force=force,
		reset=reset,
	)
# Re-run the pull/patch/build/post-upgrade steps of a failed major upgrade.
@click.command("retry-upgrade", help="Retry a failed upgrade")
@click.option("--version", default=5)
def retry_upgrade(version):
	pull_apps()
	patch_sites()
	build_assets()
	# post_upgrade expects (from_version, to_version)
	post_upgrade(version - 1, version)
# Check out `branch` in the given apps (or all apps when none are named).
@click.command(
	"switch-to-branch",
	help="Switch all apps to specified branch, or specify apps separated by space",
)
@click.argument("branch")
@click.argument("apps", nargs=-1)
@click.option("--upgrade", is_flag=True)
def switch_to_branch(branch, apps, upgrade=False):
	from bench.utils.app import switch_to_branch
	switch_to_branch(branch=branch, apps=list(apps), upgrade=upgrade)
# Convenience shortcut for switching frappe + erpnext to their develop branches.
# NOTE(review): the `upgrade` parameter is never passed through — confirm
# whether it is intentionally ignored.
@click.command("switch-to-develop")
def switch_to_develop(upgrade=False):
	"Switch frappe and erpnext to develop branch"
	from bench.utils.app import switch_to_develop
	switch_to_develop(apps=["frappe", "erpnext"])
|
2302_79757062/bench
|
bench/commands/update.py
|
Python
|
agpl-3.0
| 2,798
|
# imports - standard imports
import os
# imports - third party imports
import click
# Launch the development process stack (web, workers, watcher, redis, …)
# via a Procfile-style process manager.
@click.command("start", help="Start Frappe development processes")
@click.option("--no-dev", is_flag=True, default=False)
@click.option(
	"--no-prefix",
	is_flag=True,
	default=False,
	help="Hide process name from bench start log",
)
@click.option("--concurrency", "-c", type=str)
@click.option("--procfile", "-p", type=str)
@click.option("--man", "-m", help="Process Manager of your choice ;)")
def start(no_dev, concurrency, procfile, no_prefix, man):
	from bench.utils.system import start
	start(
		no_dev=no_dev,
		concurrency=concurrency,
		procfile=procfile,
		no_prefix=no_prefix,
		procman=man,
	)
# Restart the bench's managed processes; defaults to supervisor when
# neither --systemd nor --web is requested.
@click.command("restart", help="Restart supervisor processes or systemd units")
@click.option("--web", is_flag=True, default=False)
@click.option("--supervisor", is_flag=True, default=False)
@click.option("--systemd", is_flag=True, default=False)
def restart(web, supervisor, systemd):
	from bench.bench import Bench
	if not systemd and not web:
		supervisor = True
	Bench(".").reload(web, supervisor, systemd)
# Store the nginx port in the site's site_config.json.
@click.command("set-nginx-port", help="Set NGINX port for site")
@click.argument("site")
@click.argument("port", type=int)
def set_nginx_port(site, port):
	from bench.config.site_config import set_nginx_port
	set_nginx_port(site, port)
# Store the SSL certificate path in the site's site_config.json.
@click.command("set-ssl-certificate", help="Set SSL certificate path for site")
@click.argument("site")
@click.argument("ssl-certificate-path")
def set_ssl_certificate(site, ssl_certificate_path):
	from bench.config.site_config import set_ssl_certificate
	set_ssl_certificate(site, ssl_certificate_path)
# Store the SSL private-key path in the site's site_config.json.
@click.command("set-ssl-key", help="Set SSL certificate private key path for site")
@click.argument("site")
@click.argument("ssl-certificate-key-path")
def set_ssl_certificate_key(site, ssl_certificate_key_path):
	from bench.config.site_config import set_ssl_certificate_key
	set_ssl_certificate_key(site, ssl_certificate_key_path)
# Store the public URL root in the site's site_config.json.
@click.command("set-url-root", help="Set URL root for site")
@click.argument("site")
@click.argument("url-root")
def set_url_root(site, url_root):
	from bench.config.site_config import set_url_root
	set_url_root(site, url_root)
# Point every site of this bench at a (possibly remote) MariaDB host.
@click.command("set-mariadb-host", help="Set MariaDB host for bench")
@click.argument("host")
def set_mariadb_host(host):
	from bench.utils.bench import set_mariadb_host
	set_mariadb_host(host)
# Point the bench at an external Redis cache instance.
@click.command("set-redis-cache-host", help="Set Redis cache host for bench")
@click.argument("host")
def set_redis_cache_host(host):
	"""
	Usage: bench set-redis-cache-host localhost:6379/1
	"""
	from bench.utils.bench import set_redis_cache_host
	set_redis_cache_host(host)
# Point the bench at an external Redis queue instance.
@click.command("set-redis-queue-host", help="Set Redis queue host for bench")
@click.argument("host")
def set_redis_queue_host(host):
	"""
	Usage: bench set-redis-queue-host localhost:6379/2
	"""
	from bench.utils.bench import set_redis_queue_host
	set_redis_queue_host(host)
# Point the bench at an external Redis socketio instance.
@click.command("set-redis-socketio-host", help="Set Redis socketio host for bench")
@click.argument("host")
def set_redis_socketio_host(host):
	"""
	Usage: bench set-redis-socketio-host localhost:6379/3
	"""
	from bench.utils.bench import set_redis_socketio_host
	set_redis_socketio_host(host)
# Fetch the latest translation files for installed apps.
@click.command("download-translations", help="Download latest translations")
def download_translations():
	from bench.utils.translation import download_translations_p
	download_translations_p()
# Reinstall the renewal cron job and renew Let's Encrypt certificates now.
@click.command(
	"renew-lets-encrypt", help="Sets Up latest cron and Renew Let's Encrypt certificate"
)
def renew_lets_encrypt():
	from bench.config.lets_encrypt import renew_certs
	renew_certs()
# Take a backup of every site in the current bench.
@click.command("backup-all-sites", help="Backup all sites in current bench")
def backup_all_sites():
	from bench.utils.system import backup_all_sites
	backup_all_sites(bench_path=".")
# Tear down the production setup (nginx/supervisor config) for this bench.
@click.command(
	"disable-production", help="Disables production environment for the bench."
)
def disable_production():
	from bench.config.production_setup import disable_production
	disable_production(bench_path=".")
# Print the installed bench package's source directory.
@click.command(
	"src", help="Prints bench source folder path, which can be used as: cd `bench src`"
)
def bench_src():
	from bench.cli import src
	print(os.path.dirname(src))
# Recursively list bench directories under `location` (cwd when empty).
@click.command("find", help="Finds benches recursively from location")
@click.argument("location", default="")
def find_benches(location):
	from bench.utils import find_benches
	find_benches(directory=location)
# Rebuild the bench virtualenv against a different Python interpreter.
# --no-backup *disables* backing up the old env (flag stores False).
@click.command(
	"migrate-env", help="Migrate Virtual Environment to desired Python Version"
)
@click.argument("python", type=str)
@click.option("--no-backup", "backup", is_flag=True, default=True)
def migrate_env(python, backup=True):
	from bench.utils.bench import migrate_env
	migrate_env(python=python, backup=backup)
# Inspect or prune the shared `bench get-app` download cache.
@click.command("app-cache", help="View or remove items belonging to bench get-app cache")
@click.option("--clear", is_flag=True, default=False, help="Remove all items")
@click.option(
	"--remove-app",
	default="",
	help="Removes all items that match provided app name",
)
@click.option(
	"--remove-key",
	default="",
	help="Removes all items that matches provided cache key",
)
def app_cache_helper(clear=False, remove_app="", remove_key=""):
	from bench.utils.bench import cache_helper
	cache_helper(clear, remove_app, remove_key)
|
2302_79757062/bench
|
bench/commands/utils.py
|
Python
|
agpl-3.0
| 5,331
|
"""Module for setting up system and respective bench configurations"""
def env():
	"""Build a Jinja2 environment whose templates live in the bench.config package."""
	from jinja2 import Environment, PackageLoader

	loader = PackageLoader("bench.config")
	return Environment(loader=loader)
|
2302_79757062/bench
|
bench/config/__init__.py
|
Python
|
agpl-3.0
| 190
|
# imports - standard imports
import getpass
import json
import os
# Baseline keys merged into common_site_config.json whenever a bench's
# config is (re)generated by setup_config().
default_config = {
	"restart_supervisor_on_update": False,
	"restart_systemd_on_update": False,
	"serve_default_site": True,
	"rebase_on_pull": False,
	"frappe_user": getpass.getuser(),
	"shallow_clone": True,
	"background_workers": 1,
	"use_redis_auth": False,
	"live_reload": True,
}
# Default gunicorn max-requests before a worker is recycled
# (see get_default_max_requests below).
DEFAULT_MAX_REQUESTS = 5000
def setup_config(bench_path, additional_config=None):
	"""(Re)generate common_site_config.json for the bench at ``bench_path``.

	Merges, in order: existing config, defaults, gunicorn worker count,
	frappe ports, then ``additional_config`` (which wins on conflicts).
	"""
	make_pid_folder(bench_path)
	bench_config = get_config(bench_path)
	bench_config.update(default_config)
	bench_config.update(get_gunicorn_workers())
	update_config_for_frappe(bench_config, bench_path)
	if additional_config:
		bench_config.update(additional_config)
	put_config(bench_config, bench_path)
def get_config(bench_path):
	"""Alias for get_common_site_config()."""
	return get_common_site_config(bench_path)
def get_common_site_config(bench_path):
	"""Load sites/common_site_config.json as a dict ({} if the file is missing)."""
	config_path = get_config_path(bench_path)
	if not os.path.exists(config_path):
		return {}
	with open(config_path) as f:
		return json.load(f)
def put_config(config, bench_path="."):
	"""Write ``config`` to sites/common_site_config.json (pretty, sorted keys)."""
	config_path = get_config_path(bench_path)
	with open(config_path, "w") as f:
		return json.dump(config, f, indent=1, sort_keys=True)
def update_config(new_config, bench_path="."):
	"""Merge ``new_config`` into the existing common site config and save it."""
	config = get_config(bench_path=bench_path)
	config.update(new_config)
	put_config(config, bench_path=bench_path)
def get_config_path(bench_path):
	"""Path of the shared site config file for the bench at ``bench_path``."""
	sites_dir = os.path.join(bench_path, "sites")
	return os.path.join(sites_dir, "common_site_config.json")
def get_gunicorn_workers():
	"""Return the gunicorn worker count for this machine.

	Uses the common 2*CPU+1 heuristic based on the number of CPUs present.
	"""
	import multiprocessing

	worker_count = 2 * multiprocessing.cpu_count() + 1
	return {"gunicorn_workers": worker_count}
def compute_max_requests_jitter(max_requests: int) -> int:
	"""10% of ``max_requests``, used as gunicorn's max-requests jitter."""
	return int(0.1 * max_requests)
def get_default_max_requests(worker_count: int) -> int:
	"""Get max requests and jitter config based on number of available workers."""
	if worker_count <= 1:
		# If there's only one worker then random restart can cause spikes in response times and
		# can be annoying. Hence not enabled by default.
		return 0
	return DEFAULT_MAX_REQUESTS
def update_config_for_frappe(config, bench_path):
	"""Fill in redis URLs and server ports for keys not already present.

	Port numbers come from make_ports(), which avoids clashes with sibling
	benches; existing values in ``config`` are never overwritten.
	"""
	ports = make_ports(bench_path)
	for key in ("redis_cache", "redis_queue", "redis_socketio"):
		if key not in config:
			config[key] = f"redis://127.0.0.1:{ports[key]}"
	for key in ("webserver_port", "socketio_port", "file_watcher_port"):
		if key not in config:
			config[key] = ports[key]
def make_ports(bench_path):
	"""Pick a conflict-free port for each frappe service.

	Scans sibling bench directories (children of the parent of ``bench_path``)
	for ports already configured and returns, for each service, either the
	default port or max(existing)+1.
	"""
	from urllib.parse import urlparse
	benches_path = os.path.dirname(os.path.abspath(bench_path))
	default_ports = {
		"webserver_port": 8000,
		"socketio_port": 9000,
		"file_watcher_port": 6787,
		"redis_queue": 11000,
		"redis_socketio": 13000,
		"redis_cache": 13000,
	}
	# collect all existing ports
	existing_ports = {}
	for folder in os.listdir(benches_path):
		bench_path = os.path.join(benches_path, folder)
		if os.path.isdir(bench_path):
			bench_config = get_config(bench_path)
			for key in list(default_ports.keys()):
				value = bench_config.get(key)
				# extract port from redis url
				if value and (key in ("redis_cache", "redis_queue", "redis_socketio")):
					value = urlparse(value).port
				if value:
					existing_ports.setdefault(key, []).append(value)
	# new port value = max of existing port value + 1
	ports = {}
	for key, value in list(default_ports.items()):
		existing_value = existing_ports.get(key, [])
		if existing_value:
			value = max(existing_value) + 1
		ports[key] = value
	# Backward compatbility: always keep redis_cache and redis_socketio port same
	# Note: not required from v15
	ports["redis_socketio"] = ports["redis_cache"]
	return ports
def make_pid_folder(bench_path):
	"""Ensure the bench's config/pids directory exists (holds process PID files)."""
	pids_path = os.path.join(bench_path, "config", "pids")
	# exist_ok avoids the check-then-create race of the previous
	# os.path.exists()/os.makedirs() pair.
	os.makedirs(pids_path, exist_ok=True)
|
2302_79757062/bench
|
bench/config/common_site_config.py
|
Python
|
agpl-3.0
| 3,841
|
# imports - standard imports
import os
# imports - third party imports
import click
# imports - module imports
import bench
from bench.config.nginx import make_nginx_conf
from bench.config.production_setup import service
from bench.config.site_config import get_domains, remove_domain, update_site_config
from bench.bench import Bench
from bench.utils import exec_cmd, which
from bench.utils.bench import update_common_site_config
from bench.exceptions import CommandFailedError
def setup_letsencrypt(site, custom_domain, bench_path, interactive):
	"""Obtain a Let's Encrypt certificate for ``site`` (or ``custom_domain``).

	Validates that the site/domain exists and that DNS multitenancy is
	enabled, writes a certbot config, runs certbot (stopping nginx while it
	runs) and installs the renewal cron job.
	"""
	site_path = os.path.join(bench_path, "sites", site, "site_config.json")
	if not os.path.exists(os.path.dirname(site_path)):
		print("No site named " + site)
		return
	if custom_domain:
		domains = get_domains(site, bench_path)
		for d in domains:
			# domains may be plain strings or dicts carrying SSL info
			if isinstance(d, dict) and d["domain"] == custom_domain:
				print(f"SSL for Domain {custom_domain} already exists")
				return
		if custom_domain not in domains:
			print(f"No custom domain named {custom_domain} set for site")
			return
	if interactive:
		click.confirm(
			"Running this will stop the nginx service temporarily causing your sites to go offline\n"
			"Do you want to continue?",
			abort=True,
		)
	if not Bench(bench_path).conf.get("dns_multitenant"):
		print("You cannot setup SSL without DNS Multitenancy")
		return
	create_config(site, custom_domain)
	run_certbot_and_setup_ssl(site, custom_domain, bench_path, interactive)
	setup_crontab()
def create_config(site, custom_domain):
	"""Render the letsencrypt.cfg template for the domain into /etc/letsencrypt/configs."""
	config = (
		bench.config.env()
		.get_template("letsencrypt.cfg")
		.render(domain=custom_domain or site)
	)
	config_path = f"/etc/letsencrypt/configs/{custom_domain or site}.cfg"
	create_dir_if_missing(config_path)
	with open(config_path, "w") as f:
		f.write(config)
def run_certbot_and_setup_ssl(site, custom_domain, bench_path, interactive=True):
	"""Run certbot for the site/domain and wire the issued certificate into
	the site config, regenerating nginx config afterwards.

	nginx is stopped while certbot runs and restarted in every code path.
	"""
	service("nginx", "stop")
	try:
		# certbot's -n flag suppresses all prompts
		interactive = "" if interactive else "-n"
		exec_cmd(
			f"{get_certbot_path()} {interactive} --config /etc/letsencrypt/configs/{custom_domain or site}.cfg certonly"
		)
	except CommandFailedError:
		service("nginx", "start")
		print("There was a problem trying to setup SSL for your site")
		return
	ssl_path = f"/etc/letsencrypt/live/{custom_domain or site}/"
	ssl_config = {
		"ssl_certificate": os.path.join(ssl_path, "fullchain.pem"),
		"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem"),
	}
	if custom_domain:
		# store the cert on the domain entry rather than the site itself
		remove_domain(site, custom_domain, bench_path)
		domains = get_domains(site, bench_path)
		ssl_config["domain"] = custom_domain
		domains.append(ssl_config)
		update_site_config(site, {"domains": domains}, bench_path=bench_path)
	else:
		update_site_config(site, ssl_config, bench_path=bench_path)
	make_nginx_conf(bench_path)
	service("nginx", "start")
def setup_crontab():
	"""Install (idempotently) a root cron job that renews certificates and
	reloads nginx.

	NOTE(review): the job comment says "every month" but "0 0 */1 * *"
	actually fires daily at midnight — confirm which cadence is intended.
	"""
	from crontab import CronTab
	job_command = (
		f'{get_certbot_path()} renew -a nginx --post-hook "systemctl reload nginx"'
	)
	job_comment = "Renew lets-encrypt every month"
	print(f"Setting Up cron job to {job_comment}")
	system_crontab = CronTab(user="root")
	for job in system_crontab.find_comment(comment=job_comment):  # Removes older entries
		system_crontab.remove(job)
	job = system_crontab.new(command=job_command, comment=job_comment)
	job.setall("0 0 */1 * *")  # Run at 00:00 every day-of-month
	system_crontab.write()
def create_dir_if_missing(path):
	"""Create the parent directory of ``path`` (a file path) if it is missing."""
	# exist_ok avoids the check-then-create race of the previous
	# os.path.exists()/os.makedirs() pair.
	os.makedirs(os.path.dirname(path), exist_ok=True)
def get_certbot_path():
	"""Return the certbot executable's path; raise CommandFailedError with
	install instructions when certbot is not on PATH."""
	try:
		return which("certbot", raise_err=True)
	except FileNotFoundError:
		raise CommandFailedError(
			"Certbot is not installed on your system. Please visit https://certbot.eff.org/instructions for installation instructions, then try again."
		)
def renew_certs():
	"""Renew all certbot certificates now (needs sudo; nginx is stopped for
	the duration and restarted afterwards)."""
	click.confirm(
		"Running this will stop the nginx service temporarily causing your sites to go offline\n"
		"Do you want to continue?",
		abort=True,
	)
	setup_crontab()
	service("nginx", "stop")
	exec_cmd(f"{get_certbot_path()} renew")
	service("nginx", "start")
def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):
	"""Obtain a wildcard certificate via certbot's manual DNS challenge and
	record it as the bench-wide "wildcard" entry in common_site_config.json.

	Requires DNS multitenancy. The base domain is included alongside the
	wildcard unless ``exclude_base_domain`` is set.
	"""
	def _get_domains(domain):
		# Build the -d list: always the wildcard, plus optionally the base domain.
		domain_list = [domain]
		if not domain.startswith("*."):
			# add wildcard character to domain if missing
			domain_list.append(f"*.{domain}")
		else:
			# include base domain based on flag
			domain_list.append(domain.replace("*.", ""))
			if exclude_base_domain:
				domain_list.remove(domain.replace("*.", ""))
		return domain_list
	if not Bench(bench_path).conf.get("dns_multitenant"):
		print("You cannot setup SSL without DNS Multitenancy")
		return
	domain_list = _get_domains(domain.strip())
	email_param = ""
	if email:
		email_param = f"--email {email}"
	try:
		exec_cmd(
			f"{get_certbot_path()} certonly --manual --preferred-challenges=dns {email_param} \
			--server https://acme-v02.api.letsencrypt.org/directory \
			--agree-tos -d {' -d '.join(domain_list)}"
		)
	except CommandFailedError:
		print("There was a problem trying to setup SSL")
		return
	ssl_path = f"/etc/letsencrypt/live/{domain}/"
	ssl_config = {
		"wildcard": {
			"domain": domain,
			"ssl_certificate": os.path.join(ssl_path, "fullchain.pem"),
			"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem"),
		}
	}
	update_common_site_config(ssl_config)
	setup_crontab()
	make_nginx_conf(bench_path)
	# Fixed user-facing typo ("Restrting" -> "Restarting").
	print("Restarting Nginx service")
	service("nginx", "restart")
|
2302_79757062/bench
|
bench/config/lets_encrypt.py
|
Python
|
agpl-3.0
| 5,423
|
# imports - standard imports
import hashlib
import os
import random
import string
# imports - third party imports
import click
# imports - module imports
import bench
import bench.config
from bench.bench import Bench
from bench.utils import get_bench_name
def make_nginx_conf(bench_path, yes=False, logging=None, log_format=None):
	"""Render config/nginx.conf for every site of the bench.

	Prompts before overwriting an existing nginx.conf unless ``yes`` is set.
	``logging``/``log_format`` (when not "none") enable nginx access logging.
	"""
	conf_path = os.path.join(bench_path, "config", "nginx.conf")
	if not yes and os.path.exists(conf_path):
		if not click.confirm(
			"nginx.conf already exists and this will overwrite it. Do you want to continue?"
		):
			return
	template = bench.config.env().get_template("nginx.conf")
	bench_path = os.path.abspath(bench_path)
	sites_path = os.path.join(bench_path, "sites")
	config = Bench(bench_path).conf
	sites = prepare_sites(config, bench_path)
	bench_name = get_bench_name(bench_path)
	allow_rate_limiting = config.get("allow_rate_limiting", False)
	template_vars = {
		"sites_path": sites_path,
		"http_timeout": config.get("http_timeout"),
		"sites": sites,
		"webserver_port": config.get("webserver_port"),
		"socketio_port": config.get("socketio_port"),
		"bench_name": bench_name,
		"error_pages": get_error_pages(),
		"allow_rate_limiting": allow_rate_limiting,
		# for nginx map variable
		"random_string": "".join(random.choice(string.ascii_lowercase) for i in range(7)),
	}
	if logging and logging != "none":
		_log_format = ""
		if log_format and log_format != "none":
			_log_format = log_format
		template_vars["logging"] = {"level": logging, "log_format": _log_format}
	if allow_rate_limiting:
		template_vars.update(
			{
				# hashlib.sha256 requires bytes; hashing the bare str raised a
				# TypeError whenever rate limiting was enabled.
				"bench_name_hash": hashlib.sha256(bench_name.encode()).hexdigest()[:16],
				"limit_conn_shared_memory": get_limit_conn_shared_memory(),
			}
		)
	nginx_conf = template.render(**template_vars)
	with open(conf_path, "w") as f:
		f.write(nginx_conf)
def make_bench_manager_nginx_conf(bench_path, yes=False, port=23624, domain=None):
	"""Append a bench-manager server block to config/nginx.conf.

	Reuses the SSL certificate of the hosting ``domain``'s site config; asks
	for confirmation before appending unless ``yes`` is set.
	"""
	from bench.config.site_config import get_site_config
	template = bench.config.env().get_template("bench_manager_nginx.conf")
	bench_path = os.path.abspath(bench_path)
	sites_path = os.path.join(bench_path, "sites")
	config = Bench(bench_path).conf
	site_config = get_site_config(domain, bench_path=bench_path)
	bench_name = get_bench_name(bench_path)
	template_vars = {
		"port": port,
		"domain": domain,
		"bench_manager_site_name": "bench-manager.local",
		"sites_path": sites_path,
		"http_timeout": config.get("http_timeout"),
		"webserver_port": config.get("webserver_port"),
		"socketio_port": config.get("socketio_port"),
		"bench_name": bench_name,
		"error_pages": get_error_pages(),
		"ssl_certificate": site_config.get("ssl_certificate"),
		"ssl_certificate_key": site_config.get("ssl_certificate_key"),
	}
	bench_manager_nginx_conf = template.render(**template_vars)
	conf_path = os.path.join(bench_path, "config", "nginx.conf")
	if not yes and os.path.exists(conf_path):
		click.confirm(
			"nginx.conf already exists and bench-manager configuration will be appended to it. Do you want to continue?",
			abort=True,
		)
	# note: appended, not overwritten — the existing site config is preserved
	with open(conf_path, "a") as myfile:
		myfile.write(bench_manager_nginx_conf)
def prepare_sites(config, bench_path):
	"""Classify the bench's sites for the nginx template.

	Returns a dict with lists "that_use_port" / "that_use_dns" /
	"that_use_ssl" / "that_use_wildcard_ssl" plus a domain->site map.
	In port-based multitenancy, assigns a free port to each site and raises
	on unresolvable port conflicts.
	"""
	sites = {
		"that_use_port": [],
		"that_use_dns": [],
		"that_use_ssl": [],
		"that_use_wildcard_ssl": [],
	}
	domain_map = {}
	ports_in_use = {}
	dns_multitenant = config.get("dns_multitenant")
	shared_port_exception_found = False
	sites_configs = get_sites_with_config(bench_path=bench_path)
	# preload all preset site ports to avoid conflicts
	if not dns_multitenant:
		for site in sites_configs:
			if site.get("port"):
				if not site["port"] in ports_in_use:
					ports_in_use[site["port"]] = []
				ports_in_use[site["port"]].append(site["name"])
	for site in sites_configs:
		if dns_multitenant:
			domain = site.get("domain")
			if domain:
				# when site's folder name is different than domain name
				domain_map[domain] = site["name"]
			site_name = domain or site["name"]
			if site.get("wildcard"):
				sites["that_use_wildcard_ssl"].append(site_name)
				# first wildcard site seen supplies the shared certificate paths
				if not sites.get("wildcard_ssl_certificate"):
					sites["wildcard_ssl_certificate"] = site["ssl_certificate"]
					sites["wildcard_ssl_certificate_key"] = site["ssl_certificate_key"]
			elif site.get("ssl_certificate") and site.get("ssl_certificate_key"):
				sites["that_use_ssl"].append(site)
			else:
				sites["that_use_dns"].append(site_name)
		else:
			# port-based multitenancy: pick the next free port from 8001 up
			if not site.get("port"):
				site["port"] = 80
				if site["port"] in ports_in_use:
					site["port"] = 8001
				while site["port"] in ports_in_use:
					site["port"] += 1
			if site["port"] in ports_in_use and not site["name"] in ports_in_use[site["port"]]:
				shared_port_exception_found = True
				ports_in_use[site["port"]].append(site["name"])
			else:
				ports_in_use[site["port"]] = []
				ports_in_use[site["port"]].append(site["name"])
			sites["that_use_port"].append(site)
	if not dns_multitenant and shared_port_exception_found:
		message = "Port conflicts found:"
		port_conflict_index = 0
		for port_number in ports_in_use:
			if len(ports_in_use[port_number]) > 1:
				port_conflict_index += 1
				message += f"\n{port_conflict_index} - Port {port_number} is shared among sites:"
				for site_name in ports_in_use[port_number]:
					message += f" {site_name}"
		raise Exception(message)
	if not dns_multitenant:
		message = "Port configuration list:"
		for site in sites_configs:
			message += f"\n\nSite {site['name']} assigned port: {site['port']}"
		print(message)
	sites["domain_map"] = domain_map
	return sites
def get_sites_with_config(bench_path):
	"""Return one entry per site (and, under DNS multitenancy, per custom
	domain) with name/port/SSL info, applying any wildcard certificate.

	A broken site_config.json is skipped with a warning unless the
	"strict_nginx" flag is set, in which case the error is re-raised.
	"""
	from bench.bench import Bench
	from bench.config.site_config import get_site_config
	bench = Bench(bench_path)
	sites = bench.sites
	conf = bench.conf
	dns_multitenant = conf.get("dns_multitenant")
	ret = []
	for site in sites:
		try:
			site_config = get_site_config(site, bench_path=bench_path)
		except Exception as e:
			strict_nginx = conf.get("strict_nginx")
			if strict_nginx:
				print(
					f"\n\nERROR: The site config for the site {site} is broken.",
					"If you want this command to pass, instead of just throwing an error,",
					"You may remove the 'strict_nginx' flag from common_site_config.json or set it to 0",
					"\n\n",
				)
				raise e
			else:
				print(
					f"\n\nWARNING: The site config for the site {site} is broken.",
					"If you want this command to fail, instead of just showing a warning,",
					"You may add the 'strict_nginx' flag to common_site_config.json and set it to 1",
					"\n\n",
				)
				continue
		ret.append(
			{
				"name": site,
				"port": site_config.get("nginx_port"),
				"ssl_certificate": site_config.get("ssl_certificate"),
				"ssl_certificate_key": site_config.get("ssl_certificate_key"),
			}
		)
		if dns_multitenant and site_config.get("domains"):
			for domain in site_config.get("domains"):
				# domain can be a string or a dict with 'domain', 'ssl_certificate', 'ssl_certificate_key'
				if isinstance(domain, str):
					domain = {"domain": domain}
				domain["name"] = site
				ret.append(domain)
	use_wildcard_certificate(bench_path, ret)
	return ret
def use_wildcard_certificate(bench_path, ret):
	"""Apply a bench-wide wildcard SSL certificate to matching site entries.

	Stored in common_site_config.json as::

		"wildcard": {
			"domain": "*.erpnext.com",
			"ssl_certificate": "/path/to/erpnext.com.cert",
			"ssl_certificate_key": "/path/to/erpnext.com.key"
		}

	Entries in ``ret`` that already carry their own certificate are untouched.
	"""
	from bench.bench import Bench

	wildcard = Bench(bench_path).conf.get("wildcard")
	if not wildcard:
		return

	cert = wildcard["ssl_certificate"]
	key = wildcard["ssl_certificate_key"]
	suffix = wildcard["domain"]

	# A leading "*" means "any subdomain"; otherwise match ".<domain>".
	if suffix.startswith("*"):
		suffix = suffix[1:]
	else:
		suffix = "." + suffix

	for entry in ret:
		if entry.get("ssl_certificate"):
			# the site brought its own certificate — leave it alone
			continue

		host = entry.get("domain") or entry["name"]
		if host.endswith(suffix):
			# example: host ends with .erpnext.com
			entry["ssl_certificate"] = cert
			entry["ssl_certificate_key"] = key
			entry["wildcard"] = 1
def get_error_pages():
	"""Map HTTP status codes to the static error pages bundled with bench."""
	template_dir = os.path.join(
		os.path.abspath(bench.__path__[0]), "config", "templates"
	)
	return {502: os.path.join(template_dir, "502.html")}
def get_limit_conn_shared_memory():
	"""Allocate 2 percent of total virtual memory as shared memory for nginx limit_conn_zone"""
	page_size = os.sysconf("SC_PAGE_SIZE")
	page_count = os.sysconf("SC_PHYS_PAGES")
	total_vm_mb = (page_size * page_count) / (1024 * 1024)
	return int(0.02 * total_vm_mb)
|
2302_79757062/bench
|
bench/config/nginx.py
|
Python
|
agpl-3.0
| 8,544
|
import os
import platform
import click
import bench
from bench.app import use_rq
from bench.bench import Bench
from bench.utils import which
def setup_procfile(bench_path, yes=False, skip_redis=False):
	"""Render the Procfile template into ``bench_path``/Procfile.

	Prompts before overwriting an existing Procfile unless ``yes`` is given;
	``skip_redis`` is forwarded to the template so redis processes can be
	left out of the rendered file.
	"""
	config = Bench(bench_path).conf
	procfile_path = os.path.join(bench_path, "Procfile")

	if os.path.exists(procfile_path) and not yes:
		click.confirm(
			"A Procfile already exists and this will overwrite it. Do you want to continue?",
			abort=True,
		)

	context = {
		"node": which("node") or which("nodejs"),
		"use_rq": use_rq(bench_path),
		"webserver_port": config.get("webserver_port"),
		"CI": os.environ.get("CI"),
		"skip_redis": skip_redis,
		"workers": config.get("workers", {}),
		"is_mac": platform.system() == "Darwin",
	}
	rendered = bench.config.env().get_template("Procfile").render(**context)

	with open(procfile_path, "w") as f:
		f.write(rendered)
|
2302_79757062/bench
|
bench/config/procfile.py
|
Python
|
agpl-3.0
| 872
|
# imports - standard imports
import contextlib
import os
import logging
import sys
# imports - module imports
import bench
from bench.config.nginx import make_nginx_conf
from bench.config.supervisor import (
generate_supervisor_config,
check_supervisord_config,
)
from bench.config.systemd import generate_systemd_config
from bench.bench import Bench
from bench.utils import exec_cmd, which, get_bench_name, get_cmd_output, log
from bench.utils.system import fix_prod_setup_perms
from bench.exceptions import CommandFailedError
logger = logging.getLogger(bench.PROJECT_NAME)
def setup_production_prerequisites():
	"""Install the production toolchain: ansible, fail2ban, NGINX and supervisor."""
	# (binary to probe for, command that installs it when missing)
	required = (
		("ansible", f"sudo {sys.executable} -m pip install ansible"),
		("fail2ban-client", "bench setup role fail2ban"),
		("nginx", "bench setup role nginx"),
		("supervisord", "bench setup role supervisor"),
	)
	for binary, install_command in required:
		if not which(binary):
			exec_cmd(install_command)
def setup_production(user, bench_path=".", yes=False):
	"""Configure this bench for production use.

	Installs prerequisites, generates process-manager configs (systemd or
	supervisor — mutually exclusive), writes the nginx config, fixes
	permissions, symlinks the generated configs into the system directories
	and reloads the affected services.
	"""
	print("Setting Up prerequisites...")
	setup_production_prerequisites()

	conf = Bench(bench_path).conf

	# supervisor and systemd would both try to manage the same processes
	if conf.get("restart_supervisor_on_update") and conf.get("restart_systemd_on_update"):
		raise Exception(
			"You cannot use supervisor and systemd at the same time. Modify your common_site_config accordingly."
		)

	if conf.get("restart_systemd_on_update"):
		print("Setting Up systemd...")
		generate_systemd_config(bench_path=bench_path, user=user, yes=yes)
	else:
		print("Setting Up supervisor...")
		check_supervisord_config(user=user)
		generate_supervisor_config(bench_path=bench_path, user=user, yes=yes)

	print("Setting Up NGINX...")
	make_nginx_conf(bench_path=bench_path, yes=yes)
	fix_prod_setup_perms(bench_path, frappe_user=user)
	remove_default_nginx_configs()

	bench_name = get_bench_name(bench_path)
	nginx_conf = f"/etc/nginx/conf.d/{bench_name}.conf"

	print("Setting Up symlinks and reloading services...")

	if conf.get("restart_supervisor_on_update"):
		# CentOS 7 supervisord only includes *.ini files from its conf dir
		supervisor_conf_extn = "ini" if is_centos7() else "conf"
		supervisor_conf = os.path.join(
			get_supervisor_confdir(), f"{bench_name}.{supervisor_conf_extn}"
		)

		# Check if symlink exists, If not then create it.
		if not os.path.islink(supervisor_conf):
			os.symlink(
				os.path.abspath(os.path.join(bench_path, "config", "supervisor.conf")),
				supervisor_conf,
			)

	if not os.path.islink(nginx_conf):
		os.symlink(
			os.path.abspath(os.path.join(bench_path, "config", "nginx.conf")), nginx_conf
		)

	if conf.get("restart_supervisor_on_update"):
		reload_supervisor()

	# escape hatch for environments that manage service restarts themselves
	if os.environ.get("NO_SERVICE_RESTART"):
		return

	reload_nginx()
def disable_production(bench_path="."):
	"""Undo production setup: drop the supervisor and nginx symlinks and reload."""
	bench_name = get_bench_name(bench_path)
	conf = Bench(bench_path).conf

	# supervisorctl
	extension = "ini" if is_centos7() else "conf"
	supervisor_conf = os.path.join(get_supervisor_confdir(), f"{bench_name}.{extension}")
	if os.path.islink(supervisor_conf):
		os.unlink(supervisor_conf)
	if conf.get("restart_supervisor_on_update"):
		reload_supervisor()

	# nginx
	nginx_conf = f"/etc/nginx/conf.d/{bench_name}.conf"
	if os.path.islink(nginx_conf):
		os.unlink(nginx_conf)
	reload_nginx()
def service(service_name, service_option):
	"""Run ``service_option`` (start/stop/restart/...) on ``service_name``.

	Prefers systemctl when PID 1 is systemd, then the SysV ``service``
	command, then a user-supplied manager from the environment; logs an
	error when no service manager can be found.
	"""
	if os.path.basename(which("systemctl") or "") == "systemctl" and is_running_systemd():
		exec_cmd(f"sudo systemctl {service_option} {service_name}")

	elif os.path.basename(which("service") or "") == "service":
		exec_cmd(f"sudo service {service_name} {service_option}")

	else:
		# look for 'service_manager' and 'service_manager_command' in environment
		service_manager = os.environ.get("BENCH_SERVICE_MANAGER")
		if service_manager:
			service_manager_command = (
				os.environ.get("BENCH_SERVICE_MANAGER_COMMAND")
				# BUGFIX: interpolate the service *name* — the original
				# f-string embedded this function object (`service`) instead
				# of the `service_name` argument.
				or f"{service_manager} {service_option} {service_name}"
			)
			exec_cmd(service_manager_command)

		else:
			log(
				f"No service manager found: '{service_name} {service_option}' failed to execute",
				level=2,
			)
def get_supervisor_confdir():
	"""Return the first existing supervisor include directory, else None."""
	candidates = (
		"/etc/supervisor/conf.d",
		"/etc/supervisor.d/",
		"/etc/supervisord/conf.d",
		"/etc/supervisord.d",
	)
	for candidate in candidates:
		if os.path.exists(candidate):
			return candidate
def remove_default_nginx_configs():
	"""Delete stock nginx configs that would shadow the bench's server blocks."""
	for conf_file in (
		"/etc/nginx/conf.d/default.conf",
		"/etc/nginx/sites-enabled/default",
	):
		if os.path.exists(conf_file):
			os.unlink(conf_file)
def is_centos7():
	"""Return True when the host is a CentOS/RHEL 7 system."""
	if not os.path.exists("/etc/redhat-release"):
		return False
	# pull the major version number out of /etc/redhat-release
	major = get_cmd_output(
		r"cat /etc/redhat-release | sed 's/Linux\ //g' | cut -d' ' -f3 | cut -d. -f1"
	).strip()
	return major == "7"
def is_running_systemd():
	"""Return True when PID 1 is systemd (anything else counts as no-systemd)."""
	with open("/proc/1/comm") as f:
		comm = f.read().strip()
	return comm == "systemd"
def reload_supervisor():
	"""Make supervisor pick up config changes, trying progressively blunter tools."""
	supervisorctl = which("supervisorctl")

	try:
		# first try reread/update
		exec_cmd(f"{supervisorctl} reread")
		exec_cmd(f"{supervisorctl} update")
		return
	except CommandFailedError:
		pass

	try:
		# something is wrong, so try reloading
		exec_cmd(f"{supervisorctl} reload")
		return
	except CommandFailedError:
		pass

	try:
		# then try restart for centos
		service("supervisord", "restart")
		return
	except CommandFailedError:
		pass

	try:
		# else try restart for ubuntu / debian
		service("supervisor", "restart")
		return
	except CommandFailedError:
		pass
def reload_nginx():
	"""Validate the nginx configuration (nginx -t) and reload the service."""
	nginx_binary = which("nginx")
	exec_cmd(f"sudo {nginx_binary} -t")
	service("nginx", "reload")
|
2302_79757062/bench
|
bench/config/production_setup.py
|
Python
|
agpl-3.0
| 5,574
|
# imports - standard imports
import os
import re
import subprocess
# imports - module imports
import bench
def generate_config(bench_path):
	"""Render redis_queue.conf and redis_cache.conf into ``bench_path``/config.

	Ports are taken from the redis URLs in common_site_config; also creates
	the pids directory and, for Redis >= 6, empty ACL files for both servers.
	"""
	from urllib.parse import urlparse

	from bench.bench import Bench

	config = Bench(bench_path).conf
	redis_version = get_redis_version()

	ports = {}
	for key in ("redis_cache", "redis_queue"):
		# e.g. "redis://localhost:11000" -> 11000
		ports[key] = urlparse(config[key]).port

	write_redis_config(
		template_name="redis_queue.conf",
		context={
			"port": ports["redis_queue"],
			"bench_path": os.path.abspath(bench_path),
			"redis_version": redis_version,
		},
		bench_path=bench_path,
	)

	write_redis_config(
		template_name="redis_cache.conf",
		context={
			# explicit cache_maxmemory wins over the derived default
			"maxmemory": config.get("cache_maxmemory", get_max_redis_memory()),
			"port": ports["redis_cache"],
			"redis_version": redis_version,
		},
		bench_path=bench_path,
	)

	# make pids folder
	pid_path = os.path.join(bench_path, "config", "pids")
	if not os.path.exists(pid_path):
		os.makedirs(pid_path)

	# ACL feature is introduced in Redis 6.0
	if redis_version < 6.0:
		return

	# make ACL files (touched so they exist; 'a' mode keeps existing content)
	acl_rq_path = os.path.join(bench_path, "config", "redis_queue.acl")
	acl_redis_cache_path = os.path.join(bench_path, "config", "redis_cache.acl")
	open(acl_rq_path, "a").close()
	open(acl_redis_cache_path, "a").close()
def write_redis_config(template_name, context, bench_path):
	"""Render a redis config template into ``bench_path``/config/<template_name>.

	``config_path`` and ``pid_path`` are derived from ``bench_path`` when the
	caller does not supply them.
	"""
	# work on a copy so the caller's dict is never mutated as a side effect
	context = dict(context)
	context.setdefault("config_path", os.path.abspath(os.path.join(bench_path, "config")))
	context.setdefault("pid_path", os.path.join(context["config_path"], "pids"))

	template = bench.config.env().get_template(template_name)
	with open(os.path.join(bench_path, "config", template_name), "w") as f:
		f.write(template.render(**context))
def get_redis_version():
	"""Return the installed redis-server version as a major.minor float, or None."""
	import semantic_version

	output = subprocess.check_output("redis-server --version", shell=True)
	output = output.decode("utf-8").strip()

	# extract version number from string
	matches = re.findall(r"\d+\.\d+", output)
	if not matches:
		return None

	parsed = semantic_version.Version(matches[0], partial=True)
	return float(f"{parsed.major}.{parsed.minor}")
def get_max_redis_memory():
	"""Return 5% of physical memory (in MB), but never less than 50 MB."""
	try:
		total_bytes = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
	except ValueError:
		# sysconf keys unavailable (e.g. macOS) — fall back to sysctl
		total_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"]).strip())
	return max(50, int((total_bytes / (1024.0**2)) * 0.05))
|
2302_79757062/bench
|
bench/config/redis.py
|
Python
|
agpl-3.0
| 2,418
|
# imports - standard imports
import json
import os
from collections import defaultdict
def get_site_config(site, bench_path="."):
	"""Load sites/<site>/site_config.json; a missing file yields an empty dict."""
	path = os.path.join(bench_path, "sites", site, "site_config.json")
	if not os.path.exists(path):
		return {}
	with open(path) as f:
		return json.load(f)
def put_site_config(site, config, bench_path="."):
	"""Overwrite sites/<site>/site_config.json with ``config`` as indented JSON."""
	path = os.path.join(bench_path, "sites", site, "site_config.json")
	with open(path, "w") as f:
		return json.dump(config, f, indent=1)
def update_site_config(site, new_config, bench_path="."):
	"""Merge ``new_config`` keys into the site's existing site_config.json."""
	merged = get_site_config(site, bench_path=bench_path)
	merged.update(new_config)
	put_site_config(site, merged, bench_path=bench_path)
def set_nginx_port(site, port, bench_path=".", gen_config=True):
	"""Set the site's ``nginx_port`` and optionally regenerate nginx.conf."""
	update = {"nginx_port": port}
	set_site_config_nginx_property(
		site, update, bench_path=bench_path, gen_config=gen_config
	)
def set_ssl_certificate(site, ssl_certificate, bench_path=".", gen_config=True):
	"""Set the site's ``ssl_certificate`` path and optionally regenerate nginx.conf."""
	update = {"ssl_certificate": ssl_certificate}
	set_site_config_nginx_property(
		site, update, bench_path=bench_path, gen_config=gen_config
	)
def set_ssl_certificate_key(site, ssl_certificate_key, bench_path=".", gen_config=True):
	"""Set the site's ``ssl_certificate_key`` path and optionally regenerate nginx.conf."""
	update = {"ssl_certificate_key": ssl_certificate_key}
	set_site_config_nginx_property(
		site, update, bench_path=bench_path, gen_config=gen_config
	)
def set_site_config_nginx_property(site, config, bench_path=".", gen_config=True):
	"""Update nginx-related keys in a site's config; optionally rebuild nginx.conf.

	Raises an Exception when ``site`` does not exist on this bench.
	"""
	from bench.bench import Bench
	from bench.config.nginx import make_nginx_conf

	if site not in Bench(bench_path).sites:
		raise Exception("No such site")

	update_site_config(site, config, bench_path=bench_path)
	if gen_config:
		make_nginx_conf(bench_path=bench_path)
def set_url_root(site, url_root, bench_path="."):
	"""Store ``url_root`` as the site's ``host_name``."""
	update = {"host_name": url_root}
	update_site_config(site, update, bench_path=bench_path)
def add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path="."):
	"""Append ``domain`` (with an optional SSL cert/key pair) to the site's domains.

	Prints a notice and does nothing when the domain already exists.
	"""
	domains = get_domains(site, bench_path)

	# entries are either plain strings or dicts with a 'domain' key
	for existing in domains:
		existing_name = existing["domain"] if isinstance(existing, dict) else existing
		if existing_name == domain:
			print(f"Domain {domain} already exists")
			return

	if ssl_certificate and ssl_certificate_key:
		domain = {
			"domain": domain,
			"ssl_certificate": ssl_certificate,
			"ssl_certificate_key": ssl_certificate_key,
		}

	domains.append(domain)
	update_site_config(site, {"domains": domains}, bench_path=bench_path)
def remove_domain(site, domain, bench_path="."):
	"""Remove ``domain`` from the site's domain list (no-op when absent)."""
	domains = get_domains(site, bench_path)

	# entries are either plain strings or dicts with a 'domain' key;
	# the original enumerate() index was unused and has been dropped
	for d in domains:
		if (isinstance(d, dict) and d["domain"] == domain) or d == domain:
			domains.remove(d)
			break

	update_site_config(site, {"domains": domains}, bench_path=bench_path)
def sync_domains(site, domains, bench_path="."):
	"""Checks if there is a change in domains. If yes, updates the domains list.

	Returns True when the stored list was replaced, False otherwise.
	"""
	changed = False
	existing_domains = get_domains_dict(get_domains(site, bench_path))
	new_domains = get_domains_dict(domains)

	if set(existing_domains) != set(new_domains):
		changed = True
	else:
		# same names — compare the full entries (cert paths may differ)
		for d in existing_domains.values():
			if d != new_domains.get(d["domain"]):
				changed = True
				break

	if changed:
		# replace existing domains with this one
		# BUGFIX: honour the bench_path argument; the original hardcoded "."
		update_site_config(site, {"domains": domains}, bench_path=bench_path)

	return changed
def get_domains(site, bench_path="."):
	"""Return the site's configured domain list (an empty list when unset)."""
	config = get_site_config(site, bench_path=bench_path)
	return config.get("domains") or []
def get_domains_dict(domains):
	"""Index a mixed list of domain strings/dicts by domain name.

	Plain strings are normalized to ``{"domain": <name>}`` entries.
	"""
	indexed = defaultdict(dict)
	for entry in domains:
		if isinstance(entry, str):
			indexed[entry] = {"domain": entry}
		elif isinstance(entry, dict):
			indexed[entry["domain"]] = entry
	return indexed
|
2302_79757062/bench
|
bench/config/site_config.py
|
Python
|
agpl-3.0
| 3,589
|
# imports - standard imports
import getpass
import logging
import os
# imports - third party imports
import click
# imports - module imports
import bench
from bench.app import use_rq
from bench.bench import Bench
from bench.config.common_site_config import (
compute_max_requests_jitter,
get_config,
get_default_max_requests,
get_gunicorn_workers,
update_config,
)
from bench.utils import get_bench_name, which
logger = logging.getLogger(bench.PROJECT_NAME)
def generate_supervisor_config(bench_path, user=None, yes=False, skip_redis=False):
	"""Generate supervisor config for respective bench path.

	Renders config/supervisor.conf (prompting before overwrite unless ``yes``),
	then flips the bench to supervisor-managed restarts and syncs the
	socketio redis port.
	"""
	if not user:
		user = getpass.getuser()

	config = Bench(bench_path).conf
	template = bench.config.env().get_template("supervisor.conf")
	bench_dir = os.path.abspath(bench_path)

	# explicit config values win; otherwise fall back to computed defaults
	web_worker_count = config.get(
		"gunicorn_workers", get_gunicorn_workers()["gunicorn_workers"]
	)
	max_requests = config.get(
		"gunicorn_max_requests", get_default_max_requests(web_worker_count)
	)

	config = template.render(
		**{
			"bench_dir": bench_dir,
			"sites_dir": os.path.join(bench_dir, "sites"),
			"user": user,
			"use_rq": use_rq(bench_path),
			"http_timeout": config.get("http_timeout", 120),
			"redis_server": which("redis-server"),
			"node": which("node") or which("nodejs"),
			"redis_cache_config": os.path.join(bench_dir, "config", "redis_cache.conf"),
			"redis_queue_config": os.path.join(bench_dir, "config", "redis_queue.conf"),
			"webserver_port": config.get("webserver_port", 8000),
			"gunicorn_workers": web_worker_count,
			"gunicorn_max_requests": max_requests,
			"gunicorn_max_requests_jitter": compute_max_requests_jitter(max_requests),
			"bench_name": get_bench_name(bench_path),
			"background_workers": config.get("background_workers") or 1,
			"bench_cmd": which("bench"),
			"skip_redis": skip_redis,
			"workers": config.get("workers", {}),
			"multi_queue_consumption": can_enable_multi_queue_consumption(bench_path),
			"supervisor_startretries": 10,
		}
	)

	conf_path = os.path.join(bench_path, "config", "supervisor.conf")
	if not yes and os.path.exists(conf_path):
		click.confirm(
			"supervisor.conf already exists and this will overwrite it. Do you want to continue?",
			abort=True,
		)

	with open(conf_path, "w") as f:
		f.write(config)

	# supervisor and systemd restart modes are mutually exclusive
	update_config({"restart_supervisor_on_update": True}, bench_path=bench_path)
	update_config({"restart_systemd_on_update": False}, bench_path=bench_path)
	sync_socketio_port(bench_path)
def get_supervisord_conf():
	"""Returns path of supervisord config from possible paths"""
	for candidate in (
		"supervisord.conf",
		"etc/supervisord.conf",
		"/etc/supervisord.conf",
		"/etc/supervisor/supervisord.conf",
		"/etc/supervisord.conf",
	):
		if os.path.exists(candidate):
			return candidate
def sync_socketio_port(bench_path):
	"""Backward compatibility: keep the redis_socketio port equal to redis_cache."""
	common_config = get_config(bench_path=bench_path)

	socketio_port = common_config.get("redis_socketio")
	cache_port = common_config.get("redis_cache")

	if socketio_port and socketio_port != cache_port:
		# BUGFIX: write to the given bench — the original omitted bench_path,
		# so update_config fell back to the current working directory
		update_config({"redis_socketio": cache_port}, bench_path=bench_path)
def can_enable_multi_queue_consumption(bench_path: str) -> bool:
	"""Return True when the installed frappe version (> 14.18.0) supports it."""
	try:
		from semantic_version import Version

		from bench.utils.app import get_current_version

		minimum = Version(major=14, minor=18, patch=0)
		current = Version(get_current_version("frappe", bench_path=bench_path))
		return current > minimum
	except Exception:
		# frappe missing or version unparsable — assume unsupported
		return False
def check_supervisord_config(user=None):
	"""From bench v5.x, we're moving to supervisor running as user.

	Inspects supervisord.conf's [unix_http_server] section and, when the
	chmod/chown values differ from what bench needs, prints the values the
	user should set manually (the conf file itself is no longer rewritten).
	"""
	# i don't think bench should be responsible for this but we're way past this now...
	# removed updating supervisord conf & reload in Aug 2022 - gavin@frappe.io
	import configparser

	if not user:
		user = getpass.getuser()

	supervisord_conf = get_supervisord_conf()
	section = "unix_http_server"
	updated_values = {"chmod": "0760", "chown": f"{user}:{user}"}
	supervisord_conf_changes = ""

	if not supervisord_conf:
		# BUGFIX: Logger.log() requires a level as its first argument;
		# calling it with only a message raises TypeError at runtime
		logger.error("supervisord.conf not found")
		return

	config = configparser.ConfigParser()
	config.read(supervisord_conf)

	if section not in config.sections():
		config.add_section(section)
		action = f"Section {section} Added"
		logger.info(action)
		supervisord_conf_changes += "\n" + action

	for key, value in updated_values.items():
		try:
			current_value = config.get(section, key)
		except configparser.NoOptionError:
			current_value = ""

		if current_value.strip() != value:
			config.set(section, key, value)
			action = (
				f"Updated supervisord.conf: '{key}' changed from '{current_value}' to '{value}'"
			)
			logger.info(action)
			supervisord_conf_changes += "\n" + action

	if not supervisord_conf_changes:
		logger.error("supervisord.conf not updated")

	# NOTE(review): indentation of the trailing print was lost in extraction;
	# it is emitted unconditionally here so the user always sees the target
	# values — confirm against upstream whether it belongs inside the branch.
	contents = "\n".join(f"{x}={y}" for x, y in updated_values.items())
	print(
		f"Update your {supervisord_conf} with the following values:\n[{section}]\n{contents}"
	)
|
2302_79757062/bench
|
bench/config/supervisor.py
|
Python
|
agpl-3.0
| 5,050
|
# imports - standard imports
import getpass
import os
# imports - third partyimports
import click
# imports - module imports
import bench
from bench.app import use_rq
from bench.bench import Bench
from bench.config.common_site_config import (
get_gunicorn_workers,
update_config,
get_default_max_requests,
compute_max_requests_jitter,
)
from bench.utils import exec_cmd, which, get_bench_name
def generate_systemd_config(
	bench_path,
	user=None,
	yes=False,
	stop=False,
	create_symlinks=False,
	delete_symlinks=False,
):
	"""Render (or manage) the systemd unit files for this bench.

	``stop`` stops every unit of the bench target; ``create_symlinks`` /
	``delete_symlinks`` only (un)install the unit files into
	/etc/systemd/system. Otherwise all unit files are rendered into
	``bench_path``/config/systemd (prompting before overwrite unless ``yes``).
	"""
	if not user:
		user = getpass.getuser()

	config = Bench(bench_path).conf
	bench_dir = os.path.abspath(bench_path)
	bench_name = get_bench_name(bench_path)

	if stop:
		exec_cmd(
			f"sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)"
		)
		return

	if create_symlinks:
		_create_symlinks(bench_path)
		return

	if delete_symlinks:
		_delete_symlinks(bench_path)
		return

	number_of_workers = config.get("background_workers") or 1

	# one templated unit instance per worker index, grouped by queue type
	# (replaces three copy-pasted loops that differed only in the type name)
	background_workers = [
		f"{bench_name}-frappe-{worker_type}-worker@{index + 1}.service"
		for worker_type in ("default", "short", "long")
		for index in range(number_of_workers)
	]

	# explicit config values win; otherwise fall back to computed defaults
	web_worker_count = config.get(
		"gunicorn_workers", get_gunicorn_workers()["gunicorn_workers"]
	)
	max_requests = config.get(
		"gunicorn_max_requests", get_default_max_requests(web_worker_count)
	)

	bench_info = {
		"bench_dir": bench_dir,
		"sites_dir": os.path.join(bench_dir, "sites"),
		"user": user,
		"use_rq": use_rq(bench_path),
		"http_timeout": config.get("http_timeout", 120),
		"redis_server": which("redis-server"),
		"node": which("node") or which("nodejs"),
		"redis_cache_config": os.path.join(bench_dir, "config", "redis_cache.conf"),
		"redis_queue_config": os.path.join(bench_dir, "config", "redis_queue.conf"),
		"webserver_port": config.get("webserver_port", 8000),
		"gunicorn_workers": web_worker_count,
		"gunicorn_max_requests": max_requests,
		"gunicorn_max_requests_jitter": compute_max_requests_jitter(max_requests),
		"bench_name": bench_name,
		"worker_target_wants": " ".join(background_workers),
		"bench_cmd": which("bench"),
	}

	if not yes:
		click.confirm(
			"current systemd configuration will be overwritten. Do you want to continue?",
			abort=True,
		)

	setup_systemd_directory(bench_path)
	setup_main_config(bench_info, bench_path)
	setup_workers_config(bench_info, bench_path)
	setup_web_config(bench_info, bench_path)
	setup_redis_config(bench_info, bench_path)

	update_config({"restart_systemd_on_update": False}, bench_path=bench_path)
	update_config({"restart_supervisor_on_update": False}, bench_path=bench_path)
def setup_systemd_directory(bench_path):
	"""Ensure ``bench_path``/config/systemd exists."""
	# exist_ok avoids the race between the exists() check and makedirs()
	os.makedirs(os.path.join(bench_path, "config", "systemd"), exist_ok=True)
def setup_main_config(bench_info, bench_path):
	"""Render the top-level <bench_name>.target unit file."""
	template = bench.config.env().get_template("systemd/frappe-bench.target")
	rendered = template.render(**bench_info)
	target_path = os.path.join(
		bench_path, "config", "systemd", bench_info.get("bench_name") + ".target"
	)
	with open(target_path, "w") as f:
		f.write(rendered)
def setup_workers_config(bench_info, bench_path):
	"""Render the workers target plus the per-queue worker/schedule unit files.

	Replaces five near-identical render/write stanzas with one table-driven
	loop; the rendered files and their contents are unchanged.
	"""
	systemd_dir = os.path.join(bench_path, "config", "systemd")
	bench_name = bench_info.get("bench_name")

	# (template path, output-file suffix appended to the bench name)
	units = (
		("systemd/frappe-bench-workers.target", "-workers.target"),
		(
			"systemd/frappe-bench-frappe-default-worker.service",
			"-frappe-default-worker@.service",
		),
		(
			"systemd/frappe-bench-frappe-short-worker.service",
			"-frappe-short-worker@.service",
		),
		(
			"systemd/frappe-bench-frappe-long-worker.service",
			"-frappe-long-worker@.service",
		),
		("systemd/frappe-bench-frappe-schedule.service", "-frappe-schedule.service"),
	)

	for template_name, suffix in units:
		rendered = bench.config.env().get_template(template_name).render(**bench_info)
		with open(os.path.join(systemd_dir, bench_name + suffix), "w") as f:
			f.write(rendered)
def setup_web_config(bench_info, bench_path):
	"""Render the web target, gunicorn web service and node socketio unit files.

	Table-driven rewrite of three identical render/write stanzas; the rendered
	files and their contents are unchanged.
	"""
	systemd_dir = os.path.join(bench_path, "config", "systemd")
	bench_name = bench_info.get("bench_name")

	# (template path, output-file suffix appended to the bench name)
	units = (
		("systemd/frappe-bench-web.target", "-web.target"),
		("systemd/frappe-bench-frappe-web.service", "-frappe-web.service"),
		("systemd/frappe-bench-node-socketio.service", "-node-socketio.service"),
	)

	for template_name, suffix in units:
		rendered = bench.config.env().get_template(template_name).render(**bench_info)
		with open(os.path.join(systemd_dir, bench_name + suffix), "w") as f:
			f.write(rendered)
def setup_redis_config(bench_info, bench_path):
	"""Render the redis target plus the cache and queue redis unit files.

	Table-driven rewrite of three identical render/write stanzas; the rendered
	files and their contents are unchanged.
	"""
	systemd_dir = os.path.join(bench_path, "config", "systemd")
	bench_name = bench_info.get("bench_name")

	# (template path, output-file suffix appended to the bench name)
	units = (
		("systemd/frappe-bench-redis.target", "-redis.target"),
		("systemd/frappe-bench-redis-cache.service", "-redis-cache.service"),
		("systemd/frappe-bench-redis-queue.service", "-redis-queue.service"),
	)

	for template_name, suffix in units:
		rendered = bench.config.env().get_template(template_name).render(**bench_info)
		with open(os.path.join(systemd_dir, bench_name + suffix), "w") as f:
			f.write(rendered)
def _create_symlinks(bench_path):
	"""Symlink the rendered unit files into /etc/systemd/system and reload systemd."""
	bench_dir = os.path.abspath(bench_path)
	etc_systemd_system = os.path.join("/", "etc", "systemd", "system")
	config_path = os.path.join(bench_dir, "config", "systemd")
	unit_files = get_unit_files(bench_dir)
	for unit_file in unit_files:
		filename = "".join(unit_file)
		# BUGFIX: link the actual unit file — the original left `filename`
		# unused and had a literal "(unknown)" placeholder as the link source
		exec_cmd(f"sudo ln -s {config_path}/{filename} {etc_systemd_system}/{filename}")
	exec_cmd("sudo systemctl daemon-reload")
def _delete_symlinks(bench_path):
	"""Remove this bench's unit-file symlinks from /etc/systemd/system and reload."""
	bench_dir = os.path.abspath(bench_path)
	etc_systemd_system = os.path.join("/", "etc", "systemd", "system")
	for unit_file in get_unit_files(bench_dir):
		filename = "".join(unit_file)
		exec_cmd(f"sudo rm {etc_systemd_system}/{filename}")
	exec_cmd("sudo systemctl daemon-reload")
def get_unit_files(bench_path):
	"""List ``[stem, extension]`` pairs for every systemd unit this bench owns."""
	bench_name = get_bench_name(bench_path)

	target_suffixes = ["", "-workers", "-web", "-redis"]
	service_suffixes = [
		"-frappe-default-worker@",
		"-frappe-short-worker@",
		"-frappe-long-worker@",
		"-frappe-schedule",
		"-frappe-web",
		"-node-socketio",
		"-redis-cache",
		"-redis-queue",
	]

	unit_files = [[bench_name + suffix, ".target"] for suffix in target_suffixes]
	unit_files += [[bench_name + suffix, ".service"] for suffix in service_suffixes]
	return unit_files
|
2302_79757062/bench
|
bench/config/systemd.py
|
Python
|
agpl-3.0
| 9,848
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Sorry! We will be back soon.</title>
<style>
body {
font-family: "Helvetica Neue", Helvetica, Arial, "Open Sans", sans-serif;
color: #36414C;
font-weight: 300;
}
.page-container {
max-width: 800px;
padding: 15px;
vertical-align: middle;
position: absolute;
top: 50%;
left: 0;
right: 0;
margin: 0 auto;
transform: translate(0%, -50%);
}
.svg-container {
float: left;
width: 150px;
padding-top: 24px;
}
.message-container {
float: left;
padding-left: 15px;
font-size: 16px;
line-height: 1.6;
}
.message-container h1 {
font-size: 48px;
line-height: 1.2;
font-weight: 200;
}
.message-container .message {
color: #8D99A6;
}
.clearfix {
clear: both;
}
a {
color: #5E64FF;
}
@media (max-width: 767px) {
.svg-container {
float: none;
padding-top: 0px;
}
.message-container {
float: none;
width: 100% !important;
}
}
</style>
</head>
<body>
<div class="page-container">
<div class="svg-container">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns" viewBox="0 0 32 32" version="1.1" x="0px" y="0px"><title>sad-face-avatar-boy-man-11</title><desc>Created with Sketch.</desc><g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage"><path d="M24.5728851,23.0941141 C26.0847779,16.969087 23.5542402,12.1548858 21.4280864,11.0822754 C19.598459,12.7549171 13.9938462,17 10.2890466,17 C12.2343263,15.4384766 12.8488535,12.9394531 12.8488535,12.9394531 C12.8488535,12.9394531 10.9859461,15.7346191 7.24611965,17 C6.45154466,18.5731485 6.67682658,20.3507184 6.78659707,21.5361582 C6.84777749,21.7403698 6.95210509,22.0855229 6.99214296,22.1994274 C7.09055416,22.4793995 7.21210961,22.7924224 7.35775135,23.1301118 C7.774276,24.0958785 8.2986796,25.0616511 8.9372701,25.9603932 C10.738893,28.4959687 13.0675623,30 16,30 C18.9311427,30 21.2399187,28.4973998 23.0104666,25.9636063 C23.6381381,25.065359 24.1509411,24.1000358 24.5559252,23.1346951 C24.5616169,23.1211281 24.5672702,23.1076009 24.5728851,23.0941141 L24.5728851,23.0941141 Z M5.94669386,22.2116429 C4.61458602,20.1217921 3.13011281,13.1987617 4.62664708,8.75830078 C6.40621687,3.47802734 12.6103081,1 15.7729333,1 C18.8013894,1.00000002 21.8450169,1.93994141 23.0552307,3.80615234 C23.0552307,3.80615234 25.0915798,2.75024414 26.9020692,3.80615234 C25.0915798,4.17895508 24.887945,5.19335938 24.887945,5.19335938 C27.9234944,6.90377632 29.4577737,17.0840684 26.1082885,21.6811732 C26.0708438,21.8119773 25.9120331,22.3649335 25.857287,22.526075 C25.7549564,22.8272785 25.6289716,23.1618434 25.4780638,23.5215549 C25.0472763,24.5484017 24.5017812,25.575266 23.8301706,26.5363937 C21.888484,29.3151002 19.2996007,31 16,31 C12.7016943,31 10.0952049,29.3165313 8.12209422,26.5396068 C7.43952798,25.5789739 6.88219633,24.552559 6.43951227,23.5261382 C6.28443097,23.166562 6.15455941,22.832124 6.04872776,22.5310413 
C6.02660008,22.4680898 5.98792403,22.3454665 5.94669386,22.2116429 L5.94669386,22.2116429 Z M20.6103625,20.496219 L21.7234973,21.0527864 C21.9704865,21.176281 22.0705987,21.4766175 21.9471041,21.7236068 C21.8236094,21.970596 21.5232729,22.0707082 21.2762837,21.9472136 L19.2762837,20.9472136 C18.9077594,20.7629515 18.9077594,20.2370485 19.2762837,20.0527864 L21.2762837,19.0527864 C21.5232729,18.9292918 21.8236094,19.029404 21.9471041,19.2763932 C22.0705987,19.5233825 21.9704865,19.823719 21.7234973,19.9472136 L20.6103625,20.496219 Z M11.389528,20.496219 L10.2763932,21.0527864 C10.029404,21.176281 9.92929178,21.4766175 10.0527864,21.7236068 C10.176281,21.970596 10.4766175,22.0707082 10.7236068,21.9472136 L12.7236068,20.9472136 C13.0921311,20.7629515 13.0921311,20.2370485 12.7236068,20.0527864 L10.7236068,19.0527864 C10.4766175,18.9292918 10.176281,19.029404 10.0527864,19.2763932 C9.92929178,19.5233825 10.029404,19.823719 10.2763932,19.9472136 L11.389528,20.496219 Z M14.4246316,26.7639848 C14.4725953,26.6868331 14.5938453,26.5444206 14.7863941,26.3975309 C15.1127054,26.1485979 15.512309,26 16,26 C16.487691,26 16.8872946,26.1485979 17.2136059,26.3975309 C17.4061547,26.5444206 17.5274047,26.6868331 17.5753684,26.7639848 C17.7211632,26.9985024 18.0294673,27.0704264 18.2639848,26.9246316 C18.4985024,26.7788368 18.5704264,26.4705327 18.4246316,26.2360152 C18.3171754,26.0631669 18.1191505,25.8305794 17.8201344,25.6024691 C17.3271707,25.2264021 16.7183393,25 16,25 C15.2816607,25 14.6728293,25.2264021 14.1798656,25.6024691 C13.8808495,25.8305794 13.6828246,26.0631669 13.5753684,26.2360152 C13.4295736,26.4705327 13.5014976,26.7788368 13.7360152,26.9246316 C13.9705327,27.0704264 14.2788368,26.9985024 14.4246316,26.7639848 Z" fill="#000000" sketch:type="MSShapeGroup"></path></g></svg>
</div>
<div class="message-container" style="width: calc(100% - 170px);">
<h1>
Sorry! <br>
We will be back soon.
</h1>
<p class="message">
<strong>Don't panic.</strong> It's not you, it's us.<br>
Most likely, our engineers are updating the code,
and it should take a minute for the new code to load into memory.<br><br>
Try refreshing after a minute or two.
</p>
</div>
<div class="clearfix"></div>
</div>
</body>
</html>
|
2302_79757062/bench
|
bench/config/templates/502.html
|
HTML
|
agpl-3.0
| 5,444
|
{% if not skip_redis %}
redis_cache: redis-server config/redis_cache.conf
redis_queue: redis-server config/redis_queue.conf
{% endif %}
web: bench serve {% if webserver_port -%} --port {{ webserver_port }} {%- endif %}
socketio: {{ node }} apps/frappe/socketio.js
{% if not CI %}
watch: bench watch
{% endif %}
schedule: bench schedule
worker: {{ 'OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES NO_PROXY=*' if is_mac else '' }} bench worker 1>> logs/worker.log 2>> logs/worker.error.log
{% for worker_name, worker_details in workers.items() %}
worker_{{ worker_name }}: {{ 'OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES NO_PROXY=*' if is_mac else '' }} bench worker --queue {{ worker_name }} 1>> logs/worker.log 2>> logs/worker.error.log
{% endfor %}
|
2302_79757062/bench
|
bench/config/templates/Procfile
|
Procfile
|
agpl-3.0
| 742
|
class InvalidBranchException(Exception):
	"""Raised when a requested branch does not exist on an app's upstream remote."""
	pass


class InvalidRemoteException(Exception):
	"""Raised when a git remote URL/branch cannot be resolved."""
	pass


class PatchError(Exception):
	"""Raised when applying a bench patch fails."""
	pass


class CommandFailedError(Exception):
	"""Raised when an external command run via exec_cmd exits non-zero."""
	pass


class BenchNotFoundError(Exception):
	"""Raised when an operation requires a bench but none is found."""
	pass


class ValidationError(Exception):
	"""Base class for user-input/state validation failures."""
	pass


class AppNotInstalledError(ValidationError):
	"""Raised when the named app is not installed on this bench."""
	pass


class CannotUpdateReleaseBench(ValidationError):
	"""Raised when trying to update a bench marked as a release bench."""
	pass


class FeatureDoesNotExistError(CommandFailedError):
	"""Raised when a requested feature/command is unavailable."""
	pass


class NotInBenchDirectoryError(Exception):
	"""Raised when the current directory is not inside a bench."""
	pass


class VersionNotFound(Exception):
	"""Raised when an app's version string cannot be determined."""
	pass
|
2302_79757062/bench
|
bench/exceptions.py
|
Python
|
agpl-3.0
| 528
|
import os
import importlib
def run(bench_path):
	"""Apply pending bench patches from the bundled patches.txt.

	Each patch line is a dotted module path whose ``execute(bench_path)`` is
	called; progress is persisted to ``<bench>/patches.txt`` even on failure.
	"""
	source_patch_file = os.path.join(
		os.path.dirname(os.path.abspath(__file__)), "patches.txt"
	)
	target_patch_file = os.path.join(os.path.abspath(bench_path), "patches.txt")

	# all non-blank, non-comment lines are patch module paths
	with open(source_patch_file) as f:
		patches = [
			p.strip()
			for p in f.read().splitlines()
			if p.strip() and not p.strip().startswith("#")
		]

	executed_patches = []
	if os.path.exists(target_patch_file):
		with open(target_patch_file) as f:
			executed_patches = f.read().splitlines()

	try:
		for patch in patches:
			if patch not in executed_patches:
				module = importlib.import_module(patch.split()[0])
				execute = getattr(module, "execute")
				result = execute(bench_path)

				# NOTE(review): a falsy result records the patch as executed;
				# some patches (e.g. update_archived_sites) return False to mean
				# "skipped, retry later" — confirm which convention is intended
				if not result:
					executed_patches.append(patch)
	finally:
		# persist whatever ran, even if a patch raised mid-loop
		with open(target_patch_file, "w") as f:
			f.write("\n".join(executed_patches))

			# end with an empty line
			f.write("\n")
|
2302_79757062/bench
|
bench/patches/__init__.py
|
Python
|
agpl-3.0
| 914
|
from bench.config.common_site_config import get_config
from crontab import CronTab
def execute(bench_path):
	"""
	This patch fixes a cron job that would backup sites every minute per 6 hours
	"""
	# crontab entries are looked up for the bench's configured frappe user
	user = get_config(bench_path=bench_path).get("frappe_user")
	user_crontab = CronTab(user=user)

	# re-schedule the matching entry to run once every 6 hours (CronTab is
	# provided by the third-party python-crontab package)
	for job in user_crontab.find_comment("bench auto backups set for every 6 hours"):
		job.every(6).hours()
		user_crontab.write()
|
2302_79757062/bench
|
bench/patches/v5/fix_backup_cronjob.py
|
Python
|
agpl-3.0
| 425
|
# imports - standard imports
import getpass
import os
import subprocess
# imports - module imports
from bench.cli import change_uid_msg
from bench.config.production_setup import get_supervisor_confdir, is_centos7, service
from bench.config.common_site_config import get_config
from bench.utils import exec_cmd, get_bench_name, get_cmd_output
def is_sudoers_set():
	"""Check if bench sudoers is set"""
	# `sudo -n` never prompts; it fails immediately unless a sudoers entry exists
	cmd = ["sudo", "-n", "bench"]
	bench_warn = False

	with open(os.devnull, "wb") as f:
		# subprocess.call returns 0 on success -> `not` turns it into True
		return_code_check = not subprocess.call(cmd, stdout=f)

	if return_code_check:
		try:
			# the passwordless run must also emit bench's change-uid warning
			bench_warn = change_uid_msg in get_cmd_output(cmd, _raise=False)
		except subprocess.CalledProcessError:
			bench_warn = False
		finally:
			return_code_check = return_code_check and bench_warn

	return return_code_check
def is_production_set(bench_path):
	"""Check if production is set for current bench.

	:param bench_path: path to the bench root
	:returns: True if either the supervisor conf or the nginx conf exists
	"""
	bench_name = get_bench_name(bench_path)

	# CentOS 7's supervisor package reads *.ini instead of *.conf
	supervisor_conf_extn = "ini" if is_centos7() else "conf"
	supervisor_conf_file_name = f"{bench_name}.{supervisor_conf_extn}"
	supervisor_conf = os.path.join(get_supervisor_confdir(), supervisor_conf_file_name)

	nginx_conf = f"/etc/nginx/conf.d/{bench_name}.conf"

	# production is considered set if either config file is present
	# (replaces the `x = x or True` accumulation with a direct boolean)
	return os.path.exists(supervisor_conf) or os.path.exists(nginx_conf)
def execute(bench_path):
	"""This patch checks if bench sudoers is set and regenerate supervisor and sudoers files"""
	# read the frappe user from the given bench's config instead of the
	# hardcoded "." (patches are run from the bench root, so this is
	# equivalent there, but now consistent with the bench_path argument)
	user = get_config(bench_path).get("frappe_user") or getpass.getuser()

	if is_sudoers_set():
		if is_production_set(bench_path):
			exec_cmd(f"sudo bench setup supervisor --yes --user {user}")
			service("supervisord", "restart")

		exec_cmd(f"sudo bench setup sudoers {user}")
|
2302_79757062/bench
|
bench/patches/v5/fix_user_permissions.py
|
Python
|
agpl-3.0
| 1,806
|
from bench.config.common_site_config import update_config
def execute(bench_path):
	"""Enable the `live_reload` flag in the bench's common_site_config.json."""
	update_config({"live_reload": True}, bench_path)
|
2302_79757062/bench
|
bench/patches/v5/set_live_reload_config.py
|
Python
|
agpl-3.0
| 135
|
"""
Deprecate archived_sites folder for consistency. This change is
only for Frappe v14 benches. If not a v14 bench yet, skip this
patch and try again later.
1. Rename folder `./archived_sites` to `./archived/sites`
2. Create a symlink `./archived_sites` => `./archived/sites`
Corresponding changes in frappe/frappe via https://github.com/frappe/frappe/pull/15060
"""
import os
from pathlib import Path
import click
from bench.utils.app import get_current_version
from semantic_version import Version
def execute(bench_path):
	"""Move ./archived_sites to ./archived/sites and leave a symlink behind.

	Returns False when skipped (not a v14 bench / nothing to do), True when the
	symlink already exists; falls through (None) after a successful migration.
	"""
	frappe_version = Version(get_current_version("frappe"))

	if frappe_version.major < 14 or os.name != "posix":
		# Returning False means patch has been skipped
		return False

	pre_patch_dir = os.getcwd()
	old_directory = Path(bench_path, "archived_sites")
	new_directory = Path(bench_path, "archived", "sites")

	if not old_directory.exists():
		return False

	if old_directory.is_symlink():
		return True

	os.chdir(bench_path)

	if not os.path.exists(new_directory):
		os.makedirs(new_directory)

	# Move the *contents* rather than renaming the whole directory:
	# renaming would fail if new_directory is non-empty, and after a
	# successful rename old_directory would no longer exist, making the
	# listdir/symlink steps below crash.
	for archived_site in old_directory.iterdir():
		archived_site.rename(new_directory / archived_site.name)

	click.secho(f"Archived sites are now stored under {new_directory}")

	if not os.listdir(old_directory):
		os.rmdir(old_directory)

	os.symlink(new_directory, old_directory)
	click.secho(f"Symlink {old_directory} that points to {new_directory}")

	os.chdir(pre_patch_dir)
|
2302_79757062/bench
|
bench/patches/v5/update_archived_sites.py
|
Python
|
agpl-3.0
| 1,342
|
# Nag the user to work inside screen(1): prepend a warning to the prompt
# when the current terminal is not a screen session.
# $TERM is quoted so an unset or multi-word value cannot break the test.
if [ "$TERM" != 'screen' ]
then
    PS1='HEY! USE SCREEN '$PS1
fi

# sw <name>: attach to screen session <name>, creating it if it does not exist.
# "$1" is quoted so session names with spaces/globs are passed through intact.
sw() {
    screen -x "$1" || screen -S "$1"
}
|
2302_79757062/bench
|
bench/playbooks/roles/bash_screen_wall/files/screen_wall.sh
|
Shell
|
agpl-3.0
| 115
|
# Block IPs trying to use server as proxy.
[Definition]
failregex = <HOST>.*\" 400
<HOST>.*"[A-Z]* /(cms|muieblackcat|db|cpcommerce|cgi-bin|wp-login|joomla|awstatstotals|wp-content|wp-includes|pma|phpmyadmin|myadmin|mysql|mysqladmin|sqladmin|mypma|admin|xampp|mysqldb|pmadb|phpmyadmin1|phpmyadmin2).*" 4[\d][\d]
<HOST>.*".*supports_implicit_sdk_logging.*" 4[\d][\d]
<HOST>.*".*activities?advertiser_tracking_enabled.*" 4[\d][\d]
<HOST>.*".*/picture?type=normal.*" 4[\d][\d]
<HOST>.*".*/announce.php?info_hash=.*" 4[\d][\d]
ignoreregex =
|
2302_79757062/bench
|
bench/playbooks/roles/fail2ban/templates/nginx-proxy-filter.conf.j2
|
Jinja
|
agpl-3.0
| 542
|
## block hosts trying to abuse our server as a forward proxy
[nginx-proxy]
enabled = true
filter = nginx-proxy
logpath = {{ fail2ban_nginx_access_log }}
action = iptables-multiport[name=NoNginxProxy, port="http,https"]
maxretry = {{ maxretry }}
bantime = {{ bantime }}
findtime = {{ findtime }}
|
2302_79757062/bench
|
bench/playbooks/roles/fail2ban/templates/nginx-proxy-jail.conf.j2
|
Jinja
|
agpl-3.0
| 295
|
MailTo = {{ logwatch_emails }}
Detail = {{ logwatch_detail }}
|
2302_79757062/bench
|
bench/playbooks/roles/logwatch/templates/logwatch.conf.j2
|
Jinja
|
agpl-3.0
| 61
|
# MariaDB CentOS {{ ansible_distribution_major_version|int }} repository list
# http://mariadb.org/mariadb/repositories/
[mariadb]
name = MariaDB
baseurl = http://yum.mariadb.org/{{ mariadb_version }}/centos{{ ansible_distribution_major_version|int }}-amd64
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
gpgcheck=1
|
2302_79757062/bench
|
bench/playbooks/roles/mariadb/templates/mariadb_centos.repo.j2
|
Jinja
|
agpl-3.0
| 320
|
# MariaDB {{ mariadb_version }} Debian {{ ansible_distribution_release | title }} repository list
# http://mariadb.org/mariadb/repositories/
deb http://ams2.mirrors.digitalocean.com/mariadb/repo/{{ mariadb_version }}/debian {{ ansible_distribution_release | lower }} main
deb-src http://ams2.mirrors.digitalocean.com/mariadb/repo/{{ mariadb_version }}/debian {{ ansible_distribution_release | lower }} main
|
2302_79757062/bench
|
bench/playbooks/roles/mariadb/templates/mariadb_debian.list.j2
|
Jinja
|
agpl-3.0
| 407
|
# MariaDB Ubuntu {{ ansible_distribution_release | title }} repository list
# http://mariadb.org/mariadb/repositories/
deb http://ams2.mirrors.digitalocean.com/mariadb/repo/{{ mariadb_version }}/ubuntu {{ ansible_distribution_release | lower }} main
deb-src http://ams2.mirrors.digitalocean.com/mariadb/repo/{{ mariadb_version }}/ubuntu {{ ansible_distribution_release | lower }} main
|
2302_79757062/bench
|
bench/playbooks/roles/mariadb/templates/mariadb_ubuntu.list.j2
|
Jinja
|
agpl-3.0
| 385
|
[client]
user=root
password={{ mysql_root_password }}
|
2302_79757062/bench
|
bench/playbooks/roles/mariadb/templates/my.cnf.j2
|
Jinja
|
agpl-3.0
| 54
|
user {{ nginx_user }};
worker_processes auto;
worker_rlimit_nofile 65535;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections {{ nginx_worker_connections or 2048 }};
    multi_accept on;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
        '$status $body_bytes_sent "$http_referer" '
        '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    server_tokens off;

    # keepalive_timeout 10;
    # keepalive_requests 10;

    gzip on;
    gzip_disable "msie6";
    gzip_http_version 1.1;
    gzip_comp_level 5;
    gzip_min_length 256;
    gzip_proxied any;
    gzip_vary on;
    gzip_types
        application/atom+xml
        application/javascript
        application/json
        application/rss+xml
        application/vnd.ms-fontobject
        application/x-font-ttf
        application/font-woff
        application/x-web-app-manifest+json
        application/xhtml+xml
        application/xml
        font/opentype
        image/svg+xml
        image/x-icon
        text/css
        text/plain
        text/x-component
        ;

    server_names_hash_max_size 4096;

    open_file_cache max=65000 inactive=1m;
    open_file_cache_valid 5s;
    open_file_cache_min_uses 1;
    open_file_cache_errors on;

    # SECURITY: SSLv3 (POODLE) and TLSv1.0, as well as RC4 ciphers, are
    # broken — allow only modern TLS with forward-secret AEAD suites.
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305;
    ssl_prefer_server_ciphers on;

    client_max_body_size 50m;
    large_client_header_buffers 4 32k;

    proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=web-cache:8m max_size=1000m inactive=600m;

    include /etc/nginx/conf.d/*.conf;
}
|
2302_79757062/bench
|
bench/playbooks/roles/nginx/templates/nginx.conf.j2
|
Jinja
|
agpl-3.0
| 1,916
|
[nginx]
name=nginx repo
baseurl=http://nginx.org/packages/centos/{{ ansible_distribution_major_version }}/$basearch/
gpgcheck=0
enabled=1
|
2302_79757062/bench
|
bench/playbooks/roles/nginx/templates/nginx.repo.j2
|
Jinja
|
agpl-3.0
| 138
|
{% for vhost in nginx_vhosts %}
server {
listen {{ vhost.listen | default('80 default_server') }};
server_name {{ vhost.server_name }};
root {{ vhost.root }};
index {{ vhost.index | default('index.html index.htm') }};
{% if vhost.error_page is defined %}
error_page {{ vhost.error_page }};
{% endif %}
{% if vhost.access_log is defined %}
access_log {{ vhost.access_log }};
{% endif %}
{% if vhost.return is defined %}
return {{ vhost.return }};
{% endif %}
{% if vhost.extra_parameters is defined %}
{{ vhost.extra_parameters }};
{% endif %}
}
{% endfor %}
|
2302_79757062/bench
|
bench/playbooks/roles/nginx/templates/vhosts.j2
|
Jinja
|
agpl-3.0
| 626
|
# imports - standard imports
import json
import logging
import os
import re
import subprocess
import sys
import hashlib
from functools import lru_cache
from glob import glob
from pathlib import Path
from shlex import split
from tarfile import TarInfo
from typing import List, Optional, Tuple
# imports - third party imports
import click
# imports - module imports
from bench import PROJECT_NAME, VERSION
from bench.exceptions import (
AppNotInstalledError,
CommandFailedError,
InvalidRemoteException,
)
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Optional
logger = logging.getLogger(PROJECT_NAME)
paths_in_app = ("hooks.py", "modules.txt", "patches.txt")
paths_in_bench = ("apps", "sites", "config", "logs", "config/pids")
sudoers_file = "/etc/sudoers.d/frappe"
UNSET_ARG = object()
def is_bench_directory(directory=os.path.curdir):
	"""Return True if *directory* contains every folder a bench is expected to have.

	:param directory: path to check (defaults to the current directory)
	"""
	# all() short-circuits on the first missing folder, unlike the previous
	# manual boolean accumulation which always checked every path
	return all(
		os.path.exists(os.path.abspath(os.path.join(directory, folder)))
		for folder in paths_in_bench
	)
def is_frappe_app(directory: str) -> bool:
	"""Return True if *directory* looks like a Frappe app.

	An app must contain each marker file (hooks.py, modules.txt, patches.txt)
	somewhere under it.
	"""
	# glob returns a (possibly empty) list per marker; all() short-circuits
	# and avoids shadowing the function name with a local variable
	return all(glob(os.path.join(directory, "**", folder)) for folder in paths_in_app)
def get_bench_cache_path(sub_dir: Optional[str]) -> Path:
	"""Return bench's cache directory (~/.cache/bench[/sub_dir]), creating it if needed."""
	base = "~/.cache/bench"
	if sub_dir and not sub_dir.startswith("/"):
		base = f"{base}/{sub_dir}"

	cache_path = Path(os.path.expanduser(base))
	cache_path.mkdir(parents=True, exist_ok=True)
	return cache_path
@lru_cache(maxsize=None)
def is_valid_frappe_branch(frappe_path: str, frappe_branch: str):
	"""Check if a branch exists in a repo. Throws InvalidRemoteException if branch is not found

	Uses native git command to check for branches on a remote.

	:param frappe_path: git url
	:type frappe_path: str
	:param frappe_branch: branch to check
	:type frappe_branch: str
	:raises InvalidRemoteException: branch for this repo doesn't exist
	"""
	from git.cmd import Git
	from git.exc import GitCommandError

	g = Git()

	# a falsy branch means "use the remote default" — nothing to validate
	if frappe_branch:
		try:
			# ls-remote hits the network but needs no local clone
			res = g.ls_remote("--heads", "--tags", frappe_path, frappe_branch)
			if not res:
				raise InvalidRemoteException(
					f"Invalid branch or tag: {frappe_branch} for the remote {frappe_path}"
				)
		except GitCommandError as e:
			raise InvalidRemoteException(f"Invalid frappe path: {frappe_path}") from e
def log(message, level=0, no_log=False, stderr=False):
	"""Print *message* colour-coded by level (0 info, 1 success, 2 error, 3 warn).

	Records to the bench logger and CLI feed buffer unless *no_log* is set.
	"""
	import bench
	import bench.cli

	levels = {
		0: ("blue", "INFO"),  # normal
		1: ("green", "SUCCESS"),  # success
		2: ("red", "ERROR"),  # fail
		3: ("yellow", "WARN"),  # warn/suggest
	}
	color, prefix = levels.get(level, levels[0])

	if bench.cli.from_command_line and bench.cli.dynamic_feed:
		bench.LOG_BUFFER.append({"prefix": prefix, "message": message, "color": color})

	if no_log:
		click.secho(message, fg=color, err=stderr)
		return

	level_logger = {2: logger.error, 3: logger.warning}.get(level, logger.info)
	level_logger(message)
	click.secho(f"{prefix}: {message}", fg=color, err=stderr)
def check_latest_version():
	"""Warn (on stderr) when a newer frappe-bench release exists on PyPI."""
	if VERSION.endswith("dev"):
		return

	import requests
	from semantic_version import Version

	try:
		pypi_request = requests.get("https://pypi.org/pypi/frappe-bench/json")
	except Exception:
		# Exceptions thrown are defined in requests.exceptions
		# ignore checking on all Exceptions
		return

	if pypi_request.status_code != 200:
		return

	pypi_version_str = pypi_request.json().get("info").get("version")
	pypi_version = Version(pypi_version_str)
	local_version = Version(VERSION)

	if pypi_version > local_version:
		log(
			f"A newer version of bench is available: {local_version} → {pypi_version}",
			stderr=True,
		)
def pause_exec(seconds=10):
	"""Count down for *seconds*, updating a single status line, then clear it."""
	from time import sleep

	remaining = seconds
	while remaining > 0:
		print(f"Will continue execution in {remaining} seconds...", end="\r")
		sleep(1)
		remaining -= 1

	print(" " * 40, end="\r")
def exec_cmd(cmd, cwd=".", env=None, _raise=True):
	"""Run *cmd* (shell-split, not a shell) in *cwd*, echoing it to the terminal.

	:param env: extra environment variables merged with os.environ
	:param _raise: raise CommandFailedError on a non-zero exit code
	:returns: the subprocess return code
	"""
	if env:
		# NOTE(review): update() lets os.environ override the caller-supplied
		# values (and mutates the caller's dict) — confirm this precedence
		# is intended rather than env taking priority
		env.update(os.environ.copy())
	click.secho(f"$ {cmd}", fg="bright_black")

	cwd_info = f"cd {cwd} && " if cwd != "." else ""
	cmd_log = f"{cwd_info}{cmd}"
	logger.debug(cmd_log)

	spl_cmd = split(cmd)  # shlex split — no shell interpretation of the command
	return_code = subprocess.call(spl_cmd, cwd=cwd, universal_newlines=True, env=env)
	if return_code:
		logger.warning(f"{cmd_log} executed with exit code {return_code}")
		if _raise:
			raise CommandFailedError(cmd) from subprocess.CalledProcessError(return_code, cmd)
	return return_code
def which(executable: str, raise_err: bool = False) -> str:
	"""Locate *executable* on PATH; optionally raise when it is missing.

	:returns: full path of the executable, or None when not found
	:raises FileNotFoundError: when missing and *raise_err* is True
	"""
	from shutil import which as shutil_which

	found = shutil_which(executable)
	if found is None and raise_err:
		raise FileNotFoundError(f"{executable} not found in PATH")
	return found
def setup_logging(bench_path=".") -> logging.Logger:
	"""Attach a file handler (logs/bench.log) to the bench logger and return it."""
	LOG_LEVEL = 15
	logging.addLevelName(LOG_LEVEL, "LOG")

	def logv(self, message, *args, **kws):
		if self.isEnabledFor(LOG_LEVEL):
			self._log(LOG_LEVEL, message, args, **kws)

	logging.Logger.log = logv

	logs_dir = os.path.join(bench_path, "logs")
	if os.path.exists(logs_dir):
		hdlr = logging.FileHandler(os.path.join(logs_dir, "bench.log"))
	else:
		# outside a bench there is no logs/ folder: swallow records silently
		hdlr = logging.NullHandler()

	bench_logger = logging.getLogger(PROJECT_NAME)
	hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
	bench_logger.addHandler(hdlr)
	bench_logger.setLevel(logging.DEBUG)

	return bench_logger
def get_process_manager() -> str:
	"""Return the path of the first available Procfile runner, or None."""
	for candidate in ("honcho", "foreman", "forego"):
		found = which(candidate)
		if found:
			return found
def get_git_version() -> float:
	"""returns git version from `git --version`
	extracts version number from string `get version 1.9.1` etc"""
	version = get_cmd_output("git --version")
	# e.g. "git version 2.39.2" -> "2.39.2"
	version = version.strip().split()[2]
	# keep only major.minor so the string parses as a float
	# NOTE(review): float() drops trailing zeros ("2.40" -> 2.4), so numeric
	# ordering against e.g. 2.9 is unreliable — confirm how callers compare this
	version = ".".join(version.split(".")[0:2])
	return float(version)
def get_cmd_output(cmd, cwd=".", _raise=True):
	"""Return the stripped stdout of a shell command.

	On failure, returns the command's captured output if any; otherwise
	re-raises when *_raise* is set, else returns an empty string.
	"""
	output = ""
	try:
		raw = subprocess.check_output(
			cmd, cwd=cwd, shell=True, stderr=subprocess.PIPE, encoding="utf-8"
		)
		output = raw.strip()
	except subprocess.CalledProcessError as e:
		if e.output:
			output = e.output
		elif _raise:
			raise
	return output
def is_root():
	"""True when the current process runs as the root user (uid 0)."""
	return os.getuid() == 0
def run_frappe_cmd(*args, **kwargs):
	"""Run a frappe bench_helper command inside the bench's virtualenv.

	Exits this process with the command's return code when it is non-zero.
	"""
	from bench.cli import from_command_line
	from bench.utils.bench import get_env_cmd

	bench_path = kwargs.get("bench_path", ".")
	f = get_env_cmd("python", bench_path=bench_path)  # env python executable
	sites_dir = os.path.join(bench_path, "sites")

	# when not invoked from the CLI, capture and stream output via print_output
	is_async = not from_command_line
	if is_async:
		stderr = stdout = subprocess.PIPE
	else:
		stderr = stdout = None

	p = subprocess.Popen(
		(f, "-m", "frappe.utils.bench_helper", "frappe") + args,
		cwd=sites_dir,
		stdout=stdout,
		stderr=stderr,
	)

	return_code = print_output(p) if is_async else p.wait()
	if return_code > 0:
		sys.exit(return_code)
def print_output(p):
	"""Pump *p*'s stdout/stderr to our stdout/stderr until the process exits.

	:param p: subprocess.Popen created with PIPE stdout/stderr
	:returns: the process return code
	"""
	from select import select

	while p.poll() is None:
		# block until at least one of the pipes has data
		readx = select([p.stdout.fileno(), p.stderr.fileno()], [], [])[0]
		send_buffer = []
		for fd in readx:
			if fd == p.stdout.fileno():
				# drain stdout byte-wise, forwarding one line at a time
				while 1:
					buf = p.stdout.read(1)
					if not len(buf):
						break
					if buf == "\r" or buf == "\n":
						send_buffer.append(buf)
						log_line("".join(send_buffer), "stdout")
						send_buffer = []
					else:
						send_buffer.append(buf)

			if fd == p.stderr.fileno():
				log_line(p.stderr.readline(), "stderr")

	return p.poll()
def log_line(data, stream):
	"""Write *data* to stderr when *stream* == "stderr", otherwise to stdout."""
	target = sys.stderr if stream == "stderr" else sys.stdout
	return target.write(data)
def get_bench_name(bench_path):
	"""Bench name is the basename of the absolute bench directory."""
	absolute_path = os.path.abspath(bench_path)
	return os.path.basename(absolute_path)
def set_git_remote_url(git_url, bench_path="."):
	"Set app remote git url"
	from bench.app import get_repo_dir
	from bench.bench import Bench

	# app name = last path segment of the URL, minus a trailing ".git"
	app = git_url.rsplit("/", 1)[1].rsplit(".", 1)[0]

	if app not in Bench(bench_path).apps:
		raise AppNotInstalledError(f"No app named {app}")

	app_dir = get_repo_dir(app, bench_path=bench_path)
	# only update when the app is actually a git checkout
	if os.path.exists(os.path.join(app_dir, ".git")):
		exec_cmd(f"git remote set-url upstream {git_url}", cwd=app_dir)
def run_playbook(playbook_name, extra_vars=None, tag=None):
	"""Run an ansible playbook bundled with bench (requires ansible on PATH)."""
	import bench

	if not which("ansible"):
		print(
			"Ansible is needed to run this command, please install it using 'pip"
			" install ansible'"
		)
		sys.exit(1)

	command = ["ansible-playbook", "-c", "local", playbook_name, "-vvvv"]
	if extra_vars:
		command += ["-e", json.dumps(extra_vars)]
	if tag:
		command += ["-t", tag]

	subprocess.check_call(command, cwd=os.path.join(bench.__path__[0], "playbooks"))
def find_benches(directory: str = None) -> List:
	"""Recursively search *directory* (default: home) for bench directories.

	Prints findings as it goes; returns a list of bench paths, or None when
	*directory* itself is already a bench.
	"""
	if not directory:
		directory = os.path.expanduser("~")
	elif os.path.exists(directory):
		directory = os.path.abspath(directory)
	else:
		log("Directory doesn't exist", level=2)
		sys.exit(1)

	if is_bench_directory(directory):
		if os.path.curdir == directory:
			print("You are in a bench directory!")
		else:
			print(f"{directory} is a bench directory!")
		return

	benches = []

	try:
		sub_directories = os.listdir(directory)
	except PermissionError:
		# unreadable directory: treat as containing no benches
		return benches

	for sub in sub_directories:
		sub = os.path.join(directory, sub)
		# symlinks are skipped to avoid cycles / double counting
		if os.path.isdir(sub) and not os.path.islink(sub):
			if is_bench_directory(sub):
				print(f"{sub} found!")
				benches.append(sub)
			else:
				benches.extend(find_benches(sub))

	return benches
def is_dist_editable(dist: str) -> bool:
	"""Is distribution an editable install?"""
	# legacy setuptools develop-mode leaves a <dist>.egg-link on sys.path
	return any(
		os.path.isfile(os.path.join(entry, f"{dist}.egg-link")) for entry in sys.path
	)
def find_parent_bench(path: str) -> str:
	"""Walk up from *path*, returning the first bench directory found (else None)."""
	if is_bench_directory(directory=path):
		return path

	home_path = os.path.expanduser("~")
	root_path = os.path.abspath(os.sep)
	if path in (home_path, root_path):
		# reached the top without finding a bench
		return None

	# NOTE: the os.path.split assumes that given path is absolute
	parent_dir = os.path.split(path)[0]
	return find_parent_bench(parent_dir)
def get_env_frappe_commands(bench_path=".") -> List:
	"""Caches all available commands (even custom apps) via Frappe
	Default caching behaviour: generated the first time any command (for a specific bench directory)
	"""
	from bench.utils.bench import get_env_cmd

	python = get_env_cmd("python", bench_path=bench_path)
	sites_path = os.path.join(bench_path, "sites")

	try:
		# bench_helper prints a JSON list of every available frappe command
		return json.loads(
			get_cmd_output(
				f"{python} -m frappe.utils.bench_helper get-frappe-commands", cwd=sites_path
			)
		)

	except subprocess.CalledProcessError as e:
		if hasattr(e, "stderr"):
			print(e.stderr)

	# on failure, fall back to "no extra commands"
	return []
def find_org(org_repo, using_cached: bool = False):
	"""Guess which GitHub org ("frappe" or "erpnext") hosts *org_repo*.

	:param org_repo: single-element list holding the repo name
	:returns: (org, repo); ("", repo) when *using_cached* and nothing matched
	:raises InvalidRemoteException: repo not found under either org
	"""
	import requests

	org_repo = org_repo[0]

	for org in ["frappe", "erpnext"]:
		res = requests.head(f"https://api.github.com/repos/{org}/{org_repo}")
		if res.status_code in (400, 403):
			# API refused/rate-limited: fall back to the plain website URL
			res = requests.head(f"https://github.com/{org}/{org_repo}")
		if res.ok:
			return org, org_repo

	if using_cached:
		return "", org_repo

	raise InvalidRemoteException(
		f"{org_repo} not found under frappe or erpnext GitHub accounts"
	)
def fetch_details_from_tag(
	_tag: str, using_cached: bool = False
) -> Tuple[str, str, str]:
	"""Parse "[org/]repo[@tag]" into an (org, repo, tag) tuple.

	When the org part is absent, find_org() (a GitHub lookup) guesses it.

	:raises Exception: if *_tag* is empty
	"""
	if not _tag:
		raise Exception("Tag is not provided")

	# split only on the first "@" so tags that themselves contain "@"
	# don't produce a 3+ element list and crash the unpacking below
	app_tag = _tag.split("@", 1)
	org_repo = app_tag[0].split("/")

	try:
		repo, tag = app_tag
	except ValueError:
		# no "@tag" part given
		repo, tag = app_tag + [None]

	try:
		org, repo = org_repo
	except Exception:
		org, repo = find_org(org_repo, using_cached)

	return org, repo, tag
def is_git_url(url: str) -> bool:
	"""Loosely check whether *url* looks like a git remote (https/ssh/git/scp-style)."""
	# modified to allow without the tailing .git from https://github.com/jonschlinkert/is-git-url.git
	pattern = r"(?:git|ssh|https?|\w*@[-\w.]+):(\/\/)?(.*?)(\.git)?(\/?|\#[-\d\w._]+?)$"
	matched = re.match(pattern, url)
	return matched is not None
def drop_privileges(uid_name="nobody", gid_name="nogroup"):
	"""Drop root privileges, switching to *uid_name*/*gid_name* (no-op as non-root)."""
	import grp
	import pwd

	# from http://stackoverflow.com/a/2699996
	if os.getuid() != 0:
		# We're not root so, like, whatever dude
		return

	# Get the uid/gid from the name
	running_uid = pwd.getpwnam(uid_name).pw_uid
	running_gid = grp.getgrnam(gid_name).gr_gid

	# Remove group privileges
	os.setgroups([])

	# Try setting the new uid/gid (gid first — setuid would forbid setgid after)
	os.setgid(running_gid)
	os.setuid(running_uid)

	# Ensure a very conservative umask
	os.umask(0o22)
def get_available_folder_name(name: str, path: str) -> str:
	"""Return *name*, or the first free "{name}_{1..99}" when *path*/*name* exists.

	(Docstring fixed: suffixes use an underscore, e.g. "name_1", not "-1".)

	NOTE: if every suffixed candidate up to _99 is also taken, the original
	(taken) *name* is returned unchanged — callers must handle that collision.
	"""
	if os.path.exists(os.path.join(path, name)):
		for num in range(1, 100):
			_dt = f"{name}_{num}"
			if not os.path.exists(os.path.join(path, _dt)):
				return _dt
	return name
def get_traceback() -> str:
	"""Returns the traceback of the Exception"""
	from traceback import format_exception

	exc_info = sys.exc_info()
	if not any(exc_info):
		# no exception is currently being handled
		return ""
	return "".join(format_exception(*exc_info))
class _dict(dict):
	"""dict like object that exposes keys as attributes"""

	# bench port of frappe._dict
	def __getattr__(self, key):
		# missing keys resolve to None instead of raising — except dunder
		# lookups, which must raise for protocols like copy/pickle to work
		ret = self.get(key)
		# "__deepcopy__" exception added to fix frappe#14833 via DFP
		if not ret and key.startswith("__") and key != "__deepcopy__":
			raise AttributeError()
		return ret

	def __setattr__(self, key, value):
		self[key] = value

	# pickle the mapping itself as the instance state
	def __getstate__(self):
		return self

	def __setstate__(self, d):
		self.update(d)

	def update(self, d):
		"""update and return self -- the missing dict feature in python"""
		super().update(d)
		return self

	def copy(self):
		# return a _dict (not a plain dict) so attribute access keeps working
		return _dict(dict(self).copy())
def get_cmd_from_sysargv():
	"""Identify and segregate tokens to options and command

	For Command: `bench --profile --site frappeframework.com migrate --no-backup`
	sys.argv: ["/home/frappe/.local/bin/bench", "--profile", "--site", "frappeframework.com", "migrate", "--no-backup"]
	Actual command run: migrate
	"""
	# context is passed as options to frappe's bench_helper
	from bench.bench import Bench

	frappe_context = _dict(params={"--site"}, flags={"--verbose", "--profile", "--force"})
	cmd_from_ctx = None
	sys_argv = sys.argv[1:]
	skip_next = False

	for arg in sys_argv:
		if skip_next:
			# this token is the value of a --param consumed on the previous pass
			skip_next = False
			continue

		if arg in frappe_context.flags:
			continue

		elif arg in frappe_context.params:
			# parameter tokens consume the following value token as well
			skip_next = True
			continue

		# the first token may be an app name (e.g. `bench frappe <cmd>`)
		# NOTE(review): sys_argv.index(arg) finds the *first* occurrence —
		# misbehaves if a later token repeats the first token; confirm
		if sys_argv.index(arg) == 0 and arg in Bench(".").apps:
			continue

		cmd_from_ctx = arg
		break

	return cmd_from_ctx
def get_app_cache_extract_filter(
	count_threshold: int = 10_000,
	size_threshold: int = 1_000_000_000,
):  # -> Callable[[TarInfo, str], TarInfo | None]
	"""Build a tarfile extraction filter that guards against tar-bombs.

	Raises RuntimeError once more than *count_threshold* members or more than
	*size_threshold* total bytes have been seen.
	"""
	# running totals shared across every call to the returned filter
	state = dict(count=0, size=0)

	# fallbacks for Pythons without tarfile.data_filter: pass members through
	AbsoluteLinkError = Exception
	def data_filter(m: TarInfo, _: str) -> TarInfo:
		return m

	if (
		sys.version_info.major == 3 and sys.version_info.minor > 7
	) or sys.version_info.major > 3:
		# assumes the PEP 706 data_filter backport is present on >= 3.8 —
		# this import fails on older point releases (TODO confirm floor)
		from tarfile import data_filter, AbsoluteLinkError

	def filter_function(member: TarInfo, dest_path: str) -> Optional[TarInfo]:
		state["count"] += 1
		state["size"] += member.size

		if state["count"] > count_threshold:
			raise RuntimeError(f"Number of entries exceeds threshold ({state['count']})")

		if state["size"] > size_threshold:
			raise RuntimeError(f"Extracted size exceeds threshold ({state['size']})")

		try:
			return data_filter(member, dest_path)
		except AbsoluteLinkError:
			# Links created by `frappe` after extraction
			return None

	return filter_function
def get_file_md5(p: Path) -> "str":
	"""Stream *p* in 64 KiB chunks and return its hex MD5 digest."""
	try:
		file_md5 = hashlib.md5(usedforsecurity=False)
	# Will throw if < 3.9, can be removed once support
	# is dropped
	except TypeError:
		file_md5 = hashlib.md5()

	with open(p.as_posix(), "rb") as f:
		for chunk in iter(lambda: f.read(2**16), b""):
			file_md5.update(chunk)

	return file_md5.hexdigest()
|
2302_79757062/bench
|
bench/utils/__init__.py
|
Python
|
agpl-3.0
| 15,707
|
# imports - standard imports
import os
import pathlib
import re
import sys
import subprocess
from typing import List, Optional
from functools import lru_cache
# imports - module imports
from bench.exceptions import (
InvalidRemoteException,
InvalidBranchException,
CommandFailedError,
VersionNotFound,
)
from bench.app import get_repo_dir
def is_version_upgrade(app="frappe", bench_path=".", branch=None):
	"""Compare local vs upstream major version of *app*.

	:returns: (upgrade_needed, local_major, upstream_major)
	:raises InvalidBranchException: when the branch is missing on upstream
	"""
	upstream_version = get_upstream_version(app=app, branch=branch, bench_path=bench_path)

	if not upstream_version:
		raise InvalidBranchException(
			f"Specified branch of app {app} is not in upstream remote"
		)

	local_major = get_major_version(get_current_version(app, bench_path=bench_path))
	upstream_major = get_major_version(upstream_version)

	return (upstream_major > local_major, local_major, upstream_major)
def switch_branch(branch, apps=None, bench_path=".", upgrade=False, check_upgrade=True):
	"""Check out ``branch`` in each app repo (default: every directory under apps/).

	Fetches all upstream branches first (unshallowing shallow clones). When
	``check_upgrade`` is on, aborts unless ``upgrade`` was passed if the switch
	implies a major version change; after a confirmed upgrade it reinstalls
	requirements, backs up sites, patches them and rebuilds assets.
	"""
	import git
	from bench.bench import Bench
	from bench.utils import log, exec_cmd
	from bench.utils.bench import (
		build_assets,
		patch_sites,
		post_upgrade,
	)
	from bench.utils.system import backup_all_sites
	apps_dir = os.path.join(bench_path, "apps")
	version_upgrade = (False,)
	switched_apps = []
	if not apps:
		# default to every directory under apps/
		apps = [
			name for name in os.listdir(apps_dir) if os.path.isdir(os.path.join(apps_dir, name))
		]
	for app in apps:
		app_dir = os.path.join(apps_dir, app)
		if not os.path.exists(app_dir):
			log(f"{app} does not exist!", level=2)
			continue
		repo = git.Repo(app_dir)
		# shallow clones must be unshallowed before all branches can be fetched
		unshallow_flag = os.path.exists(os.path.join(app_dir, ".git", "shallow"))
		log(f"Fetching upstream {'unshallow ' if unshallow_flag else ''}for {app}")
		exec_cmd("git remote set-branches upstream '*'", cwd=app_dir)
		exec_cmd(
			f"git fetch --all{' --unshallow' if unshallow_flag else ''} --quiet", cwd=app_dir
		)
		if check_upgrade:
			version_upgrade = is_version_upgrade(app=app, bench_path=bench_path, branch=branch)
			if version_upgrade[0] and not upgrade:
				log(
					f"Switching to {branch} will cause upgrade from"
					f" {version_upgrade[1]} to {version_upgrade[2]}. Pass --upgrade to"
					" confirm",
					level=2,
				)
				sys.exit(1)
		print("Switching for " + app)
		exec_cmd(f"git checkout -f {branch}", cwd=app_dir)
		# verify the checkout actually landed on the requested branch
		if str(repo.active_branch) == branch:
			switched_apps.append(app)
		else:
			log(f"Switching branches failed for: {app}", level=2)
	if switched_apps:
		log(f"Successfully switched branches for: {', '.join(switched_apps)}", level=1)
		print(
			"Please run `bench update --patch` to be safe from any differences in"
			" database schema"
		)
	if version_upgrade[0] and upgrade:
		# major version change was detected and confirmed: run full upgrade pipeline
		Bench(bench_path).setup.requirements()
		backup_all_sites()
		patch_sites()
		build_assets()
		post_upgrade(version_upgrade[1], version_upgrade[2])
def switch_to_branch(branch=None, apps=None, bench_path=".", upgrade=False):
	# Thin wrapper over switch_branch for the `bench switch-to-branch` command.
	switch_branch(branch, apps=apps, bench_path=bench_path, upgrade=upgrade)
def switch_to_develop(apps=None, bench_path=".", upgrade=True):
	# Convenience wrapper: switch the given (or all) apps to the develop branch.
	switch_branch("develop", apps=apps, bench_path=bench_path, upgrade=upgrade)
def get_version_from_string(contents, field="__version__"):
	"""Extract the quoted value assigned to ``field`` (default ``__version__``)
	from a blob of Python source text; raises VersionNotFound otherwise."""
	pattern = r"^(\s*%s\s*=\s*['\\\"])(.+?)(['\"])" % field
	found = re.search(pattern, contents, flags=re.S | re.M)
	if found is None:
		raise VersionNotFound(f"{contents} is not a valid version")
	return found.group(2)
def get_major_version(version):
	"""Return the major component of a semver string, e.g. "13.0.1" -> 13."""
	# lazy import: semantic_version is only needed for version comparisons
	import semantic_version
	return semantic_version.Version(version).major
def get_develop_version(app, bench_path="."):
	"""Read ``develop_version`` out of the app package's hooks.py."""
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	hooks_path = os.path.join(repo_dir, os.path.basename(repo_dir), "hooks.py")
	with open(hooks_path) as hooks_file:
		contents = hooks_file.read()
	return get_version_from_string(contents, field="develop_version")
def get_upstream_version(app, branch=None, bench_path="."):
	"""Return the version of ``app`` on the upstream remote's ``branch``.

	Returns None when the ref has no ``<app>/__init__.py`` (invalid object).
	Raises InvalidRemoteException when fetching from upstream fails.
	"""
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	if not branch:
		branch = get_current_branch(app, bench_path=bench_path)
	# BUGFIX: subprocess.call only returns the exit code — it never raises
	# CommandFailedError, so the old try/except was dead code and fetch
	# failures were silently ignored. Check the return code explicitly.
	fetch_exit_code = subprocess.call(
		f"git fetch --depth=1 --no-tags upstream {branch}", shell=True, cwd=repo_dir
	)
	if fetch_exit_code != 0:
		raise InvalidRemoteException(f"Failed to fetch from remote named upstream for {app}")
	try:
		contents = subprocess.check_output(
			f"git show upstream/{branch}:{app}/__init__.py",
			shell=True,
			cwd=repo_dir,
			stderr=subprocess.STDOUT,
		)
		contents = contents.decode("utf-8")
	except subprocess.CalledProcessError as e:
		# the file (or ref) does not exist upstream — treat as "no version"
		if b"Invalid object" in e.output:
			return None
		else:
			raise
	return get_version_from_string(contents)
def get_current_frappe_version(bench_path="."):
	"""Major version of the locally installed frappe app; 0 when unreadable."""
	try:
		version = get_current_version("frappe", bench_path=bench_path)
		return get_major_version(version)
	except OSError:
		# frappe not cloned yet / version files missing
		return 0
def get_current_branch(app, bench_path="."):
	"""Return the branch currently checked out in the app's repository."""
	from bench.utils import get_cmd_output
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	# `symbolic-ref -q --short` prints e.g. "develop" (quiet on detached HEAD)
	return get_cmd_output("git symbolic-ref -q --short HEAD", cwd=repo_dir)
@lru_cache(maxsize=5)
def get_required_deps(org, name, branch, deps="hooks.py"):
	"""Fetch the raw text of <name>/<deps> for a GitHub-hosted app.

	Tries the GitHub contents API first (base64-encoded payload). If the API
	responds with an error (signalled by a "message" key, e.g. rate limiting),
	falls back to raw.githubusercontent.com. lru_cache avoids repeat fetches.
	"""
	import requests
	import base64
	git_api_url = f"https://api.github.com/repos/{org}/{name}/contents/{name}/{deps}"
	params = {"ref": branch or "develop"}
	res = requests.get(url=git_api_url, params=params).json()
	if "message" in res:
		# API refused — fall back to the raw file host
		git_url = (
			f"https://raw.githubusercontent.com/{org}/{name}/{params['ref']}/{name}/{deps}"
		)
		return requests.get(git_url).text
	return base64.decodebytes(res["content"].encode()).decode()
def required_apps_from_hooks(required_deps: str, local: bool = False) -> List:
	"""Parse the ``required_apps`` list out of an app's hooks.py.

	``required_deps`` is either the hooks.py text or, when ``local`` is True,
	a path to a hooks.py file to read.

	Returns an empty list when no ``required_apps`` assignment is present.
	(Previously this crashed with ``TypeError: 'NoneType' object is not
	subscriptable`` on the failed regex match.)
	"""
	import ast
	required_apps_re = re.compile(r"required_apps\s+=\s+(.*)")
	if local:
		required_deps = pathlib.Path(required_deps).read_text()
	match = required_apps_re.search(required_deps)
	if match is None:
		return []
	# the captured group is a Python list literal, e.g. "['frappe']"
	return ast.literal_eval(match[1])
def get_remote(app, bench_path="."):
	"""Return the git remote to use for ``app``: "upstream" when configured,
	otherwise the first remote listed; False when no remote exists."""
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	output = subprocess.check_output(
		["git", "remote", "-v"], cwd=repo_dir, stderr=subprocess.STDOUT
	).decode("utf-8")
	if re.findall(r"upstream[\s]+", output):
		return "upstream"
	if not output:
		# empty output => no remotes configured at all
		return False
	# otherwise use the first remote listed
	return output.splitlines()[0].split()[0]
def get_app_name(bench_path: str, folder_name: str) -> str:
	"""Retrieves `name` attribute of app - equivalent to distribution name
	of python package. Fetches from pyproject.toml, setup.cfg or setup.py
	whichever defines it in that order.

	Side effect: when the resolved name differs from ``folder_name`` the app
	directory is renamed to match. Falls back to ``folder_name`` when no name
	can be resolved.
	"""
	app_name = None
	apps_path = os.path.join(os.path.abspath(bench_path), "apps")
	config_py_path = os.path.join(apps_path, folder_name, "setup.cfg")
	setup_py_path = os.path.join(apps_path, folder_name, "setup.py")
	pyproject_path = os.path.join(apps_path, folder_name, "pyproject.toml")
	pyproject = get_pyproject(pyproject_path)
	if pyproject:
		app_name = pyproject.get("project", {}).get("name")
	if not app_name and os.path.exists(config_py_path):
		from setuptools.config import read_configuration
		config = read_configuration(config_py_path)
		app_name = config.get("metadata", {}).get("name")
	if not app_name:
		# retrieve app name from setup.py as fallback
		# NOTE(review): assumes setup.py exists and contains name=... — raises otherwise
		with open(setup_py_path, "rb") as f:
			app_name = re.search(r'name\s*=\s*[\'"](.*)[\'"]', f.read().decode("utf-8"))[1]
	if app_name and folder_name != app_name:
		# keep the on-disk folder in sync with the distribution name
		os.rename(os.path.join(apps_path, folder_name), os.path.join(apps_path, app_name))
		return app_name
	return folder_name
def get_pyproject(pyproject_path: str) -> Optional[dict]:
	"""Parse a pyproject.toml file; returns None when the file is absent."""
	if not os.path.exists(pyproject_path):
		return None
	try:
		# tomli is the backport; tomllib is stdlib from Python 3.11
		from tomli import load
	except ImportError:
		from tomllib import load
	with open(pyproject_path, "rb") as f:
		parsed = load(f)
	return parsed
def check_existing_dir(bench_path, repo_name):
	"""Return ``(exists, path)`` for the would-be clone target apps/<repo_name>."""
	cloned_path = os.path.join(bench_path, "apps", repo_name)
	return os.path.isdir(cloned_path), cloned_path
def get_current_version(app, bench_path="."):
	"""Resolve the installed version of ``app`` by checking, in order:
	pyproject.toml, setup.cfg, the package __init__.py, and — as a legacy
	fallback — setup.py.
	"""
	current_version = None
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	pyproject_path = os.path.join(repo_dir, "pyproject.toml")
	config_path = os.path.join(repo_dir, "setup.cfg")
	init_path = os.path.join(repo_dir, os.path.basename(repo_dir), "__init__.py")
	setup_path = os.path.join(repo_dir, "setup.py")
	try:
		pyproject = get_pyproject(pyproject_path)
		if pyproject:
			current_version = pyproject.get("project", {}).get("version")
		if not current_version and os.path.exists(config_path):
			from setuptools.config import read_configuration
			config = read_configuration(config_path)
			current_version = config.get("metadata", {}).get("version")
		if not current_version:
			with open(init_path) as f:
				current_version = get_version_from_string(f.read())
	except (AttributeError, VersionNotFound):
		# backward compatibility: very old apps only declare version in setup.py
		with open(setup_path) as f:
			current_version = get_version_from_string(f.read(), field="version")
	return current_version
|
2302_79757062/bench
|
bench/utils/app.py
|
Python
|
agpl-3.0
| 8,965
|
# imports - standard imports
import contextlib
import json
import logging
import os
import re
import shutil
import subprocess
import sys
from functools import lru_cache
from glob import glob
from json.decoder import JSONDecodeError
from pathlib import Path
# imports - third party imports
import click
# imports - module imports
import bench
from bench.exceptions import PatchError, ValidationError
from bench.utils import (
exec_cmd,
get_bench_cache_path,
get_bench_name,
get_cmd_output,
log,
which,
)
logger = logging.getLogger(bench.PROJECT_NAME)
@lru_cache(maxsize=None)
def get_env_cmd(cmd: str, bench_path: str = ".") -> str:
	"""Locate ``cmd`` inside the bench virtualenv (env/bin/<cmd>).

	Falls back to a recursive glob to cope with envs that nest an extra
	directory (e.g. env/local/bin, produced by some virtualenv/venv patches).
	Returns the canonical location even when the executable does not exist.
	"""
	canonical = os.path.abspath(
		os.path.join(bench_path, "env", "bin", cmd.strip("*"))
	)
	if os.path.exists(canonical):
		return canonical
	matches = glob(
		os.path.join(bench_path, "env", "**", "bin", cmd), recursive=True
	)
	if matches:
		return os.path.abspath(matches[0])
	return canonical
def get_venv_path(verbose=False, python="python3"):
	"""Return the command used to create virtualenvs ("<python> -m venv"),
	or log an error when the venv module is unavailable."""
	with open(os.devnull, "wb") as devnull:
		return_code = subprocess.call(
			[python, "-m", "venv", "--help"], stdout=devnull
		)
	if return_code == 0:
		return f"{python} -m venv"
	log("venv cannot be found", level=2)
def update_node_packages(bench_path=".", apps=None, verbose=None):
	"""Install node dependencies with yarn (frappe >= v11) or npm (older)."""
	print("Updating node packages...")
	# NOTE(review): distutils is deprecated and removed in Python 3.12;
	# LooseVersion is kept for the "11.x.x-develop" style comparison — verify
	from distutils.version import LooseVersion
	from bench.utils.app import get_develop_version
	v = LooseVersion(get_develop_version("frappe", bench_path=bench_path))
	# After rollup was merged, frappe_version = 10.1
	# if develop_verion is 11 and up, only then install yarn
	if v < LooseVersion("11.x.x-develop"):
		update_npm_packages(bench_path, apps=apps, verbose=verbose)
	else:
		update_yarn_packages(bench_path, apps=apps, verbose=verbose)
def install_python_dev_dependencies(bench_path=".", apps=None, verbose=False):
	"""pip-install dev dependencies for the given apps (default: all installed).

	Dependencies come from [tool.bench.dev-dependencies] in pyproject.toml or,
	failing that, from a legacy dev-requirements.txt file.
	"""
	import bench.cli
	from bench.bench import Bench
	verbose = bench.cli.verbose or verbose
	quiet_flag = "" if verbose else "--quiet"
	# NOTE: this local rebind shadows the `bench` module for the rest of the body
	bench = Bench(bench_path)
	if isinstance(apps, str):
		apps = (apps,)
	elif not apps:
		apps = bench.get_installed_apps()
	for app in apps:
		pyproject_deps = None
		app_path = os.path.join(bench_path, "apps", app)
		pyproject_path = os.path.join(app_path, "pyproject.toml")
		dev_requirements_path = os.path.join(app_path, "dev-requirements.txt")
		if os.path.exists(pyproject_path):
			pyproject_deps = _generate_dev_deps_pattern(pyproject_path)
			if pyproject_deps:
				bench.run(f"{bench.python} -m pip install {quiet_flag} --upgrade {pyproject_deps}")
		if not pyproject_deps and os.path.exists(dev_requirements_path):
			# fall back to the legacy requirements file
			bench.run(
				f"{bench.python} -m pip install {quiet_flag} --upgrade -r {dev_requirements_path}"
			)
def _generate_dev_deps_pattern(pyproject_path):
try:
from tomli import loads
except ImportError:
from tomllib import loads
requirements_pattern = ""
pyroject_config = loads(open(pyproject_path).read())
with contextlib.suppress(KeyError):
for pkg, version in pyroject_config["tool"]["bench"]["dev-dependencies"].items():
op = "==" if "=" not in version else ""
requirements_pattern += f"{pkg}{op}{version} "
return requirements_pattern
def update_yarn_packages(bench_path=".", apps=None, verbose=None):
	"""Run `yarn install` in every app directory that ships a package.json."""
	import bench.cli as bench_cli
	from bench.bench import Bench
	verbose = bench_cli.verbose or verbose
	bench = Bench(bench_path)
	apps = apps or bench.apps
	# NOTE(review): joins bench.name rather than bench_path — verify Bench.name
	# resolves to the bench directory path
	apps_dir = os.path.join(bench.name, "apps")
	# TODO: Check for stuff like this early on only??
	if not which("yarn"):
		print("Please install yarn using below command and try again.")
		print("`npm install -g yarn`")
		return
	for app in apps:
		app_path = os.path.join(apps_dir, app)
		if os.path.exists(os.path.join(app_path, "package.json")):
			click.secho(f"\nInstalling node dependencies for {app}", fg="yellow")
			yarn_install = "yarn install --check-files"
			if verbose:
				yarn_install += " --verbose"
			bench.run(yarn_install, cwd=app_path)
def update_npm_packages(bench_path=".", apps=None, verbose=None):
	"""Merge every app's package.json into one bench-level file and npm-install it."""
	# NOTE(review): relies on bench.cli already being imported as a submodule
	verbose = bench.cli.verbose or verbose
	npm_install = "npm install --verbose" if verbose else "npm install"
	apps_dir = os.path.join(bench_path, "apps")
	package_json = {}
	if not apps:
		apps = os.listdir(apps_dir)
	for app in apps:
		package_json_path = os.path.join(apps_dir, app, "package.json")
		if os.path.exists(package_json_path):
			with open(package_json_path) as f:
				app_package_json = json.loads(f.read())
			# package.json is usually a dict in a dict
			for key, value in app_package_json.items():
				if key not in package_json:
					package_json[key] = value
				else:
					# merge nested dicts, concatenate lists; scalars are overwritten
					if isinstance(value, dict):
						package_json[key].update(value)
					elif isinstance(value, list):
						package_json[key].extend(value)
					else:
						package_json[key] = value
	if package_json == {}:
		# no app had a package.json — fall back to the template shipped with bench
		with open(os.path.join(os.path.dirname(__file__), "package.json")) as f:
			package_json = json.loads(f.read())
	with open(os.path.join(bench_path, "package.json"), "w") as f:
		f.write(json.dumps(package_json, indent=1, sort_keys=True))
	exec_cmd(npm_install, cwd=bench_path)
def migrate_env(python, backup=False):
	"""Rebuild the bench virtualenv with a different ``python`` interpreter.

	Flushes redis caches, optionally archives the old env, creates a fresh
	venv and reinstalls frappe plus every other app in editable mode.
	"""
	import shutil
	from urllib.parse import urlparse
	from bench.bench import Bench
	bench = Bench(".")
	nvenv = "env"
	path = os.getcwd()
	python = which(python)
	pvenv = os.path.join(path, nvenv)
	if python.startswith(pvenv):
		# The supplied python version is in active virtualenv which we are about to nuke.
		click.secho(
			"Python version supplied is present in currently sourced virtual environment.\n"
			"`deactiviate` the current virtual environment before migrating environments.",
			fg="yellow",
		)
		sys.exit(1)
	# Clear Cache before Bench Dies.
	try:
		config = bench.conf
		rredis = urlparse(config["redis_cache"])
		redis = f"{which('redis-cli')} -p {rredis.port}"
		logger.log("Clearing Redis Cache...")
		exec_cmd(f"{redis} FLUSHALL")
		logger.log("Clearing Redis DataBase...")
		exec_cmd(f"{redis} FLUSHDB")
	except Exception:
		# best effort: an unreachable redis must not block the migration
		logger.warning("Please ensure Redis Connections are running or Daemonized.")
	# Backup venv: restore using `virtualenv --relocatable` if needed
	if backup:
		from datetime import datetime
		parch = os.path.join(path, "archived", "envs")
		os.makedirs(parch, exist_ok=True)
		source = os.path.join(path, "env")
		target = parch
		logger.log("Backing up Virtual Environment")
		stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
		dest = os.path.join(path, str(stamp))
		# rename to a timestamped dir first, then move it under archived/envs
		os.rename(source, dest)
		shutil.move(dest, target)
	# Create virtualenv using specified python
	def _install_app(app):
		# editable install from the local apps/ checkout
		app_path = f"-e {os.path.join('apps', app)}"
		exec_cmd(f"{pvenv}/bin/python -m pip install --upgrade {app_path}")
	try:
		logger.log(f"Setting up a New Virtual {python} Environment")
		exec_cmd(f"{python} -m venv {pvenv}")
		# Install frappe first
		_install_app("frappe")
		for app in bench.apps:
			if str(app) != "frappe":
				_install_app(app)
		logger.log(f"Migration Successful to {python}")
	except Exception:
		logger.warning("Python env migration Error", exc_info=True)
		raise
def validate_upgrade(from_ver, to_ver, bench_path="."):
	"""Guard major upgrades to v6+: a node/npm installation must be present."""
	if to_ver < 6:
		return
	if not which("npm") and not which("node") and not which("nodejs"):
		raise Exception("Please install nodejs and npm")
def post_upgrade(from_ver, to_ver, bench_path="."):
	"""Regenerate production configs (redis/supervisor/nginx) after a major upgrade."""
	from bench.bench import Bench
	from bench.config import redis
	from bench.config.nginx import make_nginx_conf
	from bench.config.supervisor import generate_supervisor_config
	conf = Bench(bench_path).conf
	print("-" * 80 + f"Your bench was upgraded to version {to_ver}")
	# only production benches (supervisor-managed) need their configs rebuilt
	if conf.get("restart_supervisor_on_update"):
		redis.generate_config(bench_path=bench_path)
		generate_supervisor_config(bench_path=bench_path)
		make_nginx_conf(bench_path=bench_path)
		print(
			"As you have setup your bench for production, you will have to reload"
			" configuration for nginx and supervisor. To complete the migration, please"
			" run the following commands:\nsudo service nginx restart\nsudo"
			" supervisorctl reload"
		)
def patch_sites(bench_path="."):
	"""Migrate every site on this bench; wraps subprocess failures in PatchError."""
	from bench.bench import Bench
	from bench.utils.system import migrate_site
	for site in Bench(bench_path).sites:
		try:
			migrate_site(site, bench_path=bench_path)
		except subprocess.CalledProcessError:
			raise PatchError
def restart_supervisor_processes(bench_path=".", web_workers=False, _raise=False):
	"""Restart this bench's supervisor groups, escalating to sudo when needed.

	Honors a custom ``supervisor_restart_cmd`` from bench config; otherwise
	derives the group names from `supervisorctl status` output, falling back
	through older group naming schemes for backward compatibility.
	"""
	from bench.bench import Bench
	bench = Bench(bench_path)
	conf = bench.conf
	cmd = conf.get("supervisor_restart_cmd")
	bench_name = get_bench_name(bench_path)
	if cmd:
		bench.run(cmd, _raise=_raise)
	else:
		sudo = ""
		try:
			supervisor_status = get_cmd_output("supervisorctl status", cwd=bench_path)
		except subprocess.CalledProcessError as e:
			if e.returncode == 127:
				# supervisorctl not on PATH at all — nothing we can do
				log("restart failed: Couldn't find supervisorctl in PATH", level=3)
				return
			# non-127 failure: retry with elevated permissions
			sudo = "sudo "
			supervisor_status = get_cmd_output("sudo supervisorctl status", cwd=bench_path)
		if not sudo and (
			"error: <class 'PermissionError'>, [Errno 13] Permission denied" in supervisor_status
		):
			# command ran but the supervisor socket needs elevated permissions
			sudo = "sudo "
			supervisor_status = get_cmd_output("sudo supervisorctl status", cwd=bench_path)
		if web_workers and f"{bench_name}-web:" in supervisor_status:
			groups = [f"{bench_name}-web:\t"]
		elif f"{bench_name}-workers:" in supervisor_status:
			groups = [f"{bench_name}-web:", f"{bench_name}-workers:"]
		# backward compatibility
		elif f"{bench_name}-processes:" in supervisor_status:
			groups = [f"{bench_name}-processes:"]
		# backward compatibility
		else:
			groups = ["frappe:"]
		for group in groups:
			failure = bench.run(f"{sudo}supervisorctl restart {group}", _raise=_raise)
			if failure:
				log(
					f"restarting supervisor group `{group}` failed. Use `bench restart` to retry.",
					level=3,
				)
def restart_systemd_processes(bench_path=".", web_workers=False, _raise=True):
	"""Stop and start every systemd unit required by this bench's target."""
	bench_name = get_bench_name(bench_path)
	# `systemctl show -p Requires` lists the units belonging to the bench target
	exec_cmd(
		f"sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut"
		" -d= -f2)",
		_raise=_raise,
	)
	exec_cmd(
		f"sudo systemctl start -- $(systemctl show -p Requires {bench_name}.target |"
		" cut -d= -f2)",
		_raise=_raise,
	)
def restart_process_manager(bench_path=".", web_workers=False):
	# Only overmind exposes a restart verb among the supported process managers;
	# require both the binary and an active socket before attempting it.
	sock_path = os.path.join(bench_path, ".overmind.sock")
	if not which("overmind") or not os.path.exists(sock_path):
		return
	target = "web" if web_workers else ""
	exec_cmd(f"overmind restart {target}", cwd=bench_path)
def build_assets(bench_path=".", app=None, using_cached=False):
	"""Run `bench build` (optionally scoped to one app) with BENCH_DEVELOPER set."""
	command = f"bench build --app {app}" if app else "bench build"
	env = {"BENCH_DEVELOPER": "1"}
	if using_cached:
		env["USING_CACHED"] = "1"
	exec_cmd(command, cwd=bench_path, env=env)
def handle_version_upgrade(version_upgrade, bench_path, force, reset, conf):
	"""Warn/confirm before a major version upgrade and validate prerequisites.

	``version_upgrade`` is the (is_upgrade, from, to) tuple produced by
	is_version_upgrade(). Also warns about unshallowing when shallow_clone is
	configured but --reset was not passed.
	"""
	from bench.utils import log, pause_exec
	if version_upgrade[0]:
		if force:
			log(
				"""Force flag has been used for a major version change in Frappe and it's apps.
This will take significant time to migrate and might break custom apps.""",
				level=3,
			)
		else:
			print(
				f"""This update will cause a major version change in Frappe/ERPNext from {version_upgrade[1]} to {version_upgrade[2]}.
This would take significant time to migrate and might break custom apps."""
			)
			click.confirm("Do you want to continue?", abort=True)
	if not reset and conf.get("shallow_clone"):
		log(
			"""shallow_clone is set in your bench config.
However without passing the --reset flag, your repositories will be unshallowed.
To avoid this, cancel this operation and run `bench update --reset`.
Consider the consequences of `git reset --hard` on your apps before you run that.
To avoid seeing this warning, set shallow_clone to false in your common_site_config.json
	""",
			level=3,
		)
		pause_exec(seconds=10)
	if version_upgrade[0] or (not version_upgrade[0] and force):
		validate_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)
def update(
	pull: bool = False,
	apps: str = None,
	patch: bool = False,
	build: bool = False,
	requirements: bool = False,
	backup: bool = True,
	compile: bool = True,
	force: bool = False,
	reset: bool = False,
	restart_supervisor: bool = False,
	restart_systemd: bool = False,
):
	"""command: bench update

	Orchestrates a full bench update: run bench patches, back up sites, pull
	app sources, reinstall requirements, migrate sites and rebuild assets.
	When no stage flag is passed, all stages (pull/patch/build/requirements)
	run. Maintenance mode is enabled for the duration of the update.
	"""
	import re
	from bench import patches
	from bench.app import pull_apps
	from bench.bench import Bench
	from bench.config.common_site_config import update_config
	from bench.exceptions import CannotUpdateReleaseBench
	from bench.utils.app import is_version_upgrade
	from bench.utils.system import backup_all_sites
	bench_path = os.path.abspath(".")
	bench = Bench(bench_path)
	patches.run(bench_path=bench_path)
	conf = bench.conf
	if conf.get("release_bench"):
		raise CannotUpdateReleaseBench("Release bench detected, cannot update!")
	if not (pull or patch or build or requirements):
		pull, patch, build, requirements = True, True, True, True
	if apps and pull:
		# accept comma- or space-separated app lists
		apps = [app.strip() for app in re.split(",| ", apps) if app]
	else:
		apps = []
	validate_branch()
	version_upgrade = is_version_upgrade()
	handle_version_upgrade(version_upgrade, bench_path, force, reset, conf)
	conf.update({"maintenance_mode": 1, "pause_scheduler": 1})
	update_config(conf, bench_path=bench_path)
	if backup:
		print("Backing up sites...")
		backup_all_sites(bench_path=bench_path)
	if pull:
		print("Updating apps source...")
		pull_apps(apps=apps, bench_path=bench_path, reset=reset)
	if requirements:
		print("Setting up requirements...")
		bench.setup.requirements()
	if patch:
		print("Patching sites...")
		patch_sites(bench_path=bench_path)
	if build:
		print("Building assets...")
		bench.build()
	if version_upgrade[0] or (not version_upgrade[0] and force):
		post_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)
	bench.reload(web=False, supervisor=restart_supervisor, systemd=restart_systemd)
	# lift maintenance mode now that the update is complete
	conf.update({"maintenance_mode": 0, "pause_scheduler": 0})
	update_config(conf, bench_path=bench_path)
	print(
		"_" * 80 + "\nBench: Deployment tool for Frappe and Frappe Applications"
		" (https://frappe.io/bench).\nOpen source depends on your contributions, so do"
		" give back by submitting bug reports, patches and fixes and be a part of the"
		" community :)"
	)
def clone_apps_from(bench_path, clone_from, update_app=True):
	"""Copy apps (and node_modules) from an existing bench and install them.

	When ``update_app`` is set and an app is a git checkout, it is hard-reset
	and pulled from its preferred remote before installation.
	"""
	from bench.app import install_app
	print(f"Copying apps from {clone_from}...")
	subprocess.check_output(["cp", "-R", os.path.join(clone_from, "apps"), bench_path])
	node_modules_path = os.path.join(clone_from, "node_modules")
	if os.path.exists(node_modules_path):
		print(f"Copying node_modules from {clone_from}...")
		subprocess.check_output(["cp", "-R", node_modules_path, bench_path])
	def setup_app(app):
		# run git reset --hard in each branch, pull latest updates and install_app
		app_path = os.path.join(bench_path, "apps", app)
		# remove .egg-info (stale metadata copied from the source bench)
		subprocess.check_output(["rm", "-rf", app + ".egg-info"], cwd=app_path)
		if update_app and os.path.exists(os.path.join(app_path, ".git")):
			# NOTE(review): check_output returns bytes, so comparing the str
			# "upstream" against these items may never match — verify
			remotes = subprocess.check_output(["git", "remote"], cwd=app_path).strip().split()
			if "upstream" in remotes:
				remote = "upstream"
			else:
				remote = remotes[0]
			print(f"Cleaning up {app}")
			branch = subprocess.check_output(
				["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=app_path
			).strip()
			subprocess.check_output(["git", "reset", "--hard"], cwd=app_path)
			subprocess.check_output(["git", "pull", "--rebase", remote, branch], cwd=app_path)
		install_app(app, bench_path, restart_bench=False)
	with open(os.path.join(clone_from, "sites", "apps.txt")) as f:
		apps = f.read().splitlines()
	for app in apps:
		setup_app(app)
def remove_backups_crontab(bench_path="."):
	"""Remove the scheduled all-site backup job from the frappe user's crontab."""
	from crontab import CronTab
	from bench.bench import Bench
	logger.log("removing backup cronjob")
	bench_dir = os.path.abspath(bench_path)
	user = Bench(bench_dir).conf.get("frappe_user")
	logfile = os.path.join(bench_dir, "logs", "backup.log")
	system_crontab = CronTab(user=user)
	# must match the exact command string used when the job was installed
	backup_command = f"cd {bench_dir} && {sys.argv[0]} --verbose --site all backup"
	job_command = f"{backup_command} >> {logfile} 2>&1"
	system_crontab.remove_all(command=job_command)
def set_mariadb_host(host, bench_path="."):
	# Point every site on this bench at the given MariaDB host.
	update_common_site_config({"db_host": host}, bench_path=bench_path)
def set_redis_cache_host(host, bench_path="."):
	# host is "host:port"; stored as a redis:// URL in common_site_config.json
	update_common_site_config({"redis_cache": f"redis://{host}"}, bench_path=bench_path)
def set_redis_queue_host(host, bench_path="."):
	update_common_site_config({"redis_queue": f"redis://{host}"}, bench_path=bench_path)
def set_redis_socketio_host(host, bench_path="."):
	update_common_site_config({"redis_socketio": f"redis://{host}"}, bench_path=bench_path)
def update_common_site_config(ddict, bench_path="."):
	"""Merge ``ddict`` into sites/common_site_config.json (created if absent)."""
	filename = os.path.join(bench_path, "sites", "common_site_config.json")
	content = {}
	if os.path.exists(filename):
		with open(filename) as f:
			content = json.load(f)
	content.update(ddict)
	with open(filename, "w") as f:
		json.dump(content, f, indent=1, sort_keys=True)
def validate_app_installed_on_sites(app, bench_path="."):
	"""Abort app removal when it is still installed on any site.

	Uses the fast JSON-based check first; falls back to the legacy per-site
	check when the JSON command is unavailable (signalled by None).
	"""
	print("Checking if app installed on active sites...")
	ret = check_app_installed(app, bench_path=bench_path)
	if ret is None:
		check_app_installed_legacy(app, bench_path=bench_path)
	else:
		return ret
def check_app_installed(app, bench_path="."):
	"""Check all sites at once via `bench list-apps --format json`.

	Raises ValidationError if ``app`` is installed on any site. Returns None
	when the command fails or its output is not valid JSON, so the caller can
	fall back to the legacy per-site check.
	"""
	try:
		out = subprocess.check_output(
			["bench", "--site", "all", "list-apps", "--format", "json"],
			# BUGFIX: subprocess.DEVNULL instead of a leaked open(os.devnull) handle
			stderr=subprocess.DEVNULL,
			cwd=bench_path,
		).decode("utf-8")
	except subprocess.CalledProcessError:
		return None
	try:
		apps_sites_dict = json.loads(out)
	except JSONDecodeError:
		return None
	for site, apps in apps_sites_dict.items():
		if app in apps:
			raise ValidationError(f"Cannot remove, app is installed on site: {site}")
def check_app_installed_legacy(app, bench_path="."):
	"""Fallback check: run `bench list-apps` per site and grep for ``app``.

	Exits the process (status 1) when the app is installed on any site.
	"""
	site_path = os.path.join(bench_path, "sites")
	for site in os.listdir(site_path):
		req_file = os.path.join(site_path, site, "site_config.json")
		# only directories with a site_config.json are real sites
		if os.path.exists(req_file):
			out = subprocess.check_output(
				["bench", "--site", site, "list-apps"], cwd=bench_path
			).decode("utf-8")
			# BUGFIX: escape the app name so names containing regex
			# metacharacters cannot distort (or break) the word-boundary match
			if re.search(r"\b" + re.escape(app) + r"\b", out):
				print(f"Cannot remove, app is installed on site: {site}")
				sys.exit(1)
def validate_branch():
	"""Exit with a migration notice when frappe/erpnext is still on 'master'."""
	from bench.bench import Bench
	from bench.utils.app import get_current_branch
	apps = Bench(".").apps
	installed_apps = set(apps)
	# only these two core apps had the master -> version-11 rename
	check_apps = {"frappe", "erpnext"}
	intersection_apps = installed_apps.intersection(check_apps)
	for app in intersection_apps:
		branch = get_current_branch(app)
		if branch == "master":
			print(
				"""'master' branch is renamed to 'version-11' since 'version-12' release.
As of January 2020, the following branches are
version		Frappe			ERPNext
11		version-11		version-11
12		version-12		version-12
13		version-13		version-13
14		develop			develop
Please switch to new branches to get future updates.
To switch to your required branch, run the following commands: bench switch-to-branch [branch-name]"""
			)
			sys.exit(1)
def cache_helper(clear=False, remove_app="", remove_key="") -> None:
	"""Dispatch for `bench cache`: remove matching entries, clear all, or
	(default) list what is cached."""
	if remove_app or remove_key:
		cache_remove(remove_app, remove_key)
	elif clear:
		cache_clear()
	else:
		cache_list()
def cache_list() -> None:
	"""Print a table of cached app tarballs with size and timestamps."""
	from datetime import datetime
	tot_size = 0
	tot_items = 0
	printed_header = False
	for item in get_bench_cache_path("apps").iterdir():
		# only .tar (uncompressed) and .tgz (compressed) entries are cache items
		if item.suffix not in [".tar", ".tgz"]:
			continue
		stat = item.stat()
		size_mb = stat.st_size / 1_000_000
		created = datetime.fromtimestamp(stat.st_ctime)
		accessed = datetime.fromtimestamp(stat.st_atime)
		# everything before the first "." of the file name
		app = item.name.split(".")[0]
		tot_items += 1
		tot_size += stat.st_size
		compressed = item.suffix == ".tgz"
		if not printed_header:
			# header is printed lazily so an empty cache prints nothing but the summary
			click.echo(
				f"{'APP':15} "
				f"{'FILE':90} "
				f"{'SIZE':>13} "
				f"{'COMPRESSED'} "
				f"{'CREATED':19} "
				f"{'ACCESSED':19} "
			)
			printed_header = True
		click.echo(
			f"{app:15} "
			f"{item.name:90} "
			f"{size_mb:10.3f} MB "
			f"{str(compressed):10} "
			f"{created:%Y-%m-%d %H:%M:%S} "
			f"{accessed:%Y-%m-%d %H:%M:%S} "
		)
	if tot_items:
		click.echo(f"Total size {tot_size / 1_000_000:.3f} MB belonging to {tot_items} items")
	else:
		click.echo("No cached items")
def cache_remove(app: str = "", key: str = "") -> None:
	"""Remove cached app tarballs matching ``app`` and/or ``key`` and report
	how much space was reclaimed."""
	removed_count = 0
	removed_bytes = 0
	for entry in get_bench_cache_path("apps").iterdir():
		if not should_remove_item(entry, app, key):
			continue
		removed_count += 1
		removed_bytes += entry.stat().st_size
		entry.unlink(True)
		click.echo(f"Removed {entry.name}")
	if removed_count:
		click.echo(f"Cleared {removed_bytes / 1_000_000:.3f} MB belonging to {removed_count} items")
	else:
		click.echo("No items removed")
def should_remove_item(item: Path, app: str, key: str) -> bool:
	"""Decide whether a cached tarball matches the app/key removal filters.

	Keys are truncated to 10 characters, mirroring how cache file names are
	built. Entries that are not .tar/.tgz tarballs are never removed.
	"""
	if item.suffix not in (".tar", ".tgz"):
		return False
	name = item.name
	short_key = key[:10]
	return bool(
		(app and key and name.startswith(f"{app}-{short_key}."))
		or (app and name.startswith(f"{app}-"))
		or (key and f"-{short_key}." in name)
	)
def cache_clear() -> None:
	"""Delete the entire app-cache directory and report how much was freed."""
	cache_path = get_bench_cache_path("apps")
	item_count = len(os.listdir(cache_path))
	if not item_count:
		click.echo("No cached items")
		return
	freed_bytes = get_dir_size(cache_path)
	shutil.rmtree(cache_path)
	click.echo(f"Cleared {freed_bytes / 1_000_000:.3f} MB belonging to {item_count} items")
def get_dir_size(p: Path) -> int:
return sum(i.stat(follow_symlinks=False).st_size for i in p.iterdir())
|
2302_79757062/bench
|
bench/utils/bench.py
|
Python
|
agpl-3.0
| 21,945
|
from typing import List
import click
from click.core import _check_multicommand
def print_bench_version(ctx, param, value):
	"""Eager click callback: print bench's version and stop further processing."""
	# do nothing during shell completion / resilient parsing
	if ctx.resilient_parsing or not value:
		return
	import bench
	click.echo(bench.VERSION)
	ctx.exit()
class MultiCommandGroup(click.Group):
	def add_command(self, cmd, name=None):
		"""Registers another :class:`Command` with this group. If the name
		is not provided, the name of the command is used.
		Note: This is a custom Group that allows passing a list of names for
		the command name.
		"""
		name = name or cmd.name
		if name is None:
			raise TypeError("Command has no name.")
		_check_multicommand(self, name, cmd, register=True)
		try:
			self.commands[name] = cmd
		except TypeError:
			# a list of aliases is unhashable as a dict key: register each alias
			if isinstance(name, list):
				for _name in name:
					self.commands[_name] = cmd
class SugaredOption(click.Option):
	"""A click Option that is only legal when certain other options are set.

	The dependency list is passed via the ``only_if_set`` keyword and is
	appended to the option's help text.
	"""
	def __init__(self, *args, **kwargs):
		self.only_if_set: List = kwargs.pop("only_if_set")
		dependency_note = f". Option is acceptable only if {', '.join(self.only_if_set)} is used."
		kwargs["help"] = kwargs.get("help", "") + dependency_note
		super().__init__(*args, **kwargs)
	def handle_parse_result(self, ctx, opts, args):
		if self.name in opts and self.only_if_set:
			for required_opt in self.only_if_set:
				if required_opt in opts:
					continue
				# a missing dependency is tolerated only when it has a default
				defaults_set = [p.default for p in ctx.command.params if p.name == required_opt]
				if not defaults_set:
					raise click.UsageError(f"Illegal Usage: Set '{required_opt}' before '{self.name}'.")
		return super().handle_parse_result(ctx, opts, args)
def use_experimental_feature(ctx, param, value):
	"""Click callback enabling opt-in experimental behaviours by name."""
	if not value:
		return
	if value == "dynamic-feed":
		import bench.cli
		bench.cli.dynamic_feed = True
		bench.cli.verbose = True
	else:
		from bench.exceptions import FeatureDoesNotExistError
		raise FeatureDoesNotExistError(f"Feature {value} does not exist")
	from bench.cli import is_envvar_warn_set
	if is_envvar_warn_set:
		# user explicitly silenced experimental-feature warnings via env var
		return
	click.secho(
		"WARNING: bench is using it's new CLI rendering engine. This behaviour has"
		f" been enabled by passing --{value} in the command. This feature is"
		" experimental and may not be implemented for all commands yet.",
		fg="yellow",
	)
def setup_verbosity(ctx, param, value):
	# Click callback: flip the global verbose flag when -v/--verbose is passed.
	if not value:
		return
	import bench.cli
	bench.cli.verbose = True
|
2302_79757062/bench
|
bench/utils/cli.py
|
Python
|
agpl-3.0
| 2,320
|
# imports - standard imports
import sys
from io import StringIO
# imports - third party imports
import click
# imports - module imports
import bench
class Capturing(list):
	"""
	Context manager that captures everything written to stdout while active
	and appends the captured lines to this list.

	with Capturing() as output:
		subprocess.check_output("ls", shell=True)
	print(output)
	# ["b'Applications\\nDesktop\\nDocuments\\nDownloads\\n'"]
	"""
	def __enter__(self):
		self._stdout = sys.stdout
		self._stringio = StringIO()
		sys.stdout = self._stringio
		return self
	def __exit__(self, *args):
		self.extend(self._stringio.getvalue().splitlines())
		# free the captured-text buffer
		del self._stringio
		sys.stdout = self._stdout
class Rendering:
	"""Context manager that renders a live "dynamic feed" progress line for a
	job/step and flips its marker to a checkmark on successful exit.

	All rendering is skipped unless the dynamic feed is enabled in bench.cli.
	"""
	def __init__(self, success, title, is_parent, args, kwargs):
		import bench.cli
		self.dynamic_feed = bench.cli.from_command_line and bench.cli.dynamic_feed
		if not self.dynamic_feed:
			return
		try:
			# prefer the first positional argument's attributes for formatting
			# the title/success templates; fall back to keyword args
			self.kw = args[0].__dict__
		except Exception:
			self.kw = kwargs
		self.is_parent = is_parent
		self.title = title
		self.success = success
	def __enter__(self, *args, **kwargs):
		if not self.dynamic_feed:
			return
		_prefix = click.style("⏼", fg="bright_yellow")
		# child steps are indented one space under their parent job
		_hierarchy = "" if self.is_parent else " "
		self._title = self.title.format(**self.kw)
		click.secho(f"{_hierarchy}{_prefix} {self._title}")
		# remember the line so render_screen() can update it in place later
		bench.LOG_BUFFER.append(
			{
				"message": self._title,
				"prefix": _prefix,
				"color": None,
				"is_parent": self.is_parent,
			}
		)
	def __exit__(self, *args, **kwargs):
		if not self.dynamic_feed:
			return
		self._prefix = click.style("✔", fg="green")
		self._success = self.success.format(**self.kw)
		self.render_screen()
	def render_screen(self):
		# redraw the whole buffered feed, replacing this entry's line with
		# its success text and checkmark
		click.clear()
		for l in bench.LOG_BUFFER:
			if l["message"] == self._title:
				l["prefix"] = self._prefix
				l["message"] = self._success
			_hierarchy = "" if l.get("is_parent") else " "
			click.secho(f'{_hierarchy}{l["prefix"]} {l["message"]}', fg=l["color"])
def job(title: str = None, success: str = None):
	"""Decorator for an atomic job within a given process.

	For instance, the `get-app` command consists of two jobs: `initializing bench`
	and `fetching and installing app`.

	:param title: message template rendered while the job runs (formatted with
		the wrapped callable's first positional argument's __dict__ or kwargs)
	:param success: message template rendered once the job completes
	"""

	def innfn(fn):
		from functools import wraps

		# wraps() preserves the decorated callable's __name__/__doc__,
		# which the original wrapper lost.
		@wraps(fn)
		def wrapper_fn(*args, **kwargs):
			# Render as a top-level (parent) entry in the feed.
			with Rendering(
				success=success,
				title=title,
				is_parent=True,
				args=args,
				kwargs=kwargs,
			):
				return fn(*args, **kwargs)

		return wrapper_fn

	return innfn
def step(title: str = None, success: str = None):
	"""Decorator for the smallest possible atomic step in a given operation.

	For instance, `building assets` is a step in the update operation.

	:param title: message template rendered while the step runs
	:param success: message template rendered once the step completes
	"""

	def innfn(fn):
		from functools import wraps

		# wraps() preserves the decorated callable's __name__/__doc__,
		# which the original wrapper lost.
		@wraps(fn)
		def wrapper_fn(*args, **kwargs):
			# Render as a child (non-parent) entry in the feed.
			with Rendering(
				success=success,
				title=title,
				is_parent=False,
				args=args,
				kwargs=kwargs,
			):
				return fn(*args, **kwargs)

		return wrapper_fn

	return innfn
|
2302_79757062/bench
|
bench/utils/render.py
|
Python
|
agpl-3.0
| 2,890
|
# imports - standard imports
import grp
import os
import pwd
import shutil
import sys
# imports - module imports
import bench
from bench.utils import (
exec_cmd,
get_process_manager,
log,
run_frappe_cmd,
sudoers_file,
which,
is_valid_frappe_branch,
)
from bench.utils.bench import build_assets, clone_apps_from
from bench.utils.render import job
@job(title="Initializing Bench {path}", success="Bench {path} initialized")
def init(
	path,
	apps_path=None,
	no_procfile=False,
	no_backups=False,
	frappe_path=None,
	frappe_branch=None,
	verbose=False,
	clone_from=None,
	skip_redis_config_generation=False,
	clone_without_update=False,
	skip_assets=False,
	python="python3",
	install_app=None,
	dev=False,
):
	"""Initialize a new bench directory

	* create a bench directory in the given path
	* setup logging for the bench
	* setup env for the bench
	* setup config (dir/pids/redis/procfile) for the bench
	* setup patches.txt for bench
	* clone & install frappe
	* install python & node dependencies
	* build assets
	* setup backups crontab
	"""
	# Use print("\033c", end="") to clear entire screen after each step and re-render each list
	# another way => https://stackoverflow.com/a/44591228/10309266

	import bench.cli
	from bench.app import get_app, install_apps_from_path
	from bench.bench import Bench

	verbose = bench.cli.verbose or verbose

	# NOTE: this rebinds the local name `bench` (previously the imported
	# module) to the Bench instance; bench.cli was already read above, so
	# the shadowing is harmless here — but don't reorder these lines.
	bench = Bench(path)

	bench.setup.dirs()
	bench.setup.logging()
	bench.setup.env(python=python)
	config = {}
	if dev:
		config["developer_mode"] = 1
	bench.setup.config(
		redis=not skip_redis_config_generation,
		procfile=not no_procfile,
		additional_config=config,
	)
	bench.setup.patches()

	# local apps: copy/clone apps from an existing bench on disk
	if clone_from:
		clone_apps_from(
			bench_path=path, clone_from=clone_from, update_app=not clone_without_update
		)

	# remote apps: fetch frappe itself from its git remote
	else:
		frappe_path = frappe_path or "https://github.com/frappe/frappe.git"

		is_valid_frappe_branch(frappe_path=frappe_path, frappe_branch=frappe_branch)
		get_app(
			frappe_path,
			branch=frappe_branch,
			bench_path=path,
			skip_assets=True,
			verbose=verbose,
			resolve_deps=False,
		)

		# fetch remote apps using config file - deprecate this!
		if apps_path:
			install_apps_from_path(apps_path, bench_path=path)

	# getting app on bench init using --install-app
	if install_app:
		get_app(
			install_app,
			branch=frappe_branch,
			bench_path=path,
			skip_assets=True,
			verbose=verbose,
			resolve_deps=False,
		)

	if not skip_assets:
		build_assets(bench_path=path)

	if not no_backups:
		bench.setup.backups()
def setup_sudoers(user):
	"""Install a sudoers entry letting `user` manage web services without a password.

	Renders the "frappe_sudoers" template (with resolved service/systemctl/
	nginx/certbot paths) into bench's sudoers_file with mode 0440. Creates
	/etc/sudoers.d and the #includedir directive on first run. Must be run
	with privileges to write under /etc.
	"""
	from bench.config.lets_encrypt import get_certbot_path

	if not os.path.exists("/etc/sudoers.d"):
		os.makedirs("/etc/sudoers.d")

		# /etc/sudoers did not reference sudoers.d yet; append the include.
		# If /etc/sudoers itself didn't exist, the append creates it and we
		# must also fix its permissions.
		set_permissions = not os.path.exists("/etc/sudoers")
		with open("/etc/sudoers", "a") as f:
			f.write("\n#includedir /etc/sudoers.d\n")

		if set_permissions:
			os.chmod("/etc/sudoers", 0o440)

	template = bench.config.env().get_template("frappe_sudoers")
	frappe_sudoers = template.render(
		**{
			"user": user,
			"service": which("service"),
			"systemctl": which("systemctl"),
			"nginx": which("nginx"),
			"certbot": get_certbot_path(),
		}
	)

	with open(sudoers_file, "w") as f:
		f.write(frappe_sudoers)

	# sudo refuses world-readable sudoers files; 0440 is required.
	os.chmod(sudoers_file, 0o440)
	log(f"Sudoers was set up for user {user}", level=1)
def start(no_dev=False, concurrency=None, procfile=None, no_prefix=False, procman=None):
	"""Replace the current process with the process manager running the Procfile.

	:param no_dev: when False, sets DEV_SERVER=true for the dev server
	:param concurrency: passed through as `-c` (e.g. "web=1,worker=2")
	:param procfile: alternate Procfile path, passed as `-f`
	:param no_prefix: suppress per-process name prefixes in output
	:param procman: explicit process manager binary; otherwise auto-detected
	:raises Exception: when no process manager can be found
	"""
	program = which(procman) if procman else get_process_manager()

	if not program:
		raise Exception("No process manager found")

	os.environ["PYTHONUNBUFFERED"] = "true"
	if not no_dev:
		os.environ["DEV_SERVER"] = "true"

	command = [program, "start"]
	if concurrency:
		command.extend(["-c", concurrency])

	if procfile:
		command.extend(["-f", procfile])

	if no_prefix:
		command.extend(["--no-prefix"])

	# execv never returns: bench's process *becomes* the process manager.
	os.execv(program, command)
def migrate_site(site, bench_path="."):
	"""Run `bench --site <site> migrate` for the given site."""
	migrate_args = ("--site", site, "migrate")
	run_frappe_cmd(*migrate_args, bench_path=bench_path)
def backup_site(site, bench_path="."):
	"""Run `bench --site <site> backup` for the given site."""
	backup_args = ("--site", site, "backup")
	run_frappe_cmd(*backup_args, bench_path=bench_path)
def backup_all_sites(bench_path="."):
	"""Take a backup of every site hosted on the bench."""
	from bench.bench import Bench

	sites = Bench(bench_path).sites
	for site in sites:
		backup_site(site, bench_path=bench_path)
def fix_prod_setup_perms(bench_path=".", frappe_user=None):
	"""Chown every file under logs/* and config/* (relative to cwd) to frappe_user.

	:param bench_path: bench directory used to resolve frappe_user from config
	:param frappe_user: explicit user name; falls back to the bench config
	:raises SystemExit: when no frappe user can be determined
	"""
	from glob import glob

	from bench.bench import Bench

	frappe_user = frappe_user or Bench(bench_path).conf.get("frappe_user")

	if not frappe_user:
		print("frappe user not set")
		sys.exit(1)

	paths = [path for glob_name in ("logs/*", "config/*") for path in glob(glob_name)]
	if not paths:
		return

	# Resolve uid/gid once, not once per file: getpwnam/getgrnam hit the
	# user database on every call. Resolution is skipped entirely when no
	# files match (same as the original behavior).
	uid = pwd.getpwnam(frappe_user).pw_uid
	gid = grp.getgrnam(frappe_user).gr_gid
	for path in paths:
		os.chown(path, uid, gid)
def setup_fonts():
	"""Install the frappe fonts system-wide (idempotent via /etc/fonts_backup).

	Clones frappe/fonts into /tmp, moves the existing /etc/fonts and
	/usr/share/fonts aside as *_backup, swaps in the cloned directories and
	rebuilds the font cache. Requires permission to rename system dirs.
	"""
	fonts_path = os.path.join("/tmp", "fonts")

	# A previous run already moved the originals aside; nothing to do.
	if os.path.exists("/etc/fonts_backup"):
		return

	exec_cmd("git clone https://github.com/frappe/fonts.git", cwd="/tmp")
	os.rename("/etc/fonts", "/etc/fonts_backup")
	os.rename("/usr/share/fonts", "/usr/share/fonts_backup")
	os.rename(os.path.join(fonts_path, "etc_fonts"), "/etc/fonts")
	os.rename(os.path.join(fonts_path, "usr_share_fonts"), "/usr/share/fonts")
	shutil.rmtree(fonts_path)
	exec_cmd("fc-cache -fv")
|
2302_79757062/bench
|
bench/utils/system.py
|
Python
|
agpl-3.0
| 5,151
|
# imports - standard imports
import itertools
import json
import os
def update_translations_p(args):
	"""multiprocessing.Pool worker: download one (app, lang) pair.

	HTTP failures are reported to stdout instead of crashing the pool.
	"""
	import requests

	try:
		update_translations(*args)
	except requests.exceptions.HTTPError:
		app, lang = args[0], args[1]
		print("Download failed for", app, lang)
def download_translations_p():
	"""Download translations for frappe & erpnext in parallel (one worker per CPU)."""
	import multiprocessing

	langs = get_langs()
	apps = ("frappe", "erpnext")
	args = list(itertools.product(apps, langs))
	# Context manager terminates the pool's worker processes even if a
	# download raises — the original leaked the pool (no close()/join()).
	with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
		pool.map(update_translations_p, args)
def download_translations():
	"""Download translations for frappe & erpnext sequentially."""
	langs = get_langs()
	apps = ("frappe", "erpnext")
	pairs = itertools.product(apps, langs)
	for app, lang in pairs:
		update_translations(app, lang)
def get_langs():
	"""Return the language codes from frappe's bundled languages.json."""
	lang_file = "apps/frappe/frappe/geo/languages.json"
	with open(lang_file) as f:
		entries = json.load(f)
	return [entry["code"] for entry in entries]
def update_translations(app, lang):
	"""Stream <app>-<lang>.csv from translate.erpnext.com into the app's translations dir.

	:raises requests.exceptions.HTTPError: on a non-2xx response
	"""
	import requests

	translations_dir = os.path.join("apps", app, app, "translations")
	csv_file = os.path.join(translations_dir, f"{lang}.csv")
	url = f"https://translate.erpnext.com/files/{app}-{lang}.csv"

	response = requests.get(url, stream=True)
	response.raise_for_status()

	with open(csv_file, "wb") as f:
		for chunk in response.iter_content(chunk_size=1024):
			# filter out keep-alive new chunks
			if chunk:
				f.write(chunk)
				f.flush()

	print("downloaded for", app, lang)
|
2302_79757062/bench
|
bench/utils/translation.py
|
Python
|
agpl-3.0
| 1,333
|
# Bash completion entry point for the `bench` CLI. Merges click's own
# completion output with frappe's subcommands (and site names for --site)
# when invoked from inside a bench directory.
_bench_completion() {
	# Complete commands using click bashcomplete
	COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \
	               COMP_CWORD=$COMP_CWORD \
	               _BENCH_COMPLETE=complete $1 ) )
	if [ -d "sites" ]; then
		# Also add frappe commands if present
		# bench_helper.py expects to be executed from "sites" directory
		cd sites
		# All frappe commands are subcommands under "bench frappe"
		# Frappe is only installed in virtualenv "env" so use appropriate python executable
		COMPREPLY+=( $( COMP_WORDS="bench frappe "${COMP_WORDS[@]:1} \
		                COMP_CWORD=$(($COMP_CWORD+1)) \
		                _BENCH_COMPLETE=complete ../env/bin/python ../apps/frappe/frappe/utils/bench_helper.py ) )
		# If the word before the current cursor position in command typed so far is "--site" then only list sites
		if [ ${COMP_WORDS[COMP_CWORD-1]} == "--site" ]; then
			# Each site is a directory containing a site_config.json.
			COMPREPLY=( $( ls -d ./*/site_config.json | cut -f 2 -d "/" | xargs echo ) )
		fi
		# Get out of sites directory now
		cd ..
	fi
	return 0
}
# Only support bash and zsh
# Register _bench_completion for the `bench` command; `-o default` falls
# back to filename completion when the function yields no matches.
if [ -n "$BASH" ] ; then
	complete -F _bench_completion -o default bench;
elif [ -n "$ZSH_VERSION" ]; then
	# Use zsh in bash compatibility mode
	autoload bashcompinit
	bashcompinit
	complete -F _bench_completion -o default bench;
fi
|
2302_79757062/bench
|
completion.sh
|
Shell
|
agpl-3.0
| 1,415
|
#!/usr/bin/env python3
import argparse
import fileinput
import logging
import os
import platform
import subprocess
import sys
import time
import urllib.request
from shutil import move, unpack_archive, which
from typing import Dict
# Log everything at INFO and above to ./easy-install.log, truncating the
# file on each run ("w" filemode).
logging.basicConfig(
	filename="easy-install.log",
	filemode="w",
	format="%(asctime)s - %(levelname)s - %(message)s",
	level=logging.INFO,
)
def cprint(*args, level: int = 1):
	"""
	logs colorful messages
	level = 1 : RED
	level = 2 : GREEN
	level = 3 : YELLOW

	default level = 1
	"""
	RED = "\033[31m"
	GREEN = "\33[92m"
	YELLOW = "\33[93m"
	RESET = "\033[0m"
	message = " ".join(map(str, args))
	# Unknown levels print nothing (matches original behavior).
	color_for_level = {1: RED, 2: GREEN, 3: YELLOW}
	color = color_for_level.get(level)
	if color is not None:
		print(color, message, RESET)
def clone_frappe_docker_repo() -> None:
	"""Download frappe_docker's main branch as a zip and unpack it to ./frappe_docker."""
	archive = "frappe_docker.zip"
	try:
		urllib.request.urlretrieve(
			"https://github.com/frappe/frappe_docker/archive/refs/heads/main.zip",
			archive,
		)
		logging.info("Downloaded frappe_docker zip file from GitHub")

		# Unzipping the frappe_docker.zip creates a folder "frappe_docker-main"
		unpack_archive(archive, ".")
		move("frappe_docker-main", "frappe_docker")
		logging.info("Unzipped and Renamed frappe_docker")

		os.remove(archive)
		logging.info("Removed the downloaded zip file")
	except Exception as e:
		logging.error("Download and unzip failed", exc_info=True)
		cprint("\nCloning frappe_docker Failed\n\n", "[ERROR]: ", e, level=1)
def get_from_env(dir, file) -> Dict:
	"""Parse KEY=VALUE pairs from the env file at ``dir/file``.

	Blank lines and lines beginning with '#' are skipped; only the first
	'=' splits key from value, so values may contain '='.
	"""
	parsed = {}
	with open(os.path.join(dir, file)) as env_file:
		for line in env_file:
			# NOTE: comment check intentionally runs on the raw line
			# (only lines that *begin* with '#'), matching the original.
			if line.startswith("#") or not line.strip():
				continue
			key, value = line.strip().split("=", 1)
			parsed[key] = value
	return parsed
def write_to_env(
	wd: str,
	sites,
	db_pass: str,
	admin_pass: str,
	email: str,
	erpnext_version: str = None,
) -> None:
	"""Write the compose `.env` file into ``wd``.

	:param wd: directory containing example.env; .env is written here
	:param sites: site names, backtick-quoted into the SITES variable
	:param db_pass: MariaDB root password
	:param admin_pass: site Administrator password
	:param email: Let's Encrypt contact email
	:param erpnext_version: override; defaults to example.env's ERPNEXT_VERSION
	"""
	quoted_sites = ",".join([f"`{site}`" for site in sites]).strip(",")
	example_env = get_from_env(wd, "example.env")
	erpnext_version = erpnext_version or example_env["ERPNEXT_VERSION"]

	env_lines = [
		f"ERPNEXT_VERSION={erpnext_version}\n",  # defaults to latest version of ERPNext
		f"DB_PASSWORD={db_pass}\n",
		"DB_HOST=db\n",
		"DB_PORT=3306\n",
		"REDIS_CACHE=redis-cache:6379\n",
		"REDIS_QUEUE=redis-queue:6379\n",
		"REDIS_SOCKETIO=redis-socketio:6379\n",
		f"LETSENCRYPT_EMAIL={email}\n",
		f"SITE_ADMIN_PASS={admin_pass}\n",
		f"SITES={quoted_sites}\n",
	]
	with open(os.path.join(wd, ".env"), "w") as f:
		f.writelines(env_lines)
def generate_pass(length: int = 12) -> str:
	"""Generate random hash using best available randomness source."""
	import math
	import secrets

	# A falsy length (e.g. 0) falls back to 56 characters.
	if not length:
		length = 56

	n_bytes = math.ceil(length / 2)
	return secrets.token_hex(n_bytes)[:length]
def check_repo_exists() -> bool:
	"""True when ./frappe_docker exists in the current working directory."""
	repo_path = os.path.join(os.getcwd(), "frappe_docker")
	return os.path.exists(repo_path)
def setup_prod(project: str, sites, email: str, version: str = None, image = None) -> None:
	"""Bring up a production Frappe/ERPNext stack with docker compose.

	Renders ~/<project>-compose.yml from frappe_docker's compose overrides
	(mariadb, redis, https), manages .env/passwords (generated on first run,
	reused afterwards), starts the stack and creates each site. When
	frappe_docker is missing, installs Docker, clones the repo and recurses
	once.

	:param project: compose project name
	:param sites: site names; defaults to ["site1.localhost"] when empty
	:param email: Let's Encrypt contact email
	:param version: ERPNext version override for .env
	:param image: custom image to substitute for frappe/erpnext
	"""
	if len(sites) == 0:
		sites = ["site1.localhost"]
	if check_repo_exists():
		compose_file_name = os.path.join(os.path.expanduser("~"), f"{project}-compose.yml")
		docker_repo_path = os.path.join(os.getcwd(), "frappe_docker")
		cprint(
			"\nPlease refer to .example.env file in the frappe_docker folder to know which keys to set\n\n",
			level=3,
		)
		admin_pass = ""
		db_pass = ""
		with open(compose_file_name, "w") as f:
			# Writing to compose file
			if not os.path.exists(os.path.join(docker_repo_path, ".env")):
				# First run: generate credentials, write .env and persist
				# the passwords to ~/passwords.txt for the operator.
				admin_pass = generate_pass()
				db_pass = generate_pass(9)
				write_to_env(docker_repo_path, sites, db_pass, admin_pass, email, version)
				cprint(
					"\nA .env file is generated with basic configs. Please edit it to fit to your needs \n",
					level=3,
				)
				with open(os.path.join(os.path.expanduser("~"), "passwords.txt"), "w") as en:
					en.writelines(f"ADMINISTRATOR_PASSWORD={admin_pass}\n")
					en.writelines(f"MARIADB_ROOT_PASSWORD={db_pass}\n")
			else:
				# Subsequent runs: reuse credentials from the existing .env.
				env = get_from_env(docker_repo_path, ".env")
				admin_pass = env["SITE_ADMIN_PASS"]
				db_pass = env["DB_PASSWORD"]
			try:
				# TODO: Include flags for non-https and non-erpnext installation
				subprocess.run(
					[
						which("docker"),
						"compose",
						"--project-name",
						project,
						"-f",
						"compose.yaml",
						"-f",
						"overrides/compose.mariadb.yaml",
						"-f",
						"overrides/compose.redis.yaml",
						# "-f", "overrides/compose.noproxy.yaml", TODO: Add support for local proxying without HTTPs
						"-f",
						"overrides/compose.https.yaml",
						"--env-file",
						".env",
						"config",
					],
					cwd=docker_repo_path,
					stdout=f,
					check=True,
				)
			except Exception:
				logging.error("Docker Compose generation failed", exc_info=True)
				cprint("\nGenerating Compose File failed\n")
				sys.exit(1)
		# Use custom image
		if image:
			for line in fileinput.input(compose_file_name, inplace=True):
				if "image: frappe/erpnext" in line:
					line = line.replace("image: frappe/erpnext", f"image: {image}")
				sys.stdout.write(line)
		try:
			# Starting with generated compose file
			subprocess.run(
				[
					which("docker"),
					"compose",
					"-p",
					project,
					"-f",
					compose_file_name,
					"up",
					"-d",
				],
				check=True,
			)
			logging.info(f"Docker Compose file generated at ~/{project}-compose.yml")
		except Exception as e:
			logging.error("Prod docker-compose failed", exc_info=True)
			cprint(" Docker Compose failed, please check the container logs\n", e)
			sys.exit(1)
		for sitename in sites:
			create_site(sitename, project, db_pass, admin_pass)
	else:
		install_docker()
		clone_frappe_docker_repo()
		setup_prod(project, sites, email, version, image)  # Recursive
def setup_dev_instance(project: str):
	"""Start the devcontainer compose stack for local Frappe development.

	Requires frappe_docker to be cloned; otherwise installs Docker, clones
	the repo and recurses once.

	:param project: compose project name
	"""
	if check_repo_exists():
		try:
			subprocess.run(
				[
					"docker",
					"compose",
					"-f",
					"devcontainer-example/docker-compose.yml",
					"--project-name",
					project,
					"up",
					"-d",
				],
				cwd=os.path.join(os.getcwd(), "frappe_docker"),
				check=True,
			)
			cprint(
				"Please go through the Development Documentation: https://github.com/frappe/frappe_docker/tree/main/docs/development.md to fully complete the setup.",
				level=2,
			)
			logging.info("Development Setup completed")
		except Exception as e:
			logging.error("Dev Environment setup failed", exc_info=True)
			cprint("Setting Up Development Environment Failed\n", e)
	else:
		install_docker()
		clone_frappe_docker_repo()
		setup_dev_instance(project)  # Recursion on goes brrrr
def install_docker():
	"""Install Docker via the get.docker.com convenience script (Linux only).

	No-op when docker is already on PATH. Exits with status 1 on macOS /
	Windows (manual install required) or when any install step fails.
	"""
	if which("docker") is not None:
		return

	cprint("Docker is not installed, Installing Docker...", level=3)
	logging.info("Docker not found, installing Docker")

	if platform.system() == "Darwin" or platform.system() == "Windows":
		print(
			f"""
			This script doesn't install Docker on {"Mac" if platform.system()=="Darwin" else "Windows"}.
			Please go through the Docker Installation docs for your system and run this script again"""
		)
		logging.debug("Docker setup failed due to platform is not Linux")
		sys.exit(1)

	try:
		ps = subprocess.run(
			["curl", "-fsSL", "https://get.docker.com"],
			capture_output=True,
			check=True,
		)
		# NOTE(review): no check=True on this run — a failing install script
		# is only caught indirectly by the usermod step below; confirm intended.
		subprocess.run(["/bin/bash"], input=ps.stdout, capture_output=True)
		# Allow the invoking user to talk to the docker daemon without sudo.
		subprocess.run(
			["sudo", "usermod", "-aG", "docker", str(os.getenv("USER"))], check=True
		)
		cprint("Waiting Docker to start", level=3)
		time.sleep(10)
		subprocess.run(["sudo", "systemctl", "restart", "docker.service"], check=True)
	except Exception as e:
		logging.error("Installing Docker failed", exc_info=True)
		cprint("Failed to Install Docker\n", e)
		cprint("\n Try Installing Docker Manually and re-run this script again\n")
		sys.exit(1)
def create_site(
	sitename: str,
	project: str,
	db_pass: str,
	admin_pass: str,
):
	"""Create and install an ERPNext site inside the running compose stack.

	:param sitename: site to create (set as default)
	:param project: compose project name
	:param db_pass: MariaDB root password
	:param admin_pass: Administrator password for the new site
	"""
	cprint(f"\nCreating site: {sitename} \n", level=3)

	new_site_command = [
		which("docker"),
		"compose",
		"-p",
		project,
		"exec",
		"backend",
		"bench",
		"new-site",
		sitename,
		"--no-mariadb-socket",
		"--db-root-password",
		db_pass,
		"--admin-password",
		admin_pass,
		"--install-app",
		"erpnext",
		"--set-default",
	]
	try:
		subprocess.run(new_site_command, check=True)
		logging.info("New site creation completed")
	except Exception as e:
		logging.error(f"Bench site creation failed for {sitename}", exc_info=True)
		cprint(f"Bench Site creation failed for {sitename}\n", e)
if __name__ == "__main__":
	# CLI: choose between dev (-d) and prod (-p) setup; prod additionally
	# needs --email (for Let's Encrypt) and accepts sites/image/version.
	parser = argparse.ArgumentParser(description="Install Frappe with Docker")
	parser.add_argument(
		"-p", "--prod", help="Setup Production System", action="store_true"
	)
	parser.add_argument(
		"-d", "--dev", help="Setup Development System", action="store_true"
	)
	parser.add_argument(
		"-s",
		"--sitename",
		help="Site Name(s) for your production bench",
		default=[],
		action="append",
		dest="sites",
	)
	parser.add_argument("-n", "--project", help="Project Name", default="frappe")
	parser.add_argument("-i", "--image", help="Full Image Name")
	parser.add_argument(
		# --email is only mandatory when --prod was passed on the command line.
		"--email", help="Add email for the SSL.", required="--prod" in sys.argv
	)
	parser.add_argument(
		"-v", "--version", help="ERPNext version to install, defaults to latest stable"
	)
	args = parser.parse_args()
	if args.dev:
		cprint("\nSetting Up Development Instance\n", level=2)
		logging.info("Running Development Setup")
		setup_dev_instance(args.project)
	elif args.prod:
		cprint("\nSetting Up Production Instance\n", level=2)
		logging.info("Running Production Setup")
		# Reject placeholder addresses — Let's Encrypt won't accept them.
		if "example.com" in args.email:
			cprint("Emails with example.com not acceptable", level=1)
			sys.exit(1)
		setup_prod(args.project, args.sites, args.email, args.version, args.image)
	else:
		parser.print_help()
|
2302_79757062/bench
|
easy-install.py
|
Python
|
agpl-3.0
| 9,743
|
# App version (read by frappe's app metadata) and human-readable app title.
__version__ = "2.0.0-dev"
__title__ = "Frappe CRM"
|
2302_79757062/crm
|
crm/__init__.py
|
Python
|
agpl-3.0
| 53
|
from bs4 import BeautifulSoup
import frappe
from frappe.translate import get_all_translations
from frappe.utils import validate_email_address, split_emails, cstr
from frappe.utils.telemetry import POSTHOG_HOST_FIELD, POSTHOG_PROJECT_FIELD
@frappe.whitelist(allow_guest=True)
def get_translations():
	"""Return all translations for the caller's language.

	Logged-in users get their User.language; guests get the System
	Settings default language.
	"""
	if frappe.session.user == "Guest":
		language = frappe.db.get_single_value("System Settings", "language")
	else:
		language = frappe.db.get_value("User", frappe.session.user, "language")

	return get_all_translations(language)
@frappe.whitelist()
def get_user_signature():
	"""Return the HTML snippet to append as the user's email signature.

	Prefers the session user's personal email_signature; falls back to the
	default outgoing Email Account's signature (when add_signature is on).
	Returns None when no signature is configured, else a <br><p> wrapper
	around the raw signature HTML.
	"""
	user = frappe.session.user
	user_email_signature = (
		frappe.db.get_value(
			"User",
			user,
			"email_signature",
		)
		if user
		else None
	)

	signature = user_email_signature or frappe.db.get_value(
		"Email Account",
		{"default_outgoing": 1, "add_signature": 1},
		"signature",
	)

	if not signature:
		return

	# Try to extract the Quill editor div from the stored signature HTML.
	soup = BeautifulSoup(signature, "html.parser")
	html_signature = soup.find("div", {"class": "ql-editor read-mode"})
	_signature = None
	if html_signature:
		_signature = html_signature.renderContents()

	# NOTE(review): _signature only feeds this truthiness check; the raw
	# `signature` is what gets embedded below — confirm that's intended.
	content = ""
	if (cstr(_signature) or signature):
		content = f'<br><p class="signature">{signature}</p>'
	return content
@frappe.whitelist()
def get_posthog_settings():
	"""Expose PostHog telemetry configuration for the frontend."""
	settings = {}
	settings["posthog_project_id"] = frappe.conf.get(POSTHOG_PROJECT_FIELD)
	settings["posthog_host"] = frappe.conf.get(POSTHOG_HOST_FIELD)
	settings["enable_telemetry"] = frappe.get_system_settings("enable_telemetry")
	settings["telemetry_site_age"] = frappe.utils.telemetry.site_age()
	return settings
def check_app_permission():
	"""Allow CRM access for Administrator or any system/sales role."""
	if frappe.session.user == "Administrator":
		return True

	allowed_roles = ["System Manager", "Sales User", "Sales Manager", "Sales Master Manager"]
	user_roles = frappe.get_roles()
	return any(role in allowed_roles for role in user_roles)
@frappe.whitelist(allow_guest=True)
def accept_invitation(key: str = None):
	"""Accept a CRM invitation identified by its key and log the invitee in.

	:param key: invitation key from the emailed link
	"""
	if not key:
		frappe.throw("Invalid or expired key")

	invitation_names = frappe.db.get_all("CRM Invitation", filters={"key": key}, pluck="name")
	if not invitation_names:
		frappe.throw("Invalid or expired key")

	invitation = frappe.get_doc("CRM Invitation", invitation_names[0])
	invitation.accept()
	invitation.reload()

	if invitation.status == "Accepted":
		# Log the new user in and send them to the CRM app.
		frappe.local.login_manager.login_as(invitation.email)
		frappe.local.response["type"] = "redirect"
		frappe.local.response["location"] = "/crm"
@frappe.whitelist()
def invite_by_email(emails: str, role: str):
	"""Create CRM Invitations for addresses not already members or invited.

	:param emails: comma/semicolon separated email addresses
	:param role: role granted by the invitation
	"""
	if not emails:
		return

	email_string = validate_email_address(emails, throw=False)
	email_list = split_emails(email_string)
	if not email_list:
		return

	existing_members = frappe.db.get_all("User", filters={"email": ["in", email_list]}, pluck="email")
	existing_invites = frappe.db.get_all(
		"CRM Invitation",
		filters={"email": ["in", email_list], "role": ["in", ["Sales Manager", "Sales User"]]},
		pluck="email",
	)

	already_handled = set(existing_members) | set(existing_invites)
	to_invite = list(set(email_list) - already_handled)

	for email in to_invite:
		frappe.get_doc(doctype="CRM Invitation", email=email, role=role).insert(ignore_permissions=True)
|
2302_79757062/crm
|
crm/api/__init__.py
|
Python
|
agpl-3.0
| 3,048
|
import json
import frappe
from frappe import _
from frappe.utils.caching import redis_cache
from frappe.desk.form.load import get_docinfo
@frappe.whitelist()
def get_activities(name):
	"""Return (activities, calls, notes, tasks) for the deal or lead named `name`."""
	if frappe.db.exists("CRM Deal", name):
		return get_deal_activities(name)
	if frappe.db.exists("CRM Lead", name):
		return get_lead_activities(name)
	frappe.throw(_("Document not found"), frappe.DoesNotExistError)
def get_deal_activities(name):
	"""Build the activity timeline for a CRM Deal.

	Returns (activities, calls, notes, tasks). When the deal was converted
	from a lead, the lead's timeline is prepended. Versions (field changes)
	are parsed into added/changed/removed activities, then consecutive
	same-owner changes are collapsed by handle_multiple_versions.
	"""
	# get_docinfo populates frappe.response["docinfo"] with versions,
	# comments and communications for the document.
	get_docinfo('', "CRM Deal", name)
	docinfo = frappe.response["docinfo"]
	deal_meta = frappe.get_meta("CRM Deal")
	deal_fields = {field.fieldname: {"label": field.label, "options": field.options} for field in deal_meta.fields}
	# Internal/SLA fields whose changes shouldn't appear in the timeline.
	avoid_fields = [
		"lead",
		"response_by",
		"sla_creation",
		"sla",
		"first_response_time",
		"first_responded_on",
	]

	doc = frappe.db.get_values("CRM Deal", name, ["creation", "owner", "lead"])[0]
	lead = doc[2]

	activities = []
	calls = []
	notes = []
	tasks = []
	creation_text = "created this deal"

	# Converted deals inherit the originating lead's full timeline.
	if lead:
		activities, calls, notes, tasks = get_lead_activities(lead)
		creation_text = "converted the lead to this deal"

	activities.append({
		"activity_type": "creation",
		"creation": doc[0],
		"owner": doc[1],
		"data": creation_text,
		"is_lead": False,
	})

	# Versions arrive newest-first; reverse to chronological order.
	docinfo.versions.reverse()

	for version in docinfo.versions:
		data = json.loads(version.data)
		if not data.get("changed"):
			continue

		# Only the first changed field of each version is rendered.
		if change := data.get("changed")[0]:
			field = deal_fields.get(change[0], None)

			if not field or change[0] in avoid_fields or (not change[1] and not change[2]):
				continue

			field_label = field.get("label") or change[0]
			field_option = field.get("options") or None

			activity_type = "changed"
			data = {
				"field": change[0],
				"field_label": field_label,
				"old_value": change[1],
				"value": change[2],
			}

			# No old value -> "added"; no new value -> "removed".
			if not change[1] and change[2]:
				activity_type = "added"
				data = {
					"field": change[0],
					"field_label": field_label,
					"value": change[2],
				}
			elif change[1] and not change[2]:
				activity_type = "removed"
				data = {
					"field": change[0],
					"field_label": field_label,
					"value": change[1],
				}

			activity = {
				"activity_type": activity_type,
				"creation": version.creation,
				"owner": version.owner,
				"data": data,
				"is_lead": False,
				"options": field_option,
			}
			activities.append(activity)

	for comment in docinfo.comments:
		activity = {
			"name": comment.name,
			"activity_type": "comment",
			"creation": comment.creation,
			"owner": comment.owner,
			"content": comment.content,
			"attachments": get_attachments('Comment', comment.name),
			"is_lead": False,
		}
		activities.append(activity)

	for communication in docinfo.communications + docinfo.automated_messages:
		activity = {
			"activity_type": "communication",
			"communication_type": communication.communication_type,
			"creation": communication.creation,
			"data": {
				"subject": communication.subject,
				"content": communication.content,
				"sender_full_name": communication.sender_full_name,
				"sender": communication.sender,
				"recipients": communication.recipients,
				"cc": communication.cc,
				"bcc": communication.bcc,
				"attachments": get_attachments('Communication', communication.name),
				"read_by_recipient": communication.read_by_recipient,
			},
			"is_lead": False,
		}
		activities.append(activity)

	# Append the deal's own linked records to whatever came from the lead.
	calls = calls + get_linked_calls(name)
	notes = notes + get_linked_notes(name)
	tasks = tasks + get_linked_tasks(name)

	activities.sort(key=lambda x: x["creation"], reverse=True)
	activities = handle_multiple_versions(activities)

	return activities, calls, notes, tasks
def get_lead_activities(name):
	"""Build the activity timeline for a CRM Lead.

	Returns (activities, calls, notes, tasks). Mirrors get_deal_activities
	(with is_lead=True and no lead-inheritance step): versions are parsed
	into added/changed/removed activities, comments and communications are
	appended, and consecutive same-owner changes are collapsed.
	"""
	# get_docinfo populates frappe.response["docinfo"] with versions,
	# comments and communications for the document.
	get_docinfo('', "CRM Lead", name)
	docinfo = frappe.response["docinfo"]
	lead_meta = frappe.get_meta("CRM Lead")
	lead_fields = {field.fieldname: {"label": field.label, "options": field.options} for field in lead_meta.fields}
	# Internal/SLA fields whose changes shouldn't appear in the timeline.
	avoid_fields = [
		"converted",
		"response_by",
		"sla_creation",
		"sla",
		"first_response_time",
		"first_responded_on",
	]

	doc = frappe.db.get_values("CRM Lead", name, ["creation", "owner"])[0]
	activities = [{
		"activity_type": "creation",
		"creation": doc[0],
		"owner": doc[1],
		"data": "created this lead",
		"is_lead": True,
	}]

	# Versions arrive newest-first; reverse to chronological order.
	docinfo.versions.reverse()

	for version in docinfo.versions:
		data = json.loads(version.data)
		if not data.get("changed"):
			continue

		# Only the first changed field of each version is rendered.
		if change := data.get("changed")[0]:
			field = lead_fields.get(change[0], None)

			if not field or change[0] in avoid_fields or (not change[1] and not change[2]):
				continue

			field_label = field.get("label") or change[0]
			field_option = field.get("options") or None

			activity_type = "changed"
			data = {
				"field": change[0],
				"field_label": field_label,
				"old_value": change[1],
				"value": change[2],
			}

			# No old value -> "added"; no new value -> "removed".
			if not change[1] and change[2]:
				activity_type = "added"
				data = {
					"field": change[0],
					"field_label": field_label,
					"value": change[2],
				}
			elif change[1] and not change[2]:
				activity_type = "removed"
				data = {
					"field": change[0],
					"field_label": field_label,
					"value": change[1],
				}

			activity = {
				"activity_type": activity_type,
				"creation": version.creation,
				"owner": version.owner,
				"data": data,
				"is_lead": True,
				"options": field_option,
			}
			activities.append(activity)

	for comment in docinfo.comments:
		activity = {
			"name": comment.name,
			"activity_type": "comment",
			"creation": comment.creation,
			"owner": comment.owner,
			"content": comment.content,
			"attachments": get_attachments('Comment', comment.name),
			"is_lead": True,
		}
		activities.append(activity)

	for communication in docinfo.communications + docinfo.automated_messages:
		activity = {
			"activity_type": "communication",
			"communication_type": communication.communication_type,
			"creation": communication.creation,
			"data": {
				"subject": communication.subject,
				"content": communication.content,
				"sender_full_name": communication.sender_full_name,
				"sender": communication.sender,
				"recipients": communication.recipients,
				"cc": communication.cc,
				"bcc": communication.bcc,
				"attachments": get_attachments('Communication', communication.name),
				"read_by_recipient": communication.read_by_recipient,
			},
			"is_lead": True,
		}
		activities.append(activity)

	calls = get_linked_calls(name)
	notes = get_linked_notes(name)
	tasks = get_linked_tasks(name)

	activities.sort(key=lambda x: x["creation"], reverse=True)
	activities = handle_multiple_versions(activities)

	return activities, calls, notes, tasks
@redis_cache()
def get_attachments(doctype, name):
	"""File rows attached to the given document (result cached in redis)."""
	attachment_filters = {"attached_to_doctype": doctype, "attached_to_name": name}
	attachment_fields = ["name", "file_name", "file_url", "file_size", "is_private"]
	return frappe.db.get_all("File", filters=attachment_filters, fields=attachment_fields)
def handle_multiple_versions(versions):
	"""Collapse consecutive same-owner field-change versions into one activity.

	Non-version activities (comments, communications, creation) pass
	through untouched. Each completed run of changed/added/removed entries
	by the same owner is folded via parse_grouped_versions: the first entry
	represents the group and carries the rest under "other_versions".
	"""
	merged = []
	group = []
	previous = None

	for version in versions:
		is_version = version["activity_type"] in ["changed", "added", "removed"]

		if not is_version:
			merged.append(version)

		if not previous:
			# Very first item: seed the state. Note the tail-flush below is
			# intentionally skipped on this path (original behavior).
			previous = version
			if is_version:
				group.append(version)
			continue

		if is_version and previous.get("owner") and version["owner"] == previous["owner"]:
			group.append(version)
		else:
			# Owner changed (or a non-version interrupted the run): close
			# out the current group and possibly start a new one.
			if group:
				merged.append(parse_grouped_versions(group))
			group = []
			if is_version:
				group.append(version)

		previous = version

		# Flush any trailing group once the last element is reached.
		if version == versions[-1] and group:
			merged.append(parse_grouped_versions(group))

	return merged
def parse_grouped_versions(versions):
	"""Return the first version, attaching any remainder as "other_versions"."""
	head, *rest = versions
	if rest:
		head["other_versions"] = rest
	return head
def get_linked_calls(name):
	"""Call logs referencing the given document ([] when none)."""
	call_fields = [
		"name",
		"caller",
		"receiver",
		"from",
		"to",
		"duration",
		"start_time",
		"end_time",
		"status",
		"type",
		"recording_url",
		"creation",
		"note",
	]
	calls = frappe.db.get_all(
		"CRM Call Log",
		filters={"reference_docname": name},
		fields=call_fields,
	)
	return calls or []
def get_linked_notes(name):
	"""FCRM Notes referencing the given document ([] when none)."""
	note_fields = ['name', 'title', 'content', 'owner', 'modified']
	notes = frappe.db.get_all(
		"FCRM Note",
		filters={"reference_docname": name},
		fields=note_fields,
	)
	return notes or []
def get_linked_tasks(name):
	"""CRM Tasks referencing the given document ([] when none).

	:param name: reference document name
	"""
	tasks = frappe.db.get_all(
		"CRM Task",
		filters={"reference_docname": name},
		fields=[
			"name",
			"title",
			"description",
			"assigned_to",  # fix: field was listed twice; duplicate removed
			"due_date",
			"priority",
			"status",
			"modified",
		],
	)
	return tasks or []
|
2302_79757062/crm
|
crm/api/activities.py
|
Python
|
agpl-3.0
| 8,692
|
import frappe
@frappe.whitelist(allow_guest=True)
def oauth_providers():
	"""List enabled social-login providers with their authorize URLs.

	Only providers with a decryptable client secret, a client id, a base
	URL and valid oauth keys are returned. Guest-accessible (login page).
	"""
	from frappe.utils.html_utils import get_icon_html
	from frappe.utils.password import get_decrypted_password
	from frappe.utils.oauth import get_oauth2_authorize_url, get_oauth_keys

	out = []
	providers = frappe.get_all(
		"Social Login Key",
		filters={"enable_social_login": 1},
		fields=["name", "client_id", "base_url", "provider_name", "icon"],
		order_by="name",
	)

	for provider in providers:
		client_secret = get_decrypted_password("Social Login Key", provider.name, "client_secret")
		if not client_secret:
			continue

		icon = None
		if provider.icon:
			if provider.provider_name == "Custom":
				icon = get_icon_html(provider.icon, small=True)
			else:
				# Fix: quote the alt value — an unquoted attribute produces
				# broken HTML for provider names containing spaces.
				icon = f"<img src='{provider.icon}' alt='{provider.provider_name}'>"

		if provider.client_id and provider.base_url and get_oauth_keys(provider.name):
			out.append(
				{
					"name": provider.name,
					"provider_name": provider.provider_name,
					"auth_url": get_oauth2_authorize_url(provider.name, "/crm"),
					"icon": icon,
				}
			)
	return out
|
2302_79757062/crm
|
crm/api/auth.py
|
Python
|
agpl-3.0
| 1,098
|
from collections.abc import Iterable
import frappe
from frappe import _
from bs4 import BeautifulSoup
def on_update(self, method):
	# Comment doc_events hook: fan out mention notifications whenever a
	# comment is saved. `self` is the Comment document; `method` is the
	# hook name (unused).
	notify_mentions(self)
def notify_mentions(doc):
	"""
	Extract mentions from `content`, and notify.

	`content` must have `HTML` content.
	"""
	content = getattr(doc, "content", None)
	if not content:
		return

	mentions = extract_mentions(content)
	if not mentions:
		return

	# These values are identical for every mention; compute them once
	# instead of once per loop iteration.
	owner = frappe.get_cached_value("User", doc.owner, "full_name")
	doctype = doc.reference_doctype
	if doctype.startswith("CRM "):
		doctype = doctype[4:].lower()

	for mention in mentions:
		notification_text = f"""
			<div class="mb-2 leading-5 text-gray-600">
				<span class="font-medium text-gray-900">{ owner }</span>
				<span>{ _('mentioned you in {0}').format(doctype) }</span>
				<span class="font-medium text-gray-900">{ doc.reference_name }</span>
			</div>
		"""
		values = frappe._dict(
			doctype="CRM Notification",
			from_user=doc.owner,
			to_user=mention.email,
			type="Mention",
			message=doc.content,
			notification_text=notification_text,
			notification_type_doctype="Comment",
			notification_type_doc=doc.name,
			reference_doctype=doc.reference_doctype,
			reference_name=doc.reference_name,
		)
		# Fix: `continue`, not `return` — an already-notified mention must
		# not stop notifications for the remaining mentions in the comment.
		if frappe.db.exists("CRM Notification", values):
			continue
		frappe.get_doc(values).insert()
def extract_mentions(html):
	"""Return frappe._dict(full_name, email) for each mention span in the HTML."""
	if not html:
		return []
	soup = BeautifulSoup(html, "html.parser")
	mention_spans = soup.find_all("span", attrs={"data-type": "mention"})
	return [
		frappe._dict(full_name=span.get("data-label"), email=span.get("data-id"))
		for span in mention_spans
	]
@frappe.whitelist()
def add_attachments(name: str, attachments: Iterable[str | dict]) -> None:
	"""Add attachments to the given Comment

	:param name: Comment name
	:param attachments: File names or dicts with keys "fname" and "fcontent"
	"""
	for a in attachments:
		if isinstance(a, str):
			# Existing File document: reference its stored URL/privacy.
			attach = frappe.db.get_value("File", {"name": a}, ["file_url", "is_private"], as_dict=1)
			file_args = {
				"file_url": attach.file_url,
				"is_private": attach.is_private,
			}
		elif isinstance(a, dict) and "fcontent" in a and "fname" in a:
			# dict returned by frappe.attach_print()
			file_args = {
				"file_name": a["fname"],
				"content": a["fcontent"],
				"is_private": 1,
			}
		else:
			continue

		file_args.update(
			{
				"attached_to_doctype": "Comment",
				"attached_to_name": name,
				"folder": "Home/Attachments",
			}
		)

		attachment_doc = frappe.new_doc("File")
		attachment_doc.update(file_args)
		attachment_doc.save(ignore_permissions=True)
|
2302_79757062/crm
|
crm/api/comment.py
|
Python
|
agpl-3.0
| 2,857
|
import frappe
from frappe import _
def validate(doc, method):
    # Contact doc-event hook.
    # When the contact has exactly one email / phone row, flag it as
    # primary (module helpers below).
    set_primary_email(doc)
    set_primary_mobile_no(doc)
    # NOTE(review): these appear to be core Contact methods that derive the
    # flat email/mobile fields from the primary flags set above — confirm
    # against the framework's Contact doctype.
    doc.set_primary_email()
    doc.set_primary("mobile_no")
def set_primary_email(doc):
    """Flag the contact's sole email row as primary.

    Acts only when exactly one email row exists; with zero or several rows
    the primary choice is left alone.
    """
    emails = doc.email_ids
    if not emails or len(emails) != 1:
        return
    emails[0].is_primary = 1
def set_primary_mobile_no(doc):
    """Flag the contact's sole phone row as the primary mobile number.

    Acts only when exactly one phone row exists.
    """
    phones = doc.phone_nos
    if not phones or len(phones) != 1:
        return
    phones[0].is_primary_mobile_no = 1
@frappe.whitelist()
def get_contact(name):
    """Fetch a single Contact plus its child email/phone rows.

    Throws ``frappe.DoesNotExistError`` when no Contact matches *name*.
    """
    Contact = frappe.qb.DocType("Contact")
    rows = (
        frappe.qb.from_(Contact)
        .select("*")
        .where(Contact.name == name)
        .limit(1)
    ).run(as_dict=True)
    if not rows:
        frappe.throw(_("Contact not found"), frappe.DoesNotExistError)

    contact = rows[0]
    contact["doctype"] = "Contact"
    contact["email_ids"] = frappe.get_all(
        "Contact Email", filters={"parent": name}, fields=["name", "email_id", "is_primary"]
    )
    contact["phone_nos"] = frappe.get_all(
        "Contact Phone", filters={"parent": name}, fields=["name", "phone", "is_primary_mobile_no"]
    )
    return contact
@frappe.whitelist()
def get_linked_deals(contact):
    """Get linked deals for a contact"""
    if not frappe.has_permission("Contact", "read", contact):
        frappe.throw("Not permitted", frappe.PermissionError)

    # Deals reference contacts through the "CRM Contacts" child table.
    linked_rows = frappe.get_all(
        "CRM Contacts",
        filters={"contact": contact, "parenttype": "CRM Deal"},
        fields=["parent"],
        distinct=True,
    )

    # NOTE(review): ``fields`` is forwarded to get_cached_doc, which loads
    # the full document regardless — confirm whether the kwarg has effect.
    deal_fields = [
        "name",
        "organization",
        "currency",
        "annual_revenue",
        "status",
        "email",
        "mobile_no",
        "deal_owner",
        "modified",
    ]
    return [
        frappe.get_cached_doc("CRM Deal", row.parent, fields=deal_fields).as_dict()
        for row in linked_rows
    ]
@frappe.whitelist()
def create_new(contact, field, value):
    """Create new email or phone for a contact"""
    if not frappe.has_permission("Contact", "write", contact):
        frappe.throw("Not permitted", frappe.PermissionError)

    contact_doc = frappe.get_doc("Contact", contact)
    if field == "email":
        contact_doc.append("email_ids", {"email_id": value})
    elif field in ("mobile_no", "phone"):
        contact_doc.append("phone_nos", {"phone": value})
    else:
        frappe.throw("Invalid field")
    contact_doc.save()
    return True
@frappe.whitelist()
def set_as_primary(contact, field, value):
    """Set email or phone as primary for a contact"""
    if not frappe.has_permission("Contact", "write", contact):
        frappe.throw("Not permitted", frappe.PermissionError)

    contact_doc = frappe.get_doc("Contact", contact)
    if field == "email":
        # Exactly one row keeps the flag: the one whose email matches.
        for row in contact_doc.email_ids:
            row.is_primary = 1 if row.email_id == value else 0
    elif field in ("mobile_no", "phone"):
        flag = "is_primary_mobile_no" if field == "mobile_no" else "is_primary_phone"
        for row in contact_doc.phone_nos:
            row.set(flag, 1 if row.phone == value else 0)
    else:
        frappe.throw("Invalid field")
    contact_doc.save()
    return True
@frappe.whitelist()
def search_emails(txt: str):
    """Search Contacts by name/email/ID for the email picker.

    Returns up to 20 rows as (full_name, email_id, name) lists.
    """
    doctype = "Contact"
    meta = frappe.get_meta(doctype)

    # Only contacts that actually have an email; honor enabled/disabled
    # flags when the doctype defines them.
    filters = [["Contact", "email_id", "is", "set"]]
    if meta.get("fields", {"fieldname": "enabled", "fieldtype": "Check"}):
        filters.append([doctype, "enabled", "=", 1])
    if meta.get("fields", {"fieldname": "disabled", "fieldtype": "Check"}):
        filters.append([doctype, "disabled", "!=", 1])

    search_fields = ["full_name", "email_id", "name"]
    or_filters = (
        [[doctype, f.strip(), "like", f"%{txt}%"] for f in search_fields]
        if txt
        else []
    )

    return frappe.get_list(
        doctype,
        filters=filters,
        fields=search_fields,
        or_filters=or_filters,
        limit_start=0,
        limit_page_length=20,
        order_by='email_id, full_name, name',
        ignore_permissions=False,
        as_list=True,
        strict=False,
    )
|
2302_79757062/crm
|
crm/api/contact.py
|
Python
|
agpl-3.0
| 3,833
|
import frappe
import json
from frappe import _
from frappe.model.document import get_controller
from frappe.model import no_value_fields
from pypika import Criterion
from frappe.utils import make_filter_tuple
from crm.api.views import get_views
from crm.fcrm.doctype.crm_form_script.crm_form_script import get_form_script
@frappe.whitelist()
def sort_options(doctype: str):
    """Return {label, value} sort options for *doctype*: every value-bearing
    docfield followed by the standard bookkeeping fields."""
    meta_fields = frappe.get_meta(doctype).fields
    options = [
        {"label": _(f.label), "value": f.fieldname}
        for f in meta_fields
        if f.fieldtype not in no_value_fields and f.label and f.fieldname
    ]

    standard = [
        ("Name", "name"),
        ("Created On", "creation"),
        ("Last Modified", "modified"),
        ("Modified By", "modified_by"),
        ("Owner", "owner"),
    ]
    options.extend({"label": _(label), "value": value} for label, value in standard)
    return options
@frappe.whitelist()
def get_filterable_fields(doctype: str):
    """Return every field of *doctype* usable in list-view filters:
    DocFields, Custom Fields and a fixed set of standard fields, limited to
    filter-friendly fieldtypes and minus controller-restricted names."""
    allowed_fieldtypes = [
        "Check",
        "Data",
        "Float",
        "Int",
        "Currency",
        "Dynamic Link",
        "Link",
        "Long Text",
        "Select",
        "Small Text",
        "Text Editor",
        "Text",
        "Duration",
        "Date",
        "Datetime",
    ]

    # A doctype controller may exclude fields from filtering.
    controller = get_controller(doctype)
    restricted_fields = []
    if hasattr(controller, "get_non_filterable_fields"):
        restricted_fields = controller.get_non_filterable_fields()

    res = []
    # Collect both standard DocFields and Custom Fields.
    for table_name in ("DocField", "Custom Field"):
        table = frappe.qb.DocType(table_name)
        res.extend(
            get_doctype_fields_meta(table, doctype, allowed_fieldtypes, restricted_fields)
        )

    # append standard fields (getting error when using frappe.model.std_fields)
    standard_fields = [
        {"fieldname": "name", "fieldtype": "Link", "label": "ID", "options": doctype},
        {
            "fieldname": "owner",
            "fieldtype": "Link",
            "label": "Created By",
            "options": "User"
        },
        {
            "fieldname": "modified_by",
            "fieldtype": "Link",
            "label": "Last Updated By",
            "options": "User",
        },
        {"fieldname": "_user_tags", "fieldtype": "Data", "label": "Tags"},
        {"fieldname": "_liked_by", "fieldtype": "Data", "label": "Like"},
        {"fieldname": "_comments", "fieldtype": "Text", "label": "Comments"},
        {"fieldname": "_assign", "fieldtype": "Text", "label": "Assigned To"},
        {"fieldname": "creation", "fieldtype": "Datetime", "label": "Created On"},
        {"fieldname": "modified", "fieldtype": "Datetime", "label": "Last Updated On"},
    ]
    for field in standard_fields:
        if (
            field["fieldname"] not in restricted_fields
            and field["fieldtype"] in allowed_fieldtypes
        ):
            field["name"] = field["fieldname"]
            res.append(field)

    # Translate labels last so it covers all three sources uniformly.
    for field in res:
        field["label"] = _(field.get("label"))
    return res
@frappe.whitelist()
def get_group_by_fields(doctype: str):
    """Return {label, value} options usable as a group-by field for
    *doctype*: its own groupable fields followed by the standard
    bookkeeping fields."""
    allowed_fieldtypes = [
        "Check",
        "Data",
        "Float",
        "Int",
        "Currency",
        "Dynamic Link",
        "Link",
        "Select",
        "Duration",
        "Date",
        "Datetime",
    ]
    fields = frappe.get_meta(doctype).fields
    fields = [field for field in fields if field.fieldtype not in no_value_fields and field.fieldtype in allowed_fieldtypes]
    fields = [
        {
            "label": _(field.label),
            "value": field.fieldname,
        }
        for field in fields
        if field.label and field.fieldname
    ]
    # Fixed bookkeeping fields. The previous list repeated "creation" and
    # "modified" under two labels each, yielding duplicate group-by options.
    standard_fields = [
        {"label": "Name", "value": "name"},
        {"label": "Created On", "value": "creation"},
        {"label": "Last Modified", "value": "modified"},
        {"label": "Modified By", "value": "modified_by"},
        {"label": "Owner", "value": "owner"},
        {"label": "Liked By", "value": "_liked_by"},
        {"label": "Assigned To", "value": "_assign"},
        {"label": "Comments", "value": "_comments"},
    ]
    for field in standard_fields:
        field["label"] = _(field["label"])
        fields.append(field)
    return fields
def get_doctype_fields_meta(DocField, doctype, allowed_fieldtypes, restricted_fields):
    """Query field metadata rows for *doctype* from a field table
    (``tabDocField`` or ``tabCustom Field``), keeping only visible fields
    whose fieldtype is allowed and whose name is not restricted."""
    # The two field tables name their parent-doctype column differently.
    parent_column = "parent" if DocField._table_name == "tabDocField" else "dt"
    query = (
        frappe.qb.from_(DocField)
        .select(
            DocField.fieldname,
            DocField.fieldtype,
            DocField.label,
            DocField.name,
            DocField.options,
        )
        .where(DocField[parent_column] == doctype)
        .where(DocField.hidden == False)
        .where(Criterion.any([DocField.fieldtype == ft for ft in allowed_fieldtypes]))
        .where(Criterion.all([DocField.fieldname != fn for fn in restricted_fields]))
    )
    return query.run(as_dict=True)
@frappe.whitelist()
def get_quick_filters(doctype: str):
    """Build quick-filter descriptors for *doctype* from the fields flagged
    ``in_standard_filter``.

    Select fields get their options expanded to {label, value} dicts with a
    leading blank choice.
    """
    meta = frappe.get_meta(doctype)
    quick_filters = []
    for field in meta.fields:
        if not field.in_standard_filter:
            continue
        options = field.options
        if field.fieldtype == "Select":
            # Build the option list without writing it back onto ``field``:
            # frappe.get_meta returns a cached meta object, so mutating
            # field.options leaked the split list into later requests.
            options = [{"label": option, "value": option} for option in (options or "").split("\n")]
            options.insert(0, {"label": "", "value": ""})
        quick_filters.append({
            "label": _(field.label),
            "name": field.fieldname,
            "type": field.fieldtype,
            "options": options,
        })
    if doctype == "CRM Lead":
        # "converted" is an internal flag, not a user-facing quick filter.
        quick_filters = [filter for filter in quick_filters if filter.get("name") != "converted"]
    return quick_filters
@frappe.whitelist()
def get_data(
    doctype: str,
    filters: dict,
    order_by: str,
    page_length=20,
    page_length_count=20,
    column_field=None,
    title_field=None,
    columns=[],
    rows=[],
    kanban_columns=[],
    kanban_fields=[],
    view=None,
    default_filters=None,
):
    """Backend for the CRM list / kanban / group-by views.

    Resolves the effective column/row configuration (custom view, saved
    default view, or controller defaults), applies filters (with "@me"
    substituted by the session user), fetches the records, and returns
    them together with all view metadata the frontend needs.

    NOTE(review): ``columns``/``rows``/``kanban_*`` use mutable default
    arguments — they appear to be re-parsed on every call, but confirm no
    call path mutates the shared defaults.
    """
    custom_view = False
    filters = frappe._dict(filters)
    rows = frappe.parse_json(rows or "[]")
    columns = frappe.parse_json(columns or "[]")
    kanban_fields = frappe.parse_json(kanban_fields or "[]")
    kanban_columns = frappe.parse_json(kanban_columns or "[]")
    # A "view" dict may carry a saved custom view name, its type
    # (list/kanban/group_by) and a group-by field.
    custom_view_name = view.get('custom_view_name') if view else None
    view_type = view.get('view_type') if view else None
    group_by_field = view.get('group_by_field') if view else None

    # Substitute the "@me" placeholder (and its LIKE form "%@me%") with the
    # session user, both in scalar and list-valued filters.
    for key in filters:
        value = filters[key]
        if isinstance(value, list):
            if "@me" in value:
                value[value.index("@me")] = frappe.session.user
            elif "%@me%" in value:
                index = [i for i, v in enumerate(value) if v == "%@me%"]
                for i in index:
                    value[i] = "%" + frappe.session.user + "%"
        elif value == "@me":
            filters[key] = frappe.session.user

    if default_filters:
        default_filters = frappe.parse_json(default_filters)
        filters.update(default_filters)

    is_default = True
    data = []
    _list = get_controller(doctype)

    # Controller-provided default rows, used by both list and kanban paths.
    default_rows = []
    if hasattr(_list, "default_list_data"):
        default_rows = _list.default_list_data().get("rows")

    if view_type != "kanban":
        # Explicit columns/rows from the client mean a custom view.
        if columns or rows:
            custom_view = True
            is_default = False
            columns = frappe.parse_json(columns)
            rows = frappe.parse_json(rows)

        # Minimal fallbacks when nothing was provided.
        if not columns:
            columns = [
                {"label": "Name", "type": "Data", "key": "name", "width": "16rem"},
                {"label": "Last Modified", "type": "Datetime", "key": "modified", "width": "8rem"},
            ]

        if not rows:
            rows = ["name"]

        # Prefer the user's saved default view settings, else fall back to
        # the controller defaults.
        default_view_filters = {
            "dt": doctype,
            "type": view_type or 'list',
            "is_default": 1,
            "user": frappe.session.user,
        }

        if not custom_view and frappe.db.exists("CRM View Settings", default_view_filters):
            list_view_settings = frappe.get_doc("CRM View Settings", default_view_filters)
            columns = frappe.parse_json(list_view_settings.columns)
            rows = frappe.parse_json(list_view_settings.rows)
            is_default = False
        elif not custom_view or is_default and hasattr(_list, "default_list_data"):
            rows = default_rows
            columns = _list.default_list_data().get("columns")

        # check if rows has all keys from columns if not add them
        for column in columns:
            if column.get("key") not in rows:
                rows.append(column.get("key"))
            column["label"] = _(column.get("label"))

            # Legacy saved width for the "liked" column is normalized.
            if column.get("key") == "_liked_by" and column.get("width") == "10rem":
                column["width"] = "50px"

        # check if rows has group_by_field if not add it
        if group_by_field and group_by_field not in rows:
            rows.append(group_by_field)

        data = frappe.get_list(
            doctype,
            fields=rows,
            filters=filters,
            order_by=order_by,
            page_length=page_length,
        ) or []

    if view_type == "kanban":
        if not rows:
            rows = default_rows

        # Derive kanban columns from the column field's Link target or
        # Select options when the client did not send any.
        if not kanban_columns and column_field:
            field_meta = frappe.get_meta(doctype).get_field(column_field)
            if field_meta.fieldtype == "Link":
                kanban_columns = frappe.get_all(
                    field_meta.options,
                    fields=["name"],
                    order_by="modified asc",
                )
            elif field_meta.fieldtype == "Select":
                kanban_columns = [{"name": option} for option in field_meta.options.split("\n")]

        if not title_field:
            title_field = "name"
            if hasattr(_list, "default_kanban_settings"):
                title_field = _list.default_kanban_settings().get("title_field")

        if title_field not in rows:
            rows.append(title_field)

        if not kanban_fields:
            kanban_fields = ["name"]
            if hasattr(_list, "default_kanban_settings"):
                kanban_fields = json.loads(_list.default_kanban_settings().get("kanban_fields"))

        for field in kanban_fields:
            if field not in rows:
                rows.append(field)

        # Fetch each kanban column's records individually so per-column
        # paging and manual ordering can apply.
        for kc in kanban_columns:
            column_filters = { column_field: kc.get('name') }
            order = kc.get("order")
            if column_field in filters and filters.get(column_field) != kc.name or kc.get('delete'):
                column_data = []
            else:
                column_filters.update(filters.copy())
                page_length = 20

                if kc.get("page_length"):
                    page_length = kc.get("page_length")

                if order:
                    column_data = get_records_based_on_order(doctype, rows, column_filters, page_length, order)
                else:
                    column_data = frappe.get_list(
                        doctype,
                        fields=rows,
                        filters=convert_filter_to_tuple(doctype, column_filters),
                        order_by=order_by,
                        page_length=page_length,
                    )

                # Total count for the column header (ignores paging).
                new_filters = filters.copy()
                new_filters.update({ column_field: kc.get('name') })
                all_count = len(frappe.get_list(doctype, filters=convert_filter_to_tuple(doctype, new_filters)))

                kc["all_count"] = all_count
                kc["count"] = len(column_data)

                for d in column_data:
                    getCounts(d, doctype)

            # Apply the manual card order; unknown names sort last.
            if order:
                column_data = sorted(
                    column_data, key=lambda x: order.index(x.get("name"))
                    if x.get("name") in order else len(order)
                )

            data.append({"column": kc, "fields": kanban_fields, "data": column_data})

    # Build the full field catalog returned to the client.
    fields = frappe.get_meta(doctype).fields
    fields = [field for field in fields if field.fieldtype not in no_value_fields]
    fields = [
        {
            "label": _(field.label),
            "type": field.fieldtype,
            "value": field.fieldname,
            "options": field.options,
        }
        for field in fields
        if field.label and field.fieldname
    ]

    std_fields = [
        {"label": "Name", "type": "Data", "value": "name"},
        {"label": "Created On", "type": "Datetime", "value": "creation"},
        {"label": "Last Modified", "type": "Datetime", "value": "modified"},
        {
            "label": "Modified By",
            "type": "Link",
            "value": "modified_by",
            "options": "User",
        },
        {"label": "Assigned To", "type": "Text", "value": "_assign"},
        {"label": "Owner", "type": "Link", "value": "owner", "options": "User"},
        {"label": "Like", "type": "Data", "value": "_liked_by"},
    ]

    for field in std_fields:
        if field.get('value') not in rows:
            rows.append(field.get('value'))
        if field not in fields:
            field["label"] = _(field["label"])
            fields.append(field)

    if not is_default and custom_view_name:
        is_default = frappe.db.get_value("CRM View Settings", custom_view_name, "load_default_columns")

    if group_by_field and view_type == "group_by":
        # Resolve the group-by field's option list: Select options come from
        # meta; otherwise the distinct values present in the fetched data,
        # sorted to match the requested order_by direction.
        def get_options(type, options):
            if type == "Select":
                return [option for option in options.split("\n")]
            else:
                has_empty_values = any([not d.get(group_by_field) for d in data])
                options = list(set([d.get(group_by_field) for d in data]))
                options = [u for u in options if u]
                if has_empty_values:
                    options.append("")

                if order_by and group_by_field in order_by:
                    order_by_fields = order_by.split(",")
                    order_by_fields = [(field.split(" ")[0], field.split(" ")[1]) for field in order_by_fields]
                    if (group_by_field, "asc") in order_by_fields:
                        options.sort()
                    elif (group_by_field, "desc") in order_by_fields:
                        options.sort(reverse=True)
                else:
                    options.sort()
                return options

        # Replace the plain fieldname with a descriptor the UI can render.
        for field in fields:
            if field.get("value") == group_by_field:
                group_by_field = {
                    "label": field.get("label"),
                    "name": field.get("value"),
                    "type": field.get("type"),
                    "options": get_options(field.get("type"), field.get("options")),
                }

    return {
        "data": data,
        "columns": columns,
        "rows": rows,
        "fields": fields,
        "column_field": column_field,
        "title_field": title_field,
        "kanban_columns": kanban_columns,
        "kanban_fields": kanban_fields,
        "group_by_field": group_by_field,
        "page_length": page_length,
        "page_length_count": page_length_count,
        "is_default": is_default,
        "views": get_views(doctype),
        "total_count": len(frappe.get_list(doctype, filters=filters)),
        "row_count": len(data),
        "form_script": get_form_script(doctype),
        "list_script": get_form_script(doctype, "List"),
        "view_type": view_type,
    }
def convert_filter_to_tuple(doctype, filters):
    """Normalize a dict of filters into frappe's list-of-tuples form;
    non-dict filters pass through unchanged."""
    if not isinstance(filters, dict):
        return filters
    return [make_filter_tuple(doctype, key, value) for key, value in filters.items()]
def get_records_based_on_order(doctype, rows, filters, page_length, order):
    """Fetch up to *page_length* records, listing names present in the
    manual *order* list first, then backfilling with the newest of the
    remaining records."""
    base_filters = convert_filter_to_tuple(doctype, filters)

    # First: records that appear in the manual order list.
    ordered_filters = base_filters.copy()
    ordered_filters.append([doctype, "name", "in", order[:page_length]])
    records = frappe.get_list(
        doctype,
        fields=rows,
        filters=ordered_filters,
        order_by="creation desc",
        page_length=page_length,
    )

    remaining = page_length - len(records)
    if remaining > 0:
        # Backfill the page with records not in the order list.
        backfill_filters = base_filters.copy()
        backfill_filters.append([doctype, "name", "not in", order])
        records.extend(
            frappe.get_list(
                doctype,
                fields=rows,
                filters=backfill_filters,
                order_by="creation desc",
                page_length=remaining,
            )
        )
    return records
@frappe.whitelist()
def get_fields_meta(doctype, restricted_fieldtypes=None, as_array=False):
    """Return field metadata for *doctype* (doc fields plus standard
    fields), excluding layout fieldtypes and any *restricted_fieldtypes*.

    Returns a list when *as_array* is truthy, else a dict keyed by
    fieldname.
    """
    not_allowed_fieldtypes = [
        "Tab Break",
        "Section Break",
        "Column Break",
    ]
    if restricted_fieldtypes:
        restricted_fieldtypes = frappe.parse_json(restricted_fieldtypes)
        not_allowed_fieldtypes += restricted_fieldtypes

    fields = [
        field for field in frappe.get_meta(doctype).fields
        if field.fieldtype not in not_allowed_fieldtypes
    ]

    standard_fields = [
        {"fieldname": "name", "fieldtype": "Link", "label": "ID", "options": doctype},
        {
            "fieldname": "owner",
            "fieldtype": "Link",
            "label": "Created By",
            "options": "User"
        },
        {
            "fieldname": "modified_by",
            "fieldtype": "Link",
            "label": "Last Updated By",
            "options": "User",
        },
        {"fieldname": "_user_tags", "fieldtype": "Data", "label": "Tags"},
        {"fieldname": "_liked_by", "fieldtype": "Data", "label": "Like"},
        {"fieldname": "_comments", "fieldtype": "Text", "label": "Comments"},
        {"fieldname": "_assign", "fieldtype": "Text", "label": "Assigned To"},
        {"fieldname": "creation", "fieldtype": "Datetime", "label": "Created On"},
        {"fieldname": "modified", "fieldtype": "Datetime", "label": "Last Updated On"},
    ]
    for field in standard_fields:
        if not restricted_fieldtypes or field["fieldtype"] not in restricted_fieldtypes:
            fields.append(field)

    if as_array:
        return fields
    # Works for both meta docfield objects and the plain dicts above —
    # both expose .get().
    return {field.get('fieldname'): field for field in fields}
@frappe.whitelist()
def get_sidebar_fields(doctype, name):
    """Resolve the "Side Panel" CRM Fields Layout for *doctype*, expanding
    each fieldname in it into a full field descriptor and applying the
    current user's permlevel read/write restrictions for document *name*.

    Returns [] when no side-panel layout is configured.
    """
    if not frappe.db.exists("CRM Fields Layout", {"dt": doctype, "type": "Side Panel"}):
        return []
    layout = frappe.get_doc("CRM Fields Layout", {"dt": doctype, "type": "Side Panel"}).layout
    if not layout:
        return []
    layout = json.loads(layout)

    not_allowed_fieldtypes = [
        "Tab Break",
        "Section Break",
        "Column Break",
    ]

    fields = frappe.get_meta(doctype).fields
    fields = [field for field in fields if field.fieldtype not in not_allowed_fieldtypes]

    # Permlevel access is only computed when the doctype uses permlevels;
    # the variables are referenced below only when field.permlevel > 0.
    doc = frappe.get_cached_doc(doctype, name)
    has_high_permlevel_fields = any(df.permlevel > 0 for df in fields)
    if has_high_permlevel_fields:
        has_read_access_to_permlevels = doc.get_permlevel_access("read")
        has_write_access_to_permlevels = doc.get_permlevel_access("write")

    for section in layout:
        section["name"] = section.get("name") or section.get("label")
        for field in section.get("fields") if section.get("fields") else []:
            field_obj = next((f for f in fields if f.fieldname == field), None)
            if field_obj:
                if field_obj.permlevel > 0:
                    field_has_write_access = field_obj.permlevel in has_write_access_to_permlevels
                    field_has_read_access = field_obj.permlevel in has_read_access_to_permlevels
                    if not field_has_write_access and field_has_read_access:
                        # Readable but not writable: render read-only.
                        field_obj.read_only = 1
                    if not field_has_read_access and not field_has_write_access:
                        # No access at all: hide the field entirely.
                        field_obj.hidden = 1
                section["fields"][section.get("fields").index(field)] = get_field_obj(field_obj)

    return layout
def get_field_obj(field):
    """Convert a docfield meta object into the descriptor shape consumed
    by the side-panel UI."""
    label = field.label
    obj = {
        "label": label,
        "type": get_type(field),
        "name": field.fieldname,
        "hidden": field.hidden,
        "reqd": field.reqd,
        "read_only": field.read_only,
        "all_properties": field,
    }

    if field.fieldtype == "Link":
        obj["placeholder"] = "Select " + label + "..."
        obj["doctype"] = field.options
    else:
        obj["placeholder"] = "Add " + label + "..."
        if field.fieldtype == "Select" and field.options:
            obj["options"] = [
                {"label": option, "value": option} for option in field.options.split("\n")
            ]

    if field.read_only:
        obj["tooltip"] = "This field is read only and cannot be edited."
    return obj
def get_type(field):
    """Map a docfield to the UI control type used by the front end."""
    data_option_types = {"Phone": "phone", "Email": "email"}
    if field.fieldtype == "Data" and field.options in data_option_types:
        return data_option_types[field.options]
    if field.fieldtype == "Check":
        return "checkbox"
    if field.fieldtype == "Int":
        return "number"
    if field.fieldtype in ("Small Text", "Text", "Long Text"):
        return "textarea"
    if field.read_only:
        return "read_only"
    return field.fieldtype.lower()
def get_assigned_users(doctype, name, default_assigned_to=None):
    """Return the distinct users with a non-cancelled ToDo assignment on
    the given document, falling back to *default_assigned_to* when nobody
    is assigned."""
    allocated = frappe.get_all(
        "ToDo",
        fields=["allocated_to"],
        filters={
            "reference_type": doctype,
            "reference_name": name,
            "status": ("!=", "Cancelled"),
        },
        pluck="allocated_to",
    )
    users = list(set(allocated))
    if users:
        return users
    # Nobody assigned yet — use the provided default, if any.
    return [default_assigned_to] if default_assigned_to else []
@frappe.whitelist()
def get_fields(doctype: str, allow_all_fieldtypes: bool = False):
    """Return descriptors for the value-bearing fields of *doctype*.

    When *allow_all_fieldtypes* is truthy, layout and read-only fieldtypes
    are included as well.
    """
    if allow_all_fieldtypes:
        not_allowed_fieldtypes = []
    else:
        not_allowed_fieldtypes = list(frappe.model.no_value_fields) + ["Read Only"]

    descriptors = []
    for field in frappe.get_meta(doctype).fields:
        if field.fieldtype in not_allowed_fieldtypes or not field.fieldname:
            continue
        descriptors.append({
            "label": field.label,
            "type": field.fieldtype,
            "value": field.fieldname,
            "options": field.options,
            "mandatory": field.reqd,
            "read_only": field.read_only,
            "hidden": field.hidden,
            "depends_on": field.depends_on,
            "mandatory_depends_on": field.mandatory_depends_on,
            "read_only_depends_on": field.read_only_depends_on,
        })
    return descriptors
def getCounts(d, doctype):
    """Annotate row *d* in place with email/comment/task/note counts for
    the linked document, and return it."""
    ref = {"reference_doctype": doctype, "reference_name": d.get("name")}
    ref_by_docname = {"reference_doctype": doctype, "reference_docname": d.get("name")}

    # Emails include both manual communications and automated messages.
    email_count = frappe.db.count(
        "Communication", filters={**ref, "communication_type": "Communication"}
    ) or 0
    email_count = email_count + frappe.db.count(
        "Communication", filters={**ref, "communication_type": "Automated Message"}
    )
    d["_email_count"] = email_count
    d["_comment_count"] = frappe.db.count(
        "Comment", filters={**ref, "comment_type": "Comment"}
    )
    d["_task_count"] = frappe.db.count("CRM Task", filters=ref_by_docname)
    d["_note_count"] = frappe.db.count("FCRM Note", filters=ref_by_docname)
    return d
|
2302_79757062/crm
|
crm/api/doc.py
|
Python
|
agpl-3.0
| 20,501
|
import frappe
from frappe.query_builder import Order
@frappe.whitelist()
def get_notifications():
    """Return the session user's CRM notifications, newest first, shaped
    for the notifications panel."""
    Notification = frappe.qb.DocType("CRM Notification")
    rows = (
        frappe.qb.from_(Notification)
        .select("*")
        .where(Notification.to_user == frappe.session.user)
        .orderby("creation", order=Order.desc)
    ).run(as_dict=True)

    result = []
    for row in rows:
        is_deal = row.reference_doctype == "CRM Deal"
        result.append(
            {
                "creation": row.creation,
                "from_user": {
                    "name": row.from_user,
                    "full_name": frappe.get_value("User", row.from_user, "full_name"),
                },
                "type": row.type,
                "to_user": row.to_user,
                "read": row.read,
                "comment": row.comment,
                "notification_text": row.notification_text,
                "notification_type_doctype": row.notification_type_doctype,
                "notification_type_doc": row.notification_type_doc,
                # Frontend route fragments: anything that isn't a deal is
                # treated as a lead.
                "reference_doctype": "deal" if is_deal else "lead",
                "reference_name": row.reference_name,
                "route_name": "Deal" if is_deal else "Lead",
            }
        )
    return result
@frappe.whitelist()
def mark_as_read(user=None, doc=None):
    """Mark unread CRM Notifications as read.

    :param user: target user (defaults to the session user)
    :param doc: when given, only notifications whose ``comment`` or
        ``notification_type_doc`` matches this name are marked
    """
    user = user or frappe.session.user
    filters = {"to_user": user, "read": False}
    # Default to no OR-restriction. The previous code only defined
    # ``or_filters`` inside the ``if doc`` branch but referenced it
    # unconditionally below, raising NameError when ``doc`` was not given.
    or_filters = []
    if doc:
        or_filters = [
            {"comment": doc},
            {"notification_type_doc": doc},
        ]
    for n in frappe.get_all("CRM Notification", filters=filters, or_filters=or_filters):
        d = frappe.get_doc("CRM Notification", n.name)
        d.read = True
        d.save()
|
2302_79757062/crm
|
crm/api/notifications.py
|
Python
|
agpl-3.0
| 2,052
|
import frappe
@frappe.whitelist()
def get_users():
    """List all users with CRM-relevant fields, flagging the session user
    and whether each user counts as a sales manager."""
    users = frappe.qb.get_query(
        "User",
        fields=["name", "email", "enabled", "user_image", "first_name", "last_name", "full_name", "user_type"],
        order_by="full_name asc",
        distinct=True,
    ).run(as_dict=1)

    session_user = frappe.session.user
    for user in users:
        if user.name == session_user:
            user.session_user = True
        # Managers: anyone with the Sales Manager role, or Administrator.
        user.is_manager = (
            "Sales Manager" in frappe.get_roles(user.name) or user.name == "Administrator"
        )
    return users
@frappe.whitelist()
def get_contacts():
    """List all Contacts together with their child email and phone rows."""
    contact_fields = [
        "name",
        "salutation",
        "first_name",
        "last_name",
        "full_name",
        "gender",
        "address",
        "designation",
        "image",
        "email_id",
        "mobile_no",
        "phone",
        "company_name",
        "modified",
    ]
    contacts = frappe.get_all(
        "Contact",
        fields=contact_fields,
        order_by="first_name asc",
        distinct=True,
    )

    # Attach each contact's child-table rows.
    for contact in contacts:
        child_filters = {"parenttype": "Contact", "parent": contact.name}
        contact["email_ids"] = frappe.get_all(
            "Contact Email",
            filters=child_filters,
            fields=["name", "email_id", "is_primary"],
        )
        contact["phone_nos"] = frappe.get_all(
            "Contact Phone",
            filters=child_filters,
            fields=["name", "phone", "is_primary_phone", "is_primary_mobile_no"],
        )
    return contacts
@frappe.whitelist()
def get_lead_contacts():
    """List contact details of all leads that have not yet been converted."""
    return frappe.get_all(
        "CRM Lead",
        fields=["name", "lead_name", "mobile_no", "phone", "image", "modified"],
        filters={"converted": 0},
        order_by="lead_name asc",
        distinct=True,
    )
@frappe.whitelist()
def get_organizations():
    """List every CRM Organization, ordered by name."""
    query = frappe.qb.get_query(
        "CRM Organization",
        fields=['*'],
        order_by="name asc",
        distinct=True,
    )
    return query.run(as_dict=1)
|
2302_79757062/crm
|
crm/api/session.py
|
Python
|
agpl-3.0
| 1,741
|
import frappe
def after_insert(doc, method):
    """ToDo doc-event hook: when an assignment is created against a lead or
    deal that has no owner yet, make the assignee its owner."""
    if doc.reference_type not in ("CRM Lead", "CRM Deal"):
        return
    if not (doc.reference_name and doc.allocated_to):
        return
    owner_field = "lead_owner" if doc.reference_type == "CRM Lead" else "deal_owner"
    current_owner = frappe.db.get_value(doc.reference_type, doc.reference_name, owner_field)
    if not current_owner:
        frappe.db.set_value(doc.reference_type, doc.reference_name, owner_field, doc.allocated_to)
|
2302_79757062/crm
|
crm/api/todo.py
|
Python
|
agpl-3.0
| 421
|
import frappe
from pypika import Criterion
@frappe.whitelist()
def get_views(doctype):
    """Return the CRM View Settings visible to the session user (their own
    plus global ones), optionally restricted to *doctype*."""
    View = frappe.qb.DocType("CRM View Settings")
    visible_to_user = Criterion.any(
        [View.user == '', View.user == frappe.session.user]
    )
    query = frappe.qb.from_(View).select("*").where(visible_to_user)
    if doctype:
        query = query.where(View.dt == doctype)
    return query.run(as_dict=True)
|
2302_79757062/crm
|
crm/api/views.py
|
Python
|
agpl-3.0
| 367
|
import frappe
import json
from frappe import _
from crm.api.doc import get_assigned_users
def validate(doc, method):
    """WhatsApp Message doc-event hook: link incoming messages to the lead
    or deal matching the sender's number."""
    if doc.type != "Incoming" or not doc.get("from"):
        return
    name, doctype = get_lead_or_deal_from_number(doc.get("from"))
    doc.reference_doctype = doctype
    doc.reference_name = name
def on_update(doc, method):
    """WhatsApp Message doc-event hook: push a realtime event so open
    clients refresh the conversation, then notify the assigned agents."""
    payload = {
        "reference_doctype": doc.reference_doctype,
        "reference_name": doc.reference_name,
    }
    frappe.publish_realtime("whatsapp_message", payload)
    notify_agent(doc)
def notify_agent(doc):
    """Create a "WhatsApp" CRM Notification for every user assigned to the
    lead/deal an incoming message is linked to."""
    if doc.type != "Incoming":
        return

    doctype = doc.reference_doctype
    if doctype.startswith("CRM "):
        doctype = doctype[4:].lower()
    notification_text = f"""
        <div class="mb-2 leading-5 text-gray-600">
            <span class="font-medium text-gray-900">{ _('You') }</span>
            <span>{ _('received a whatsapp message in {0}').format(doctype) }</span>
            <span class="font-medium text-gray-900">{ doc.reference_name }</span>
        </div>
    """
    assigned_users = get_assigned_users(doc.reference_doctype, doc.reference_name)
    for user in assigned_users:
        values = frappe._dict(
            doctype="CRM Notification",
            from_user=doc.owner,
            to_user=user,
            type="WhatsApp",
            message=doc.message,
            notification_text=notification_text,
            notification_type_doctype="WhatsApp Message",
            notification_type_doc=doc.name,
            reference_doctype=doc.reference_doctype,
            reference_name=doc.reference_name,
        )
        if frappe.db.exists("CRM Notification", values):
            # Already notified. The original `return` here silently
            # skipped every remaining assigned user.
            continue
        frappe.get_doc(values).insert(ignore_permissions=True)
def get_lead_or_deal_from_number(number):
    """Get lead/deal from the given number.

    Returns ``(name, doctype)`` where doctype is "CRM Deal" or "CRM Lead"
    and ``name`` is None when nothing matches. Deals are preferred, then
    unconverted leads, then any lead.
    """
    def find_record(doctype, mobile_no, where=""):
        mobile_no = parse_mobile_no(mobile_no)
        # Parameterized: the previous version interpolated the externally
        # supplied number directly into the SQL string, unquoted — both an
        # injection surface and a string-vs-number comparison.
        query = f"""
            SELECT name, mobile_no
            FROM `tab{doctype}`
            WHERE CONCAT('+', REGEXP_REPLACE(mobile_no, '[^0-9]', '')) = %(mobile_no)s
        """
        data = frappe.db.sql(query + where, {"mobile_no": mobile_no}, as_dict=True)
        return data[0].name if data else None

    doctype = "CRM Deal"
    doc = find_record(doctype, number) or None
    if not doc:
        doctype = "CRM Lead"
        # Prefer a lead that has not been converted yet.
        doc = find_record(doctype, number, "AND converted is not True")
        if not doc:
            doc = find_record(doctype, number)
    return doc, doctype
def parse_mobile_no(mobile_no: str):
    """Parse mobile number to remove spaces, brackets, etc.

    >>> parse_mobile_no('+91 (766) 667 6666')
    ... '+917666676666'
    """
    kept = (char for char in mobile_no if char.isdigit() or char == "+")
    return "".join(kept)
@frappe.whitelist()
def is_whatsapp_enabled():
    """Return the WhatsApp "enabled" flag; False when the integration's
    settings doctype is not installed at all."""
    whatsapp_installed = frappe.db.exists("DocType", "WhatsApp Settings")
    if not whatsapp_installed:
        return False
    return frappe.get_cached_value("WhatsApp Settings", "WhatsApp Settings", "enabled")
@frappe.whitelist()
def is_whatsapp_installed():
    """True when the WhatsApp integration app (its settings doctype) is
    installed on this site."""
    return bool(frappe.db.exists("DocType", "WhatsApp Settings"))
@frappe.whitelist()
def get_whatsapp_messages(reference_doctype, reference_name):
    """Return the WhatsApp messages linked to a document, enriched with
    rendered template content, reactions and reply context.

    Reaction messages are folded into the message they react to and are
    not returned as rows themselves. Returns [] when the WhatsApp
    integration is not installed.
    """
    if not frappe.db.exists("DocType", "WhatsApp Message"):
        return []
    messages = frappe.get_all(
        "WhatsApp Message",
        filters={
            "reference_doctype": reference_doctype,
            "reference_name": reference_name,
        },
        fields=[
            "name",
            "type",
            "to",
            "from",
            "content_type",
            "message_type",
            "attach",
            "template",
            "use_template",
            "message_id",
            "is_reply",
            "reply_to_message_id",
            "creation",
            "message",
            "status",
            "reference_doctype",
            "reference_name",
            "template_parameters",
            "template_header_parameters",
        ],
    )

    # Expand template-based messages with the rendered template body,
    # header and footer.
    template_messages = [
        message for message in messages if message["message_type"] == "Template"
    ]
    for template_message in template_messages:
        # Find the template that this message is using
        template = frappe.get_doc("WhatsApp Templates", template_message["template"])
        # If the template is found, add the template details to the template message
        if template:
            template_message["template_name"] = template.template_name
            if template_message["template_parameters"]:
                parameters = json.loads(template_message["template_parameters"])
                template.template = parse_template_parameters(
                    template.template, parameters
                )
            template_message["template"] = template.template
            if template_message["template_header_parameters"]:
                header_parameters = json.loads(
                    template_message["template_header_parameters"]
                )
                template.header = parse_template_parameters(
                    template.header, header_parameters
                )
            template_message["header"] = template.header
            template_message["footer"] = template.footer

    # Fold reaction messages into the message they react to.
    reaction_messages = [
        message for message in messages if message["content_type"] == "reaction"
    ]
    for reaction_message in reaction_messages:
        reacted_message = next(
            (
                m
                for m in messages
                if m["message_id"] == reaction_message["reply_to_message_id"]
            ),
            None,
        )
        if reacted_message:
            reacted_message["reaction"] = reaction_message["message"]

    # Resolve a display name for each sender ("You" for outgoing).
    for message in messages:
        message["from_name"] = get_from_name(message) if message["from"] else _("You")

    # Attach reply context to messages that are replies.
    reply_messages = [message for message in messages if message["is_reply"]]
    for reply_message in reply_messages:
        replied_message = next(
            (
                m
                for m in messages
                if m["message_id"] == reply_message["reply_to_message_id"]
            ),
            None,
        )
        # Guard before dereferencing: the previous code read
        # replied_message["from"] before the None check, crashing whenever
        # the replied-to message was not part of this conversation. It also
        # passed reply_message (not replied_message) to get_from_name,
        # mislabeling who the replied-to message was from.
        if replied_message:
            from_name = (
                get_from_name(replied_message) if replied_message["from"] else _("You")
            )
            message = replied_message["message"]
            if replied_message["message_type"] == "Template":
                message = replied_message["template"]
            reply_message["reply_message"] = message
            reply_message["header"] = replied_message.get("header") or ""
            reply_message["footer"] = replied_message.get("footer") or ""
            reply_message["reply_to"] = replied_message["name"]
            reply_message["reply_to_type"] = replied_message["type"]
            reply_message["reply_to_from"] = from_name

    return [message for message in messages if message["content_type"] != "reaction"]
@frappe.whitelist()
def create_whatsapp_message(
	reference_doctype,
	reference_name,
	message,
	to,
	attach,
	reply_to,
	content_type="text",
):
	"""Insert an outgoing WhatsApp Message linked to a CRM document.

	When `reply_to` names an existing WhatsApp Message, the new message is
	marked as a reply to it. Returns the new document's name.
	"""
	values = {
		"reference_doctype": reference_doctype,
		"reference_name": reference_name,
		"message": message or attach,
		"to": to,
		"attach": attach,
		"content_type": content_type,
	}
	if reply_to:
		original = frappe.get_doc("WhatsApp Message", reply_to)
		values["is_reply"] = True
		values["reply_to_message_id"] = original.message_id
	doc = frappe.new_doc("WhatsApp Message")
	doc.update(values)
	doc.insert(ignore_permissions=True)
	return doc.name
@frappe.whitelist()
def send_whatsapp_template(reference_doctype, reference_name, template, to):
	"""Create a template-based WhatsApp Message for `to`, linked to the given
	document, and return the new message's name."""
	doc = frappe.new_doc("WhatsApp Message")
	doc.reference_doctype = reference_doctype
	doc.reference_name = reference_name
	doc.message_type = "Template"
	doc.message = "Template message"
	doc.content_type = "text"
	doc.use_template = True
	doc.template = template
	doc.to = to
	doc.insert(ignore_permissions=True)
	return doc.name
@frappe.whitelist()
def react_on_whatsapp_message(emoji, reply_to_name):
	"""Send an emoji reaction targeting an existing WhatsApp Message and
	return the new reaction message's name."""
	target = frappe.get_doc("WhatsApp Message", reply_to_name)
	# Reactions to incoming messages go back to the sender; otherwise (or
	# when the sender number is unset) they go to the original recipient.
	if target.type == "Incoming" and target.get("from"):
		recipient = target.get("from")
	else:
		recipient = target.to
	doc = frappe.new_doc("WhatsApp Message")
	doc.update(
		{
			"reference_doctype": target.reference_doctype,
			"reference_name": target.reference_name,
			"message": emoji,
			"to": recipient,
			"reply_to_message_id": target.message_id,
			"content_type": "reaction",
		}
	)
	doc.insert(ignore_permissions=True)
	return doc.name
def parse_template_parameters(string, parameters):
	"""Substitute positional WhatsApp template placeholders.

	Replaces ``{{1}}``, ``{{2}}``, ... in ``string`` with the corresponding
	entries of ``parameters`` (1-indexed). Values are coerced with ``str()``
	so non-string parameters (e.g. numbers decoded from stored JSON) work
	instead of raising TypeError in ``str.replace``.

	:param string: template text containing ``{{n}}`` placeholders
	:param parameters: iterable of replacement values, in placeholder order
	:return: the text with all matched placeholders replaced
	"""
	for position, value in enumerate(parameters, start=1):
		string = string.replace("{{" + str(position) + "}}", str(value))
	return string
def get_from_name(message):
	"""Resolve a human-readable sender name for a WhatsApp message.

	For CRM Deals, use the primary contact's full name (or mobile number),
	falling back to the deal's ``lead_name`` when no contacts are attached.
	For other doctypes (e.g. CRM Lead), join first and last name.

	:param message: dict with at least ``reference_doctype`` and ``reference_name``
	:return: display-name string; may be "" when nothing is set
	"""
	doc = frappe.get_doc(message["reference_doctype"], message["reference_name"])
	from_name = ""
	if message["reference_doctype"] == "CRM Deal":
		if doc.get("contacts"):
			for contact in doc.get("contacts"):
				if contact.is_primary:
					from_name = contact.full_name or contact.mobile_no
					break
		else:
			from_name = doc.get("lead_name")
	else:
		# BUG FIX: the original concatenated first_name + " " + last_name
		# directly, raising TypeError whenever either field is unset (None).
		parts = [doc.get("first_name"), doc.get("last_name")]
		from_name = " ".join(p for p in parts if p)
	return from_name
|
2302_79757062/crm
|
crm/api/whatsapp.py
|
Python
|
agpl-3.0
| 10,345
|
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("CRM Call Log", {
// refresh(frm) {
// },
// });
|
2302_79757062/crm
|
crm/fcrm/doctype/crm_call_log/crm_call_log.js
|
JavaScript
|
agpl-3.0
| 195
|
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
class CRMCallLog(Document):
	@staticmethod
	def default_list_data():
		"""Return the default column layout and row fields for the call log
		list view."""
		# (label, type, key, link options or None, width)
		column_specs = [
			('From', 'Link', 'caller', 'User', '9rem'),
			('To', 'Link', 'receiver', 'User', '9rem'),
			('Type', 'Select', 'type', None, '9rem'),
			('Status', 'Select', 'status', None, '9rem'),
			('Duration', 'Duration', 'duration', None, '6rem'),
			('From (number)', 'Data', 'from', None, '9rem'),
			('To (number)', 'Data', 'to', None, '9rem'),
			('Created On', 'Datetime', 'creation', None, '8rem'),
		]
		columns = []
		for label, column_type, key, options, width in column_specs:
			spec = {'label': label, 'type': column_type, 'key': key}
			if options is not None:
				spec['options'] = options
			spec['width'] = width
			columns.append(spec)
		rows = [
			"name",
			"caller",
			"receiver",
			"type",
			"status",
			"duration",
			"from",
			"to",
			"note",
			"recording_url",
			"reference_doctype",
			"reference_docname",
			"creation",
		]
		return {'columns': columns, 'rows': rows}
@frappe.whitelist()
def create_lead_from_call_log(call_log):
	"""Create a CRM Lead from an incoming call log, then relink the log (and
	its note, when present) to the new lead. Returns the lead's name."""
	caller_number = call_log.get("from")
	lead = frappe.new_doc("CRM Lead")
	lead.update(
		{
			"first_name": "Lead from call " + caller_number,
			"mobile_no": caller_number,
			"lead_owner": frappe.session.user,
		}
	)
	lead.save(ignore_permissions=True)
	link = {"reference_doctype": "CRM Lead", "reference_docname": lead.name}
	frappe.db.set_value("CRM Call Log", call_log.get("name"), link)
	note = call_log.get("note")
	if note:
		frappe.db.set_value("FCRM Note", note, link)
	return lead.name
|
2302_79757062/crm
|
crm/fcrm/doctype/crm_call_log/crm_call_log.py
|
Python
|
agpl-3.0
| 2,006
|
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("CRM Communication Status", {
// refresh(frm) {
// },
// });
|
2302_79757062/crm
|
crm/fcrm/doctype/crm_communication_status/crm_communication_status.js
|
JavaScript
|
agpl-3.0
| 207
|
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class CRMCommunicationStatus(Document):
	"""Controller for the CRM Communication Status doctype; no custom
	behavior beyond the generic Document base class."""
	pass
|
2302_79757062/crm
|
crm/fcrm/doctype/crm_communication_status/crm_communication_status.py
|
Python
|
agpl-3.0
| 227
|
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class CRMContacts(Document):
	"""Controller for the CRM Contacts child doctype; no custom behavior
	beyond the generic Document base class."""
	pass
|
2302_79757062/crm
|
crm/fcrm/doctype/crm_contacts/crm_contacts.py
|
Python
|
agpl-3.0
| 216
|
import frappe
from frappe import _
from crm.api.doc import get_fields_meta, get_assigned_users
from crm.fcrm.doctype.crm_form_script.crm_form_script import get_form_script
@frappe.whitelist()
def get_deal(name):
	"""Fetch a CRM Deal with its contacts, field metadata, form script and
	assigned users attached.

	:param name: deal document name
	:raises frappe.DoesNotExistError: when no deal with that name exists
	"""
	Deal = frappe.qb.DocType("CRM Deal")
	results = (
		frappe.qb.from_(Deal)
		.select("*")
		.where(Deal.name == name)
		.limit(1)
	).run(as_dict=True)
	if not results:
		frappe.throw(_("Deal not found"), frappe.DoesNotExistError)
	deal = results[0]
	deal["contacts"] = frappe.get_all(
		"CRM Contacts",
		filters={"parenttype": "CRM Deal", "parent": deal.name},
		fields=["contact", "is_primary"],
	)
	deal["doctype"] = "CRM Deal"
	deal["fields_meta"] = get_fields_meta("CRM Deal")
	deal["_form_script"] = get_form_script("CRM Deal")
	deal["_assign"] = get_assigned_users("CRM Deal", deal.name, deal.owner)
	return deal
@frappe.whitelist()
def get_deal_contacts(name):
	"""Return the contacts linked to a deal with primary email/phone resolved.

	:param name: CRM Deal name
	:return: list of dicts with name, image, full_name, email, mobile_no,
	         is_primary for each linked Contact
	"""
	links = frappe.get_all(
		"CRM Contacts",
		filters={"parenttype": "CRM Deal", "parent": name},
		fields=["contact", "is_primary"],
	)
	deal_contacts = []
	for link in links:
		contact = frappe.get_doc("Contact", link.contact).as_dict()
		deal_contacts.append(
			{
				"name": contact.name,
				"image": contact.image,
				"full_name": contact.full_name,
				"email": _get_primary_email(contact),
				"mobile_no": _get_primary_mobile_no(contact),
				"is_primary": link.is_primary,
			}
		)
	return deal_contacts


# Hoisted from the loop body: the original redefined these helpers on every
# iteration, which is wasteful and obscures the function's structure.
def _get_primary_email(contact):
	"""Primary email of a Contact, else its first email, else ""."""
	for email in contact.email_ids:
		if email.is_primary:
			return email.email_id
	return contact.email_ids[0].email_id if contact.email_ids else ""


def _get_primary_mobile_no(contact):
	"""Primary phone of a Contact, else its first phone, else ""."""
	for phone in contact.phone_nos:
		if phone.is_primary:
			return phone.phone
	return contact.phone_nos[0].phone if contact.phone_nos else ""
|
2302_79757062/crm
|
crm/fcrm/doctype/crm_deal/api.py
|
Python
|
agpl-3.0
| 1,843
|