text
stringlengths
1
93.6k
url = 'https://graph.facebook.com/graphql'
res = requests.post(url, data=data, headers=headers)
print res.text
if '"is_shielded":true' in res.text:
os.system('clear')
print logo
print 52 * '\x1b[1;97m\xe2\x95\x90'
print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mActivated'
raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]')
lain()
else:
if '"is_shielded":false' in res.text:
os.system('clear')
print logo
print 52 * '\x1b[1;97m\xe2\x95\x90'
print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;91mDeactivated'
raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]')
lain()
else:
print '\x1b[1;91m[!] Error'
keluar()
if __name__ == '__main__':
login()
# <FILESEP>
import argparse
import torch
import torch.nn as nn
import numpy as np
import pickle
import torch.optim as optim
import scipy.misc
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
import sys
import os
import os.path as osp
import random
import logging
import time
import torch.distributed as dist
import torch.multiprocessing as mp
from tensorboardX import SummaryWriter
from model.feature_extractor import resnet_feature_extractor
from model.classifier import ASPP_Classifier_Gen
from model.discriminator import FCDiscriminator
from utils.util import *
from data import create_dataset
import cv2
# ImageNet per-channel statistics (RGB order) used to normalize input images.
IMG_MEAN = np.array((0.485, 0.456, 0.406), dtype=np.float32)
IMG_STD = np.array((0.229, 0.224, 0.225), dtype=np.float32)
# Default hyper-parameters; each is exposed as a CLI flag via get_arguments().
MODEL = 'DeepLab'  # only available option per the --model help text below
BATCH_SIZE = 1  # images per step
ITER_SIZE = 1  # gradient-accumulation iterations (see --iter-size)
NUM_WORKERS = 16  # dataloader worker processes
IGNORE_LABEL = 250  # label value excluded from the loss (see --ignore-label)
LEARNING_RATE = 2.5e-4  # base LR -- presumably for the segmentation network
MOMENTUM = 0.9  # SGD momentum -- TODO confirm against optimizer setup
NUM_CLASSES = 19  # 19 classes -- looks like Cityscapes; verify dataset config
NUM_STEPS = 62500  # total scheduled training iterations
NUM_STEPS_STOP = 40000 # early stopping
POWER = 0.9  # NOTE(review): presumably the polynomial LR-decay exponent; confirm
RANDOM_SEED = 1234  # seed for reproducibility
RESUME = './pretrained/model_phase1.pth'  # checkpoint path to resume from
SAVE_NUM_IMAGES = 2
SAVE_PRED_EVERY = 1000  # snapshot/checkpoint interval in iterations
SNAPSHOT_DIR = './snapshots/'  # where checkpoints are written
WEIGHT_DECAY = 0.0005
LOG_DIR = './log'  # tensorboard log directory -- presumably; verify writer setup
LEARNING_RATE_D = 1e-4  # separate LR, "_D" suggests the discriminator -- confirm
LAMBDA_SEG = 0.1  # weight of an auxiliary segmentation loss term
LAMBDA_ADV_TARGET1 = 0.0002  # adversarial loss weights for two outputs/levels
LAMBDA_ADV_TARGET2 = 0.001
SET = 'train'  # dataset split selector
def get_arguments():
"""Parse all the arguments provided from the CLI.
Returns:
A list of parsed arguments.
"""
parser = argparse.ArgumentParser(description="DeepLab-ResNet Network")
parser.add_argument("--model", type=str, default=MODEL,
help="available options : DeepLab")
parser.add_argument("--batch-size", type=int, default=BATCH_SIZE,
help="Number of images sent to the network in one step.")
parser.add_argument("--iter-size", type=int, default=ITER_SIZE,
help="Accumulate gradients for ITER_SIZE iterations.")
parser.add_argument("--num-workers", type=int, default=NUM_WORKERS,
help="number of workers for multithread dataloading.")
parser.add_argument("--ignore-label", type=int, default=IGNORE_LABEL,