desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def AppendPostbuildVariable(self, variables, spec, output, binary, is_command_start=False):
    """Adds a 'postbuild' variable if there is a postbuild for |output|."""
    command = self.GetPostbuildCommand(spec, output, binary, is_command_start)
    if command:
        variables.append(('postbuilds', command))
'Returns a shell command that runs all the postbuilds, and removes |output| if any of them fails. If |is_command_start| is False, then the returned string will start with \' && \'.'
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
if ((not self.xcode_settings) or (spec['type'] == 'none') or (not output)): return '' output = QuoteShellArgument(output, self.flavor) postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) if (output_binary is not None): postbuilds = self.xcode_settings.AddImplicitP...
def ComputeExportEnvString(self, env):
    """Given an environment, returns a string looking like
    'export FOO=foo; export BAR="${FOO} bar;' that exports |env| to the shell."""
    exports = ['export %s=%s;' % (name, ninja_syntax.escape(
                   gyp.common.EncodePOSIXShellArgument(value)))
               for (name, value) in env]
    return ' '.join(exports)
def ComputeMacBundleOutput(self):
    """Return the 'output' (full output path) to a bundle output directory."""
    assert self.is_mac_bundle
    product_dir = generator_default_variables['PRODUCT_DIR']
    bundle_path = os.path.join(product_dir, self.xcode_settings.GetWrapperName())
    return self.ExpandSpecial(bundle_path)
'Compute the filename of the final output for the current target.'
def ComputeOutputFileName(self, spec, type=None):
if (not type): type = spec['type'] default_variables = copy.copy(generator_default_variables) CalculateVariables(default_variables, {'flavor': self.flavor}) DEFAULT_PREFIX = {'loadable_module': default_variables['SHARED_LIB_PREFIX'], 'shared_library': default_variables['SHARED_LIB_PREFIX'], 'sta...
'Compute the path for the final output of the spec.'
def ComputeOutput(self, spec, arch=None):
type = spec['type'] if (self.flavor == 'win'): override = self.msvs_settings.GetOutputName(self.config_name, self.ExpandSpecial) if override: return override if ((arch is None) and (self.flavor == 'mac') and (type in ('static_library', 'executable', 'shared_library', 'loadable_mo...
'Write out a new ninja "rule" statement for a given command. Returns the name of the new rule, and a copy of |args| with variables expanded.'
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool, depfile=None):
if (self.flavor == 'win'): args = [self.msvs_settings.ConvertVSMacros(arg, self.base_to_build, config=self.config_name) for arg in args] description = self.msvs_settings.ConvertVSMacros(description, config=self.config_name) elif (self.flavor == 'mac'): args = [gyp.xcode_emulation.ExpandE...
'The main entry point: writes a .mk file for a single target. Arguments: qualified_target: target we\'re generating base_path: path relative to source root we\'re building in, used to resolve target-relative paths output_filename: output .mk file name to write spec, configs: gyp info part_of_all: flag indicating this t...
def Write(self, qualified_target, base_path, output_filename, spec, configs, part_of_all):
gyp.common.EnsureDirExists(output_filename) self.fp = open(output_filename, 'w') self.fp.write(header) self.qualified_target = qualified_target self.path = base_path self.target = spec['target_name'] self.type = spec['type'] self.toolset = spec['toolset'] self.is_mac_bundle = gyp.xco...
'Write a "sub-project" Makefile. This is a small, wrapper Makefile that calls the top-level Makefile to build the targets from a single gyp file (i.e. a sub-project). Arguments: output_filename: sub-project Makefile name to write makefile_path: path to the top-level Makefile targets: list of "all" targets for this sub-...
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
gyp.common.EnsureDirExists(output_filename) self.fp = open(output_filename, 'w') self.fp.write(header) self.WriteLn(('export builddir_name ?= %s' % os.path.join(os.path.dirname(output_filename), build_dir))) self.WriteLn('.PHONY: all') self.WriteLn('all:') if makefile_path: ...
'Write Makefile code for any \'actions\' from the gyp input. extra_sources: a list that will be filled in with newly generated source files, if any extra_outputs: a list that will be filled in with any outputs of these actions (used to make other pieces dependent on these actions) part_of_all: flag indicating this targ...
def WriteActions(self, actions, extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all):
env = self.GetSortedXcodeEnv() for action in actions: name = StringToMakefileVariable(('%s_%s' % (self.qualified_target, action['action_name']))) self.WriteLn(('### Rules for action "%s":' % action['action_name'])) inputs = action['inputs'] outputs = action['outputs']...
'Write Makefile code for any \'rules\' from the gyp input. extra_sources: a list that will be filled in with newly generated source files, if any extra_outputs: a list that will be filled in with any outputs of these rules (used to make other pieces dependent on these rules) part_of_all: flag indicating this target is ...
def WriteRules(self, rules, extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all):
env = self.GetSortedXcodeEnv() for rule in rules: name = StringToMakefileVariable(('%s_%s' % (self.qualified_target, rule['rule_name']))) count = 0 self.WriteLn(('### Generated for rule %s:' % name)) all_outputs = [] for rule_source in rule.get('rule_sources',...
'Write Makefile code for any \'copies\' from the gyp input. extra_outputs: a list that will be filled in with any outputs of this action (used to make other pieces dependent on this action) part_of_all: flag indicating this target is part of \'all\''
def WriteCopies(self, copies, extra_outputs, part_of_all):
self.WriteLn('### Generated for copy rule.') variable = StringToMakefileVariable((self.qualified_target + '_copies')) outputs = [] for copy in copies: for path in copy['files']: path = Sourceify(self.Absolutify(path)) filename = os.path.split(path)[1] ...
'Writes Makefile code for \'mac_bundle_resources\'.'
def WriteMacBundleResources(self, resources, bundle_deps):
self.WriteLn('### Generated for mac_bundle_resources') for (output, res) in gyp.xcode_emulation.GetMacBundleResources(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, map(Sourceify, map(self.Absolutify, resources))): (_, ext) = os.path.splitext(output) if (ext != '.xcass...
'Write Makefile code for bundle Info.plist files.'
def WriteMacInfoPlist(self, bundle_deps):
(info_plist, out, defines, extra_env) = gyp.xcode_emulation.GetMacInfoPlist(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, (lambda p: Sourceify(self.Absolutify(p)))) if (not info_plist): return if defines: intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' + os.path.basen...
'Write Makefile code for any \'sources\' from the gyp input. These are source files necessary to build the current target. configs, deps, sources: input from gyp. extra_outputs: a list of extra outputs this action should be dependent on; used to serialize action/rules before compilation extra_link_deps: a list that wil...
def WriteSources(self, configs, deps, sources, extra_outputs, extra_link_deps, part_of_all, precompiled_header):
for configname in sorted(configs.keys()): config = configs[configname] self.WriteList(config.get('defines'), ('DEFS_%s' % configname), prefix='-D', quoter=EscapeCppDefine) if (self.flavor == 'mac'): cflags = self.xcode_settings.GetCflags(configname) cflags_c = self.xc...
'Writes make rules to compile prefix headers.'
def WritePchTargets(self, pch_commands):
if (not pch_commands): return for (gch, lang_flag, lang, input) in pch_commands: extra_flags = {'c': '$(CFLAGS_C_$(BUILDTYPE))', 'cc': '$(CFLAGS_CC_$(BUILDTYPE))', 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))', 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))'}[l...
'Return the \'output basename\' of a gyp spec. E.g., the loadable module \'foobar\' in directory \'baz\' will produce \'libfoobar.so\''
def ComputeOutputBasename(self, spec):
assert (not self.is_mac_bundle) if ((self.flavor == 'mac') and (self.type in ('static_library', 'executable', 'shared_library', 'loadable_module'))): return self.xcode_settings.GetExecutablePath() target = spec['target_name'] target_prefix = '' target_ext = '' if (self.type == 'static_li...
def ComputeOutput(self, spec):
    """Return the 'output' (full output path) of a gyp spec.

    E.g., the loadable module 'foobar' in directory 'baz' will produce
    '$(obj)/baz/libfoobar.so'
    """
    assert not self.is_mac_bundle
    if self.type == 'executable' or self._InstallImmediately():
        # Installable targets land directly in the build directory.
        out_dir = '$(builddir)'
    else:
        out_dir = os.path.join('$(obj).' + self.toolset, self.path)
    # An explicit product_dir in the spec overrides the computed directory.
    out_dir = spec.get('product_dir', out_dir)
    return os.path.join(out_dir, self.ComputeOutputBasename(spec))
def ComputeMacBundleOutput(self, spec):
    """Return the 'output' (full output path) to a bundle output directory."""
    assert self.is_mac_bundle
    product_dir = generator_default_variables['PRODUCT_DIR']
    return os.path.join(product_dir, self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self, spec):
    """Return the 'output' (full output path) to the binary in a bundle."""
    product_dir = generator_default_variables['PRODUCT_DIR']
    return os.path.join(product_dir, self.xcode_settings.GetExecutablePath())
'Compute the dependencies of a gyp spec. Returns a tuple (deps, link_deps), where each is a list of filenames that will need to be put in front of make for either building (deps) or linking (link_deps).'
def ComputeDeps(self, spec):
deps = [] link_deps = [] if ('dependencies' in spec): deps.extend([target_outputs[dep] for dep in spec['dependencies'] if target_outputs[dep]]) for dep in spec['dependencies']: if (dep in target_link_deps): link_deps.append(target_link_deps[dep]) deps.exte...
'Write Makefile code to produce the final target of the gyp spec. spec, configs: input from gyp. deps, link_deps: dependency lists; see ComputeDeps() extra_outputs: any extra outputs that our target should depend on part_of_all: flag indicating this target is part of \'all\''
def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all):
self.WriteLn('### Rules for final target.') if extra_outputs: self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) self.WriteMakeRule(extra_outputs, deps, comment='Preserve order dependency of special output on deps.', order_only=True) target...
def WriteList(self, value_list, variable=None, prefix='', quoter=QuoteIfNecessary):
    """Write a variable definition that is a list of values.

    E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
        foo = blaha blahb
    but in a pretty-printed style.
    """
    values = ''
    if value_list:
        # NOTE(review): ' DCTB ' looks like a dataset-mangled tab ('\t') in the
        # makefile line-continuation — confirm against upstream gyp before use.
        value_list = [quoter((prefix + l)) for l in value_list]
        values = (' \\\n DCTB ' + ' \\\n DCTB '.join(value_list))
    self.fp.write(('%s :=%s\n\n' % (variable, values)))
'Write a Makefile rule that uses do_cmd. This makes the outputs dependent on the command line that was run, as well as support the V= make command line flag.'
def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False):
suffix = '' if postbuilds: assert (',' not in command) suffix = ',,1' self.WriteMakeRule(outputs, inputs, actions=[('$(call do_cmd,%s%s)' % (command, suffix))], comment=comment, command=command, force=True) outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs] self.WriteL...
'Write a Makefile rule, with some extra tricks. outputs: a list of outputs for the rule (note: this is not directly supported by make; see comments below) inputs: a list of inputs for the rule actions: a list of shell commands to run for the rule comment: a comment to put in the Makefile above the rule (also useful for...
def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, order_only=False, force=False, phony=False, command=None):
outputs = map(QuoteSpaces, outputs) inputs = map(QuoteSpaces, inputs) if comment: self.WriteLn(('# ' + comment)) if phony: self.WriteLn(('.PHONY: ' + ' '.join(outputs))) if actions: self.WriteLn(('%s: TOOLSET := $(TOOLSET)' % outputs[0])) force_append = ...
'Write a set of LOCAL_XXX definitions for Android NDK. These variable definitions will be used by Android NDK but do nothing for non-Android applications. Arguments: module_name: Android NDK module name, which must be unique among all module names. all_sources: A list of source files (will be filtered by Compilable). l...
def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
if (self.type not in ('executable', 'shared_library', 'static_library')): return self.WriteLn('# Variable definitions for Android applications') self.WriteLn('include $(CLEAR_VARS)') self.WriteLn(('LOCAL_MODULE := ' + module_name)) self.WriteLn('LOCAL_CFLAGS := ...
def Objectify(self, path):
    """Convert a path to its output directory form."""
    if '$(' in path:
        # Retarget generic $(obj)/ references at this toolset's object dir.
        path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
    if '$(obj)' in path:
        return path
    return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
def Pchify(self, path, lang):
    """Convert a prefix header path to its output directory form."""
    path = self.Absolutify(path)
    if '$(' in path:
        # NOTE(review): the replacement has no trailing '/', matching the
        # original byte-for-byte — confirm against upstream gyp.
        return path.replace('$(obj)/',
                            '$(obj).%s/$(TARGET)/pch-%s' % (self.toolset, lang))
    return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
def Absolutify(self, path):
    """Convert a subdirectory-relative path into a base-relative path.

    Skips over paths that contain variables.
    """
    if '$(' not in path:
        return os.path.normpath(os.path.join(self.path, path))
    # Variable-laden paths are left alone, minus any trailing slash.
    return path.rstrip('/')
def _InstallableTargetInstallPath(self):
    """Returns the location of the final output for an installable target."""
    # Installable targets go straight into the build directory.
    return '$(builddir)/' + self.alias
def __init__(self, path, name=None, entries=None, guid=None, items=None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder; defaults to the basename of |path|.
      entries: List of folder entries to nest inside this folder. May contain
        Folder or Project objects. May be None, if the folder is empty.
      guid: GUID to use for folder, if not None.
      items: List of solution items to include in the folder.
    """
    self.name = name if name else os.path.basename(path)
    self.path = path
    self.guid = guid
    # Entries are kept sorted so the emitted solution is deterministic.
    self.entries = sorted(list(entries or []))
    self.items = list(items or [])
    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
'Initializes the project. Args: path: Absolute path to the project file. name: Name of project. If None, the name will be the same as the base name of the project file. dependencies: List of other Project objects this project is dependent upon, if not None. guid: GUID to use for project, if not None. spec: Dictionary ...
def __init__(self, path, name=None, dependencies=None, guid=None, spec=None, build_file=None, config_platform_overrides=None, fixpath_prefix=None):
self.path = path self.guid = guid self.spec = spec self.build_file = build_file self.name = (name or os.path.splitext(os.path.basename(path))[0]) self.dependencies = list((dependencies or [])) self.entry_type_guid = ENTRY_TYPE_GUIDS['project'] if config_platform_overrides: self.c...
def __init__(self, path, version, entries=None, variants=None, websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution (Folder or Project objects), or
        None for an empty solution.
      variants: List of build variant strings; a default list is used if None.
      websiteProperties: Flag controlling website property emission.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version
    self.entries = list(entries or [])
    # Copy the caller's list so later mutation cannot affect us.
    self.variants = variants[:] if variants else ['Debug|Win32', 'Release|Win32']
    # The solution file is written immediately on construction.
    self.Write()
'Writes the solution file to disk. Raises: IndexError: An entry appears multiple times.'
def Write(self, writer=gyp.common.WriteOnDiff):
all_entries = set() entries_to_check = self.entries[:] while entries_to_check: e = entries_to_check.pop(0) if (e in all_entries): continue all_entries.add(e) if isinstance(e, MSVSFolder): entries_to_check += e.entries all_entries = sorted(all_entri...
def __init__(self, server, spider, key):
    """Initialize per-spider redis queue.

    Parameters:
        server -- redis connection
        spider -- spider instance
        key -- key for this queue (e.g. "%(spider)s:queue")
    """
    self.server = server
    self.spider = spider
    # Expand the key template with this spider's name.
    self.key = key % {'spider': spider.name}
def _encode_request(self, request):
    """Encode a request object."""
    # protocol=-1 selects the highest pickle protocol available.
    as_dict = request_to_dict(request, self.spider)
    return pickle.dumps(as_dict, protocol=-1)
def _decode_request(self, encoded_request):
    """Decode a request previously encoded by _encode_request."""
    as_dict = pickle.loads(encoded_request)
    return request_from_dict(as_dict, self.spider)
def __len__(self):
    """Return the length of the queue."""
    # Abstract — storage-specific subclasses override this.
    raise NotImplementedError
def push(self, request):
    """Push a request."""
    # Abstract — storage-specific subclasses override this.
    raise NotImplementedError
def pop(self):
    """Pop a request."""
    # Abstract — storage-specific subclasses override this.
    raise NotImplementedError
def clear(self):
    """Clear queue/stack."""
    # Deletes the backing redis key entirely.
    self.server.delete(self.key)
def __len__(self):
    """Return the length of the queue."""
    # llen: length of the backing redis list.
    return self.server.llen(self.key)
def push(self, request):
    """Push a request."""
    payload = self._encode_request(request)
    # New items enter at the head of the redis list.
    self.server.lpush(self.key, payload)
def pop(self):
    """Pop a request."""
    raw = self.server.rpop(self.key)
    # rpop returns None when the queue is empty.
    return self._decode_request(raw) if raw else None
def __len__(self):
    """Return the length of the queue."""
    # zcard: cardinality of the backing sorted set.
    return self.server.zcard(self.key)
def push(self, request):
    """Push a request."""
    payload = self._encode_request(request)
    # Lower redis score pops first, so negate the scrapy priority.
    self.server.zadd(self.key, **{payload: -request.priority})
def pop(self):
    """Pop a request."""
    # Fetch and delete the lowest-scored (highest-priority) entry atomically
    # inside a MULTI transaction.
    pipe = self.server.pipeline()
    pipe.multi()
    pipe.zrange(self.key, 0, 0)
    pipe.zremrangebyrank(self.key, 0, 0)
    results, count = pipe.execute()
    if results:
        return self._decode_request(results[0])
def __len__(self):
    """Return the length of the stack."""
    # llen: length of the backing redis list.
    return self.server.llen(self.key)
def push(self, request):
    """Push a request."""
    # lpush: new items enter at the head of the redis list.
    self.server.lpush(self.key, self._encode_request(request))
def pop(self):
    """Pop a request."""
    raw = self.server.lpop(self.key)
    # lpop returns None when the stack is empty.
    return self._decode_request(raw) if raw else None
def __init__(self, server, persist, queue_key, queue_cls, dupefilter_key):
    """Initialize scheduler.

    Parameters:
        server : Redis instance
        persist : bool
        queue_key : str
        queue_cls : queue class
        dupefilter_key : str
    """
    self.server = server
    self.persist = persist
    self.queue_key = queue_key
    self.queue_cls = queue_cls
    self.dupefilter_key = dupefilter_key
def open(self, spider):
    """Executed when a spider is opened; wires up queue and dupe filter."""
    self.spider = spider
    self.queue = self.queue_cls(self.server, spider, self.queue_key)
    self.df = RFPDupeFilter(self.server,
                            self.dupefilter_key % {'spider': spider.name})
    # Requests may already be queued in redis from a previous run.
    if len(self.queue):
        spider.log('Resuming crawl (%d requests scheduled)' % len(self.queue))
def __init__(self, server, key):
    """Initialize duplication filter.

    Parameters:
        server : Redis instance
        key : str -- where to store fingerprints
    """
    self.server = server
    self.key = key
def request_seen(self, request):
    """use sismember judge whether fp is duplicate."""
    fp = request_fingerprint(request)
    if self.server.sismember(self.key, fp):
        return True
    # First sighting: record the fingerprint for future checks.
    self.server.sadd(self.key, fp)
    return False
def close(self, reason):
    """Delete data on close. Called by scrapy's scheduler."""
    # `reason` is part of scrapy's interface but unused here.
    self.clear()
def clear(self):
    """Clears fingerprints data."""
    # Deletes the whole fingerprint set key from redis.
    self.server.delete(self.key)
def stat_file(self, key, info):
    """Return stat info ({'last_modified', 'checksum'}) for the stored file.

    The stat is the file key dir; last_modified is taken from the stored
    GridFS file's upload date, checksum from its md5.
    """
    # Fetch the GridFS file once instead of twice (the original called
    # self.fs.get(key) separately for md5 and upload_date).
    gridfile = self.fs.get(key)
    return {'last_modified': gridfile.upload_date,
            'checksum': gridfile.md5}
'custom process_item func,so it will manage the Request result.'
def process_item(self, item, spider):
info = self.spiderinfo[spider] requests = arg_to_iter(self.get_media_requests(item, info)) dlist = [self._process_request(r, info) for r in requests] dfd = DeferredList(dlist, consumeErrors=1) dfd.addCallback(self.item_completed, item, info) return dfd.addCallback(self.another_process_item, item...
'custom process_item func,so it will manage the Request result.'
def another_process_item(self, result, item, info):
assert isinstance(result, (Item, Request)), ("WoaiduBookFile pipeline' item_completed must return Item or Request, got %s" % type(result)) if isinstance(result, Item): return result elif isinstance(result, Request): dlist = [self._process_request(r, info) for r in ...
'Only download once per book,so it pick out one from all of the download urls.'
def get_media_requests(self, item, info):
if item.get('book_download'): downloadfile_urls = [i['url'] for i in item.get('book_download') if i['url']] downloadfile_urls = list(set(itertools.chain(*downloadfile_urls))) first_download_file = list_first_item(downloadfile_urls) self.item_download[item['original_url']] = downloadf...
'Handler for success downloads.'
def media_downloaded(self, response, request, info):
referer = request.headers.get('Referer') if (response.status != 200): log.msg(format='%(medianame)s (code: %(status)s): Error downloading %(medianame)s from %(request)s referred in <%(referer)s>', level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, status=resp...
def file_key(self, url):
    """Return '<netloc>_<sha1(url)>' for the file url."""
    # hashlib.sha1 requires bytes on Python 3; encoding here is a no-op for
    # the Python 2 str inputs this code was written for.
    raw = url if isinstance(url, bytes) else url.encode('utf-8')
    file_guid = hashlib.sha1(raw).hexdigest()
    return '%s_%s' % (urlparse(url).netloc, file_guid)
def is_valid_content_type(self, response):
    """judge whether is it a valid response by the Content-Type."""
    content_type = response.headers.get('Content-Type', '')
    # NOTE(review): 'not in' means a type listed in BOOK_FILE_CONTENT_TYPE is
    # treated as NOT valid here — confirm the intended polarity with callers.
    return content_type not in self.BOOK_FILE_CONTENT_TYPE
'custom process_item func,so it will manage the Request result.'
def process_item(self, item, spider):
info = self.spiderinfo[spider] requests = arg_to_iter(self.get_media_requests(item, info)) dlist = [self._process_request(r, info) for r in requests] dfd = DeferredList(dlist, consumeErrors=1) dfd.addCallback(self.item_completed, item, info) return dfd.addCallback(self.another_process_item, item...
'custom process_item func,so it will manage the Request result.'
def another_process_item(self, result, item, info):
assert isinstance(result, (Item, Request)), ("WoaiduBookFile pipeline' item_completed must return Item or Request, got %s" % type(result)) if isinstance(result, Item): return result elif isinstance(result, Request): dlist = [self._process_request(r, info) for r in ...
'Only download once per book,so it pick out one from all of the download urls.'
def get_media_requests(self, item, info):
if item.get('book_download'): downloadfile_urls = [i['url'] for i in item.get('book_download') if i['url']] downloadfile_urls = list(set(itertools.chain(*downloadfile_urls))) first_download_file = list_first_item(downloadfile_urls) self.item_download[item['original_url']] = downloadf...
def is_valid_content_type(self, response):
    """judge whether is it a valid response by the Content-Type."""
    ctype = response.headers.get('Content-Type', '')
    # NOTE(review): types listed in BOOK_FILE_CONTENT_TYPE are rejected here;
    # confirm this matches the downloader's expectation.
    return ctype not in self.BOOK_FILE_CONTENT_TYPE
def __init__(self):
    """Connect to MongoDB and select the configured database.

    PyMongo's only fully supported async framework is Gevent; with Tornado or
    Twisted, rely instead on PyMongo's built-in connection pooling from
    multi-threaded code sharing a single MongoClient.
    """
    self.style = color.color_style()
    try:
        client = MongoClient(self.MONGODB_SERVER, self.MONGODB_PORT)
        self.db = client[self.MONGODB_DB]
    except Exception as e:
        # Python 2 print statement — this module predates Python 3.
        print self.style.ERROR(('ERROR(SingleMongodbPipeline): %s' % (str(e),)))
        traceback.print_exc()
def __init__(self):
    """Connect to MongoDB and select the configured database.

    PyMongo's only fully supported async framework is Gevent; with Tornado or
    Twisted, rely instead on PyMongo's built-in connection pooling from
    multi-threaded code sharing a single MongoClient.
    """
    self.style = color.color_style()
    try:
        client = MongoClient(self.MONGODB_SERVER, self.MONGODB_PORT)
        self.db = client[self.MONGODB_DB]
    except Exception as e:
        # Python 2 print statement — this module predates Python 3.
        print self.style.ERROR(('ERROR(ShardMongodbPipeline): %s' % (str(e),)))
        traceback.print_exc()
'the stat is the file key dir, the last_modified is the file that saved to the file key dir.'
def stat_file(self, key, info):
keydir = os.path.join(self.basedir, *key.split('/')) filenames = os.listdir(keydir) if (len(filenames) != 1): shutil.rmtree(keydir, True) return {} else: filename = list_first_item(filenames) absolute_path = self._get_filesystem_path(key) try: last_modified = os.p...
'Handler for success downloads.'
def media_downloaded(self, response, request, info):
referer = request.headers.get('Referer') if (response.status != 200): log.msg(format='%(medianame)s (code: %(status)s): Error downloading %(medianame)s from %(request)s referred in <%(referer)s>', level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, status=resp...
def is_valid_content_type(self, response):
    """judge whether is it a valid response by the Content-Type."""
    # This variant accepts every response regardless of Content-Type.
    return True
def file_key(self, url):
    """Return '<netloc>/<sha1(url)>' for the file url."""
    # hashlib.sha1 requires bytes on Python 3; encoding here is a no-op for
    # the Python 2 str inputs this code was written for.
    raw = url if isinstance(url, bytes) else url.encode('utf-8')
    file_guid = hashlib.sha1(raw).hexdigest()
    return '%s/%s' % (urlparse(url).netloc, file_guid)
'Get the raw file name that the sever transfer to. It examine two places:Content-Disposition,url.'
def get_file_name(self, request, response):
content_dispo = response.headers.get('Content-Disposition', '') filename = '' if content_dispo: for i in content_dispo.split(';'): if ('filename' in i): filename = i.split('filename=')[1].strip(' \n\'"') break if filename: if (urlparse(reque...
def process_request(self, request, spider):
    """Rewrite requests for cached domains to go through the Google cache.

    Per scrapy's downloader-middleware contract, returning a Request here
    reschedules it; so the rewritten (cache) request is returned, while all
    other requests fall through (implicit None) and download unchanged.
    """
    cached_domains = self.cache[spider]
    if cached_domains:
        if urlparse(request.url).netloc in cached_domains:
            request = request.replace(url=(self.google_cache + request.url))
            # Mark it so downstream code can tell it went via the cache.
            request.meta['google_cache'] = True
            return request
def parse(self, method, payload):
    """Parse the response payload and return the result.

    Returns a tuple that contains the result data and the cursors
    (or None if not present).
    """
    # Abstract — concrete parsers implement this.
    raise NotImplementedError
def parse_error(self, method, payload):
    """Parse the error message from payload.

    If unable to parse the message, throw an exception and the default error
    message will be used.
    """
    # Abstract — concrete parsers implement this.
    raise NotImplementedError
@classmethod
def parse(cls, api, json):
    """Parse a JSON object into a model instance."""
    # Abstract — each concrete model defines its own JSON -> model mapping.
    raise NotImplementedError
@classmethod
def parse_list(cls, api, json_list):
    """Parse a list of JSON objects into a result set of model instances."""
    results = ResultSet()
    # Delegate per-item parsing to the concrete model's parse().
    for payload in json_list:
        results.append(cls.parse(api, payload))
    return results
def apply_auth(self, url, method, headers, parameters):
    """Apply authentication headers to request."""
    # Abstract — concrete auth handlers implement this.
    raise NotImplementedError
def get_username(self):
    """Return the username of the authenticated user."""
    # Abstract — concrete auth handlers implement this.
    raise NotImplementedError
'Get the authorization URL to redirect the user'
def get_authorization_url(self, signin_with_twitter=False):
try: self.request_token = self._get_request_token() if signin_with_twitter: url = self._get_oauth_url('authenticate') else: url = self._get_oauth_url('authorize') request = oauth.OAuthRequest.from_token_and_callback(token=self.request_token, http_url=url, call...
'After user has authorized the request token, get access token with user supplied verifier.'
def get_access_token(self, verifier=None):
try: url = self._get_oauth_url('access_token') request = oauth.OAuthRequest.from_consumer_and_token(self._consumer, token=self.request_token, http_url=url, verifier=str(verifier)) request.sign_request(self._sigmethod, self._consumer, self.request_token) resp = urlopen(Request(url, he...
def from_string(s):
    """Returns a token from something like:
    oauth_token_secret=xxx&oauth_token=xxx
    """
    params = cgi.parse_qs(s, keep_blank_values=False)
    token = OAuthToken(params['oauth_token'][0], params['oauth_token_secret'][0])
    try:
        token.callback_confirmed = params['oauth_callback_confirmed'][0]
    except KeyError:
        # Older providers omit oauth_callback_confirmed; keep the default.
        pass
    return token
def get_nonoauth_parameters(self):
    """Get any non-OAuth parameters."""
    # Keep every parameter whose name does not contain 'oauth_'.
    return dict((name, value)
                for (name, value) in self.parameters.iteritems()
                if name.find('oauth_') < 0)
def to_header(self, realm=''):
    """Serialize as a header for an HTTPAuth request."""
    header = 'OAuth realm="%s"' % realm
    if self.parameters:
        # Only oauth_* parameters belong in the Authorization header.
        oauth_parts = [', %s="%s"' % (k, escape(str(v)))
                       for (k, v) in self.parameters.iteritems()
                       if k[:6] == 'oauth_']
        header += ''.join(oauth_parts)
    return {'Authorization': header}
def to_postdata(self):
    """Serialize as post data for a POST request."""
    pairs = ['%s=%s' % (escape(str(k)), escape(str(v)))
             for (k, v) in self.parameters.iteritems()]
    return '&'.join(pairs)
def to_url(self):
    """Serialize as a URL for a GET request."""
    base = self.get_normalized_http_url()
    return '%s?%s' % (base, self.to_postdata())
def get_normalized_parameters(self):
    """Return a string that contains the parameters that must be signed."""
    params = self.parameters
    try:
        # The signature itself must not be part of the signed string.
        # NOTE: this deletes from self.parameters (shared reference),
        # matching the original behavior.
        del params['oauth_signature']
    except KeyError:
        # Narrowed from a bare `except:` — `del` on a dict with a str key can
        # only raise KeyError here; a bare except hid unrelated errors.
        pass
    key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v)))
                  for (k, v) in params.items()]
    key_values.sort()
    return '&'.join(['%s=%s' % (k, v) for (k, v) in key_values])
def get_normalized_http_method(self):
    """Uppercases the http method."""
    # The signature base string requires the uppercase HTTP verb.
    return self.http_method.upper()
def get_normalized_http_url(self):
    """Parses the URL and rebuilds it to be scheme://host/path."""
    scheme, netloc, path = urlparse.urlparse(self.http_url)[:3]
    # Drop default ports for http/https; keep any non-default port.
    if scheme == 'http' and netloc[-3:] == ':80':
        netloc = netloc[:-3]
    elif scheme == 'https' and netloc[-4:] == ':443':
        netloc = netloc[:-4]
    return '%s://%s%s' % (scheme, netloc, path)
def sign_request(self, signature_method, consumer, token):
    """Set the signature parameter to the result of build_signature."""
    # Record which method produced the signature, then the signature itself.
    self.set_parameter('oauth_signature_method', signature_method.get_name())
    signature = self.build_signature(signature_method, consumer, token)
    self.set_parameter('oauth_signature', signature)
def build_signature(self, signature_method, consumer, token):
    """Calls the build signature method within the signature method."""
    # Delegate to the pluggable signature strategy, passing this request.
    return signature_method.build_signature(self, consumer, token)
'Combines multiple parameter sources.'
def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
if (parameters is None): parameters = {} if (headers and ('Authorization' in headers)): auth_header = headers['Authorization'] if (auth_header[:6] == 'OAuth '): auth_header = auth_header[6:] try: header_params = OAuthRequest._split_header(auth_h...
def _split_header(header):
    """Turn Authorization: header into parameters."""
    params = {}
    for piece in header.split(','):
        # The realm attribute is not an OAuth protocol parameter.
        if piece.find('realm') > -1:
            continue
        piece = piece.strip()
        parts = piece.split('=', 1)
        params[parts[0]] = urllib.unquote(parts[1].strip('"'))
    return params
def _split_url_string(param_str):
    """Turn URL string into parameters."""
    parameters = cgi.parse_qs(param_str, keep_blank_values=False)
    # Flatten each single-element value list and unquote it in place.
    for (name, values) in parameters.items():
        parameters[name] = urllib.unquote(values[0])
    return parameters
'Processes a request_token request and returns the request token on success.'
def fetch_request_token(self, oauth_request):
try: token = self._get_token(oauth_request, 'request') except OAuthError: version = self._get_version(oauth_request) consumer = self._get_consumer(oauth_request) try: callback = self.get_callback(oauth_request) except OAuthError: callback = None ...
'Processes an access_token request and returns the access token on success.'
def fetch_access_token(self, oauth_request):
version = self._get_version(oauth_request) consumer = self._get_consumer(oauth_request) try: verifier = self._get_verifier(oauth_request) except OAuthError: verifier = None token = self._get_token(oauth_request, 'request') self._check_signature(oauth_request, consumer, token) ...
def verify_request(self, oauth_request):
    """Verifies an api call and checks all the parameters."""
    # Version is fetched (and thereby validated) even though unused here.
    version = self._get_version(oauth_request)
    consumer = self._get_consumer(oauth_request)
    token = self._get_token(oauth_request, 'access')
    self._check_signature(oauth_request, consumer, token)
    extra_params = oauth_request.get_nonoauth_parameters()
    return (consumer, token, extra_params)
def authorize_token(self, token, user):
    """Authorize a request token."""
    # Persistence is delegated to the pluggable data store.
    return self.data_store.authorize_request_token(token, user)