code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
# Generated by Django 2.1 on 2018-08-13 08:04 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('ibms', '0006_auto_20180813_1603'), ] operations = [ migrations.RenameField( model_name='serviceprioritymappings', old_name='costcentreName', new_name='costCentreName', ), ]
parksandwildlife/ibms
ibms_project/ibms/migrations/0007_auto_20180813_1604.py
Python
apache-2.0
391
""" Python wrapper for functionality exposed in the TemcaGraph dll. @author: jayb """ from ctypes import * import logging import threading import time import os import sys import numpy as np from pytemca.image.imageproc import fit_sin from numpy.ctypeslib import ndpointer if sys.flags.debug: rel = "../x64/Debug/TemcaGraphDLL.dll" else: rel = "../x64/Release/TemcaGraphDLL.dll" dll_path = os.path.join(os.path.dirname(__file__), rel) class StatusCallbackInfo(Structure): _fields_ = [ ("status", c_int), # -1 : fatal error # 0: finishied init (startup), # 1: starting new frame, # 2: finished frame capture (ie. time to move the stage), # 3: Sync step completed # 4: Async step completed # 5: Processing finished (except Async graphs) # 6: Shutdown finished ("info_code", c_int), # value indicates which sync or async step completed ("error_string", c_char * 256) ] STATUSCALLBACKFUNC = CFUNCTYPE(c_int, POINTER(StatusCallbackInfo)) # returns c_int class CameraInfo(Structure): ''' Information about the current camera in use. ''' _fields_ = [ ("width", c_int), ("height", c_int), ("format", c_int), ("pixel_depth", c_int), ("camera_bpp", c_int), ("camera_model", c_char * 256), ("camera_id", c_char * 256) ] class FocusInfo(Structure): ''' Information about focus quality. ''' _fields_ = [ ("focus_score", c_float), ("astig_score", c_float), ("astig_angle", c_float), ("astig_profile", c_float * 360) ] class QCInfo(Structure): ''' Information about image quality. ''' _fields_ = [ ("min_value", c_int), ("max_value", c_int), ("mean_value", c_int), ("histogram", c_int * 256), ] class ROIInfo(Structure): ''' Information about the selected ROI used for stitching. ''' _fields_ = [ ("gridX", c_int), ("gridY", c_int), ] class MatcherInfo(Structure): ''' Match parameters from the Matcher. ''' _fields_ = [ ("dX", c_float), ("dY", c_float), ("distance", c_float), ("rotation", c_float), ("good_matches", c_int), ] class TemcaGraphDLL(object): """ Hooks onto the C++ DLL. 
These are all the foreign functions we are going to be using from the dll, along with their arguments types and return values. """ _TemcaGraphDLL = WinDLL(dll_path) open = _TemcaGraphDLL.temca_open open.argtypes = [c_int, c_char_p, STATUSCALLBACKFUNC] open.restype = c_uint32 close = _TemcaGraphDLL.temca_close close.argtype = [None] close.restype = c_uint32 set_mode = _TemcaGraphDLL.setMode set_mode.argtypes = [c_char_p] set_mode.restype = c_uint32 get_camera_info = _TemcaGraphDLL.getCameraInfo get_camera_info.restype = CameraInfo get_focus_info = _TemcaGraphDLL.getFocusInfo get_focus_info.restype = FocusInfo set_fft_size = _TemcaGraphDLL.setFFTSize set_fft_size.argtypes = [c_int, c_int, c_int] set_fft_size.restype = None get_qc_info = _TemcaGraphDLL.getQCInfo get_qc_info.restype = QCInfo grab_frame = _TemcaGraphDLL.grabFrame grab_frame.argtypes = [c_char_p, c_int, c_int] grab_frame.restype = None get_last_frame = _TemcaGraphDLL.getLastFrame get_last_frame.argtypes = [ndpointer(c_uint16, flags="C_CONTIGUOUS")] get_last_frame.restype = None get_preview_frame = _TemcaGraphDLL.getPreviewFrame get_preview_frame.argtypes = [ndpointer(c_uint8, flags="C_CONTIGUOUS")] get_preview_frame.restype = None set_parameter = _TemcaGraphDLL.setParameter set_parameter.argtypes = [c_char_p, c_int] set_parameter.restype = None get_parameter = _TemcaGraphDLL.getParameter get_parameter.argtypes = [c_char_p] get_parameter.restype = c_uint32 get_status = _TemcaGraphDLL.getStatus get_status.restype = StatusCallbackInfo setRoiInfo = _TemcaGraphDLL.setROI setRoiInfo.restype = None setRoiInfo.argtypes = [ POINTER( ROIInfo) ] grab_matcher_template = _TemcaGraphDLL.grabMatcherTemplate grab_matcher_template.restype = None grab_matcher_template.argtypes = [c_int, c_int, c_int, c_int] get_matcher_info = _TemcaGraphDLL.getMatcherInfo get_matcher_info.restype = MatcherInfo get_matcher_info.argtypes = None class TemcaGraph(object): ''' Python class which wraps the C++ TemcaGraphDLL and provides the 
linkage between Python and the C++ OpenCVGraph world. The Python events which are triggered by C++ callbacks are:: eventInitCompleted - all graphs have finished building eventStartNewFrame - ready for client to issue a frame grab request eventCaptureCompleted - exposure completed eventCapturePostProcessingCompleted - xfer to CUDA, upshift, Bright/Dark correction finished eventSyncProcessingCompleted - Synchronous processing has finished eventAsyncProcessingCompleted - Asynchronous processing has finished (may overlap next exposure) eventFiniCompleted - graph has finished shutting down ''' def __init__(self,): ''' Many additional class variables are defined in the open() function ''' self.aborting = False self.eventInitCompleted = threading.Event() # Event signalling that initialization is complete. self.eventStartNewFrame = threading.Event() self.eventCaptureCompleted = threading.Event() self.eventCapturePostProcessingCompleted = threading.Event() self.eventSyncProcessingCompleted = threading.Event() self.eventAsyncProcessingCompleted = threading.Event() self.eventFiniCompleted = threading.Event() # all events after eventStartNewFrame, and before eventFiniCompleted self.eventsAllCaptureLoop = [self.eventCaptureCompleted, self.eventCapturePostProcessingCompleted, self.eventSyncProcessingCompleted, self.eventAsyncProcessingCompleted] self.threadLock = threading.Lock() self.preview_decimation_factor = 4 self.wait_time = 10 # in seconds. If we reach this limit, its an error def wait_graph_event (self, event): ''' Waits for the specified event to signal indicating a change in the graph state, and then clears the event. ''' self.threadLock.acquire() event.wait(self.wait_time) event.clear() self.threadLock.release() def wait_all_capture_events(self): for e in self.eventsAllCaptureLoop: self.wait_graph_event(e) def wait_start_of_frame(self): ''' Wait for the event which indicates the graph is ready to start a new frame. 
''' self.wait_graph_event(self.eventStartNewFrame) def open(self, dummyCamera = False, dummyPath = None, callback=None): ''' Open up the Temca C++ DLL. If dummyCamera is True, create a dummy TEMCA image source using... either a real camera, image, directory, or movie according to dummyPath which MUST be specified as no default path is provided. If dummyPath is an integer string, then an OpenCV camera will be used corresponding to that index. ''' if callback == None: callback = self.statusCallback # prevent the callback from being garbage collected !!! self.callback = STATUSCALLBACKFUNC(callback) self.dummyPath = dummyPath t = time.clock() if not TemcaGraphDLL.open(dummyCamera, self.dummyPath, self.callback): raise EnvironmentError('Cannot open TemcaGraphDLL. Possiblities: camera, is offline, not installed, or already in use') logging.info("TemcaGraph DLL initialized in %s seconds" % (time.clock() - t)) self.eventInitCompleted.wait() # get info about frame dimensions fi = self.get_camera_info() self.image_width = fi['width'] self.image_height = fi['height'] self.pixel_depth = fi['pixel_depth'] # 16 ALWAYS self.camera_bpp = fi['camera_bpp'] # 12 for Ximea (upshift to full 16 bpp) self.camera_model = fi['camera_model'] self.camera_id = fi['camera_id'] # if this is changed dynamically, reallocate preview frames self.set_parameter('preview_decimation_factor', self.preview_decimation_factor) def close(self): ''' Close down all graphs. ''' TemcaGraphDLL.close() def set_mode(self, graphType): ''' Sets the overall mode of operation for the Temca graph. Each mode activates a subset of the overall graph.:: graphType SYNC ASYNC ----------------------------------------------------- temca : ximea, postCap, QC Stitch Focus FileWriter raw : ximea, postCap, FileWriter preview : ximea, postCap, QC Focus ''' return TemcaGraphDLL.set_mode(graphType) def set_parameter(self, parameter, value): ''' General purpose way to set random parameters on the graph. 'value' must be an int. 
Valid parameters are:: 'exposure' for Ximea, this is in microseconds 'gain' for Ximea, this is in dB * 1000 'preview_decimation_factor' (2, 4, 8, ...) ''' TemcaGraphDLL.set_parameter(parameter, value) def get_parameter(self, parameter): ''' General purpose way to get random parameters on the graph. Return value is an int. Valid parameters are given under set_parameter. ''' return TemcaGraphDLL.get_parameter(parameter) def get_camera_info(self): ''' Returns a dictionary with details of the capture format including width, height, bytes per pixel, and the camera model and serial number. ''' info = TemcaGraphDLL.get_camera_info() return {'width' : info.width, 'height' : info.height, 'pixel_depth' : info.pixel_depth, 'camera_bpp' : info.camera_bpp, 'camera_model' : info.camera_model, 'camera_id' : info.camera_id} def get_focus_info(self): ''' returns focus and astigmatism values, some calculated in CUDA, some in python ''' info = TemcaGraphDLL.get_focus_info() astig_amp, astig_angle, offset, wave = fit_sin(info.astig_profile) astig_score = astig_amp/np.ptp(info.astig_profile) array_type = c_float*len(info.astig_profile) astig_profile_pointer = cast(info.astig_profile, POINTER(array_type)) astig_numpy = np.frombuffer(astig_profile_pointer.contents, dtype=np.float32) # return the profile? return {'focus_score': info.focus_score, 'astig_score': astig_score, 'astig_angle' : astig_angle, 'astig_profile' : astig_numpy,} def set_fft_size(self, dimension, start_freq, end_freq): ''' Set the dimension of the FFT (which must be a power of 2) and the start and end frequency for focus/astig measurement. Both start and end frequencies must be less than dimension. ''' TemcaGraphDLL.set_fft_size(dimension, start_freq, end_freq); def get_qc_info(self): ''' Get the min, max, mean, and histogram from the last image acquired. 
''' info = TemcaGraphDLL.get_qc_info() array_type = c_int*len(info.histogram) hist_profile_pointer = cast(info.histogram, POINTER(array_type)) hist_numpy = np.frombuffer(hist_profile_pointer.contents, dtype=np.int32) return {'min':info.min_value, 'max': info.max_value, 'mean':info.mean_value, 'histogram':hist_numpy} def grab_matcher_template(self, x, y, width, height): ''' Set the ROI to use as the template on the next image acquired. ''' TemcaGraphDLL.grab_matcher_template(x, y, width, height) def get_matcher_info(self): ''' Return Match status from the matcher. If "good_matches" is 0, then the match operation failed''' info = TemcaGraphDLL.get_matcher_info() return {'dX': info.dX, 'dY': info.dY, 'distance': info.distance, 'rotation': info.rotation, 'good_matches': info.good_matches} def get_status(self): return TemcaGraphDLL.get_status() def grab_frame(self, filename = "none", roiX = 0, roiY = 0): ''' Trigger capture of a frame. This function does not wait for completion of anything. ''' TemcaGraphDLL.grab_frame(filename, roiX, roiY) def grab_frame_wait_completion(self, filename = "none", roiX = 0, roiY = 0): ''' Trigger capture of a frame. This function waits for completion of all graphs. ''' self.wait_start_of_frame() self.grab_frame(filename, roiX, roiY) # filename doesn't matter in preview, nor does roi self.wait_all_capture_events() def allocate_frame(self): ''' Allocate memory as a numpy array to hold a complete frame (16bpp grayscale). ''' return np.zeros(shape=(self.image_width,self.image_height), dtype= np.uint16) def allocate_preview_frame(self): ''' Allocate memory as a numpy array to hold a preview frame (8bpp grayscale). ''' return np.zeros(shape=(self.image_width/self.preview_decimation_factor,self.image_height/self.preview_decimation_factor), dtype= np.uint8) def get_last_frame(self, img): ''' Get a copy of the last frame captured as an ndarray (16bpp grayscale). 
This must be called only after eventCapturePostProcessingCompleted has signaled and before the next frame is acquired. ''' assert (img.shape == (self.image_width, self.image_height) and (img.dtype.type == np.uint16)) TemcaGraphDLL.get_last_frame(img) def get_preview_frame(self, img): ''' Get a copy of the preview image as an ndarray (8bpp grayscale). This must be called only after eventCapturePostProcessingCompleted has signaled and before the next frame is acquired. ''' assert (img.shape == (self.image_width/self.preview_decimation_factor, self.image_height/self.preview_decimation_factor) and (img.dtype.type == np.uint8)) TemcaGraphDLL.get_preview_frame(img) def optimize_exposure(self): ''' Search for optimal exposure value using binary search. ''' min_high_value = 61000 max_high_value = 63000 exposure_step = 100000 #uS self.set_mode('preview') exp = self.get_parameter('exposure') def _searchDirection(): ''' return 0 = just right, 1 go up, -1 go down ''' self.grab_frame_wait_completion() info = self.get_qc_info() m = info['max'] if m > min_high_value and m < max_high_value: return 0 # just right elif m >= max_high_value: return +1 # too high else: return -1 # too low #overshoot top end dir = _searchDirection() while dir < 0: exp = exp + exposure_step self.set_parameter('exposure', exp) dir = _searchDirection() if dir == 0: return; exp_top = exp #overshoot bottom end while dir > 0: exp = exp - exposure_step self.set_parameter('exposure', exp) dir = _searchDirection() if dir == 0: return; exp_bottom = exp # binary search, starting from bottom exposure_step = exp_top - exp_bottom while dir != 0 and exposure_step >= 2: exposure_step = exposure_step / 2 if dir < 0: exp += exposure_step else: exp -= exposure_step self.set_parameter('exposure', exp) dir = _searchDirection() def set_roi_info (self, roiInfo): ''' Set the dimensions of the ROI. This information is used for stitching. 
''' TemcaGraphDLL.setRoiInfo (roiInfo) def statusCallback (self, statusInfo): ''' Called by the C++ Temca graph runner whenever status changes. These values correspond to the Python events activated. :: -1 : fatal error 0: finished init (startup) 1: starting new frame 2: finished frame capture (ie. time to move the stage) 3: capture post processing finished (preview ready) 4: Sync step completed 5: Async step completed 6: Shutdown finished ''' retValue = True status = statusInfo.contents.status info = statusInfo.contents.info_code #logging.info ('callback status: ' + str(status) + ', info: ' + str(info)) tid = threading.currentThread() if (status == -1): self.aborting = True error_string = statusInfo.contents.error_string logging.info ('callback error is' + error_string) retValue = False elif status == 0: # finished initialization of all graphs self.eventInitCompleted.set() elif status == 1: # ready to start the next frame (start of the loop) self.eventStartNewFrame.set() elif status == 2: # capture completed # (move the stage now) self.eventCaptureCompleted.set() elif status == 3: # post processing finished (*16, bright dark, spatial correction, preview ready) self.eventCapturePostProcessingCompleted.set() elif status == 4: # all synchronous processing for the frame is complete self.eventSyncProcessingCompleted.set() elif status == 5: # all asynchronous processing for the frame is complete self.eventAsyncProcessingCompleted.set() elif status == 6: # graph is finished all processing. Close app. 
self.eventFiniCompleted.set() return retValue if __name__ == '__main__': import cv2 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') # Open the DLL which runs all TEMCA graphs #os.environ["PATH"] += os.pathsep temcaGraph = TemcaGraph() temcaGraph.open(dummyCamera = True) showRawImage = True showPreviewImage = True if showRawImage or showPreviewImage: import numpy as np if showRawImage: imgRaw = temcaGraph.allocate_frame() if showPreviewImage: imgPreview = temcaGraph.allocate_preview_frame() # 8bpp and decimated # wait for graph to complete initialization temcaGraph.eventInitCompleted.wait(temcaGraph.wait_time) #temcaGraph.optimize_exposure() temcaGraph.set_mode('preview') #for j in range(10): # temcaGraph.grab_frame_wait_completion() # sys.stdout.write('.') # info = temcaGraph.get_qc_info() #for mode in ['temca', 'preview', 'raw']: #for mode in ['temca']: for mode in ['preview']: print print mode temcaGraph.set_mode(mode) frameCounter = 0 # set ROI grid size (for stitching only) roiInfo = ROIInfo() roiInfo.gridX = 5 roiInfo.gridY = 5 temcaGraph.set_roi_info (roiInfo) for y in range(roiInfo.gridY): for x in range (roiInfo.gridX): if temcaGraph.aborting: break temcaGraph.wait_start_of_frame() temcaGraph.grab_frame('j:/junk/pyframe' + str(frameCounter) + '.tif', x, y) # filename doesn't matter in preview sys.stdout.write('.') temcaGraph.wait_graph_event(temcaGraph.eventCaptureCompleted) # move the stage here # wait for Async ready event (stitching complete for previous frame) if frameCounter > 0: temcaGraph.wait_graph_event(temcaGraph.eventAsyncProcessingCompleted) # wait for preview ready event temcaGraph.wait_graph_event(temcaGraph.eventCapturePostProcessingCompleted) # get a copy of the frame and display it? if showRawImage: temcaGraph.get_last_frame(imgRaw) cv2.imshow('imgRaw', imgRaw) cv2.waitKey(1); # get a copy of the preview and display it? 
if showPreviewImage: temcaGraph.get_preview_frame(imgPreview) cv2.imshow('imgPreview', imgPreview) cv2.waitKey(1); # wait for Sync ready event (QC and Focus complete) temcaGraph.wait_graph_event(temcaGraph.eventSyncProcessingCompleted) qcInfo = temcaGraph.get_qc_info() #histogram = qcInfo['histogram'] focusInfo = temcaGraph.get_focus_info() #print qcInfo frameCounter += 1 temcaGraph.close() temcaGraph.wait_graph_event(temcaGraph.eventFiniCompleted)
jaybo/OpenCVGraph
TemcaGraphPy/temca_graph.py
Python
apache-2.0
21,780
/* overrides_ie8.css - contains override styles for Internet Explorer version 8 All IE8 styles have been moved to individual core stylesheets. IE8-specific styles are denoted with one of the following comments after each individual attribute and value: */ /* IE8 */ /* IE8 and IE9 */ /* This file is still fully functional. You may add your IE8 styles here as you have in previous versions */
SmarterApp/ItemAuthoring
sbac-iaip-rpm-installer/iaip-tomcatjasper-rpm/src/main/opt/tomcat-jasper/webapps/jasperserver/themes/default/overrides_ie8.css
CSS
apache-2.0
398
# -------------------------------------------------------- # Deformable Convolutional Networks # Copyright (c) 2016 by Contributors # Copyright (c) 2017 Microsoft # Licensed under The Apache-2.0 License [see LICENSE for details] # Modified by Zheng Zhang # -------------------------------------------------------- import numpy as np import mxnet as mx import random import math from mxnet.executor_manager import _split_input_slice from utils.image import tensor_vstack from segmentation.segmentation import get_segmentation_train_batch, get_segmentation_test_batch from PIL import Image from multiprocessing import Pool class TestDataLoader(mx.io.DataIter): def __init__(self, segdb, config, batch_size=1, shuffle=False): super(TestDataLoader, self).__init__() # save parameters as properties self.segdb = segdb self.batch_size = batch_size self.shuffle = shuffle self.config = config # infer properties from roidb self.size = len(self.segdb) self.index = np.arange(self.size) # decide data and label names (only for training) self.data_name = ['data'] self.label_name = None # status variable for synchronization between get_data and get_label self.cur = 0 self.data = None self.label = [] self.im_info = None # get first batch to fill in provide_data and provide_label self.reset() self.get_batch() @property def provide_data(self): return [[(k, v.shape) for k, v in zip(self.data_name, self.data[i])] for i in xrange(len(self.data))] @property def provide_label(self): return [None for i in xrange(len(self.data))] @property def provide_data_single(self): return [(k, v.shape) for k, v in zip(self.data_name, self.data[0])] @property def provide_label_single(self): return None def reset(self): self.cur = 0 if self.shuffle: np.random.shuffle(self.index) def iter_next(self): return self.cur < self.size def next(self): if self.iter_next(): self.get_batch() self.cur += self.batch_size return mx.io.DataBatch(data=self.data, label=self.label, pad=self.getpad(), index=self.getindex(), 
provide_data=self.provide_data, provide_label=self.provide_label) else: raise StopIteration def getindex(self): return self.cur / self.batch_size def getpad(self): if self.cur + self.batch_size > self.size: return self.cur + self.batch_size - self.size else: return 0 def get_batch(self): cur_from = self.cur cur_to = min(cur_from + self.batch_size, self.size) segdb = [self.segdb[self.index[i]] for i in range(cur_from, cur_to)] data, label, im_info = get_segmentation_test_batch(segdb, self.config) self.data = [[mx.nd.array(data[i][name]) for name in self.data_name] for i in xrange(len(data))] self.im_info = im_info class TrainDataLoader(mx.io.DataIter): def __init__(self, sym, segdb, config, batch_size=1, crop_height = 768, crop_width = 1024, shuffle=False, ctx=None, work_load_list=None): """ This Iter will provide seg data to Deeplab network :param sym: to infer shape :param segdb: must be preprocessed :param config: config file :param batch_size: must divide BATCH_SIZE(128) :param crop_height: the height of cropped image :param crop_width: the width of cropped image :param shuffle: bool :param ctx: list of contexts :param work_load_list: list of work load :return: DataLoader """ super(TrainDataLoader, self).__init__() # save parameters as properties self.sym = sym self.segdb = segdb self.config = config self.batch_size = batch_size if self.config.TRAIN.ENABLE_CROP: self.crop_height = crop_height self.crop_width = crop_width else: self.crop_height = None self.crop_width = None self.shuffle = shuffle self.ctx = ctx if self.ctx is None: self.ctx = [mx.cpu()] self.work_load_list = work_load_list # infer properties from segdb self.size = len(segdb) self.index = np.arange(self.size) # decide data and label names self.data_name = ['data'] self.label_name = ['label'] # status variable for synchronization between get_data and get_label self.cur = 0 self.batch = None self.data = None self.label = None # init multi-process pool self.pool = Pool(processes = len(self.ctx)) # 
get first batch to fill in provide_data and provide_label self.reset() self.get_batch_parallel() random.seed() @property def provide_data(self): return [[(k, v.shape) for k, v in zip(self.data_name, self.data[i])] for i in xrange(len(self.data))] @property def provide_label(self): return [[(k, v.shape) for k, v in zip(self.label_name, self.label[i])] for i in xrange(len(self.data))] @property def provide_data_single(self): return [(k, v.shape) for k, v in zip(self.data_name, self.data[0])] @property def provide_label_single(self): return [(k, v.shape) for k, v in zip(self.label_name, self.label[0])] def reset(self): self.cur = 0 if self.shuffle: np.random.shuffle(self.index) def iter_next(self): return self.cur + self.batch_size <= self.size def next(self): if self.iter_next(): self.get_batch_parallel() self.cur += self.batch_size return mx.io.DataBatch(data=self.data, label=self.label, pad=self.getpad(), index=self.getindex(), provide_data=self.provide_data, provide_label=self.provide_label) else: raise StopIteration def getindex(self): return self.cur / self.batch_size def getpad(self): if self.cur + self.batch_size > self.size: return self.cur + self.batch_size - self.size else: return 0 def infer_shape(self, max_data_shape=None, max_label_shape=None): """ Return maximum data and label shape for single gpu """ if max_data_shape is None: max_data_shape = [] if max_label_shape is None: max_label_shape = [] max_shapes = dict(max_data_shape + max_label_shape) _, label_shape, _ = self.sym.infer_shape(**max_shapes) label_shape = [(self.label_name[0], label_shape)] return max_data_shape, label_shape def get_batch_parallel(self): cur_from = self.cur cur_to = min(cur_from + self.batch_size, self.size) segdb = [self.segdb[self.index[i]] for i in range(cur_from, cur_to)] # decide multi device slice work_load_list = self.work_load_list ctx = self.ctx if work_load_list is None: work_load_list = [1] * len(ctx) assert isinstance(work_load_list, list) and len(work_load_list) == 
len(ctx), \ "Invalid settings for work load. " slices = _split_input_slice(self.batch_size, work_load_list) multiprocess_results = [] for idx, islice in enumerate(slices): isegdb = [segdb[i] for i in range(islice.start, islice.stop)] multiprocess_results.append(self.pool.apply_async(parfetch, (self.config, self.crop_width, self.crop_height, isegdb))) rst = [multiprocess_result.get() for multiprocess_result in multiprocess_results] all_data = [_['data'] for _ in rst] all_label = [_['label'] for _ in rst] self.data = [[mx.nd.array(data[key]) for key in self.data_name] for data in all_data] self.label = [[mx.nd.array(label[key]) for key in self.label_name] for label in all_label] def parfetch(config, crop_width, crop_height, isegdb): # get testing data for multigpu data, label = get_segmentation_train_batch(isegdb, config) if config.TRAIN.ENABLE_CROP: data_internal = data['data'] label_internal = label['label'] sx = math.floor(random.random() * (data_internal.shape[3] - crop_width + 1)) sy = math.floor(random.random() * (data_internal.shape[2] - crop_height + 1)) sx = (int)(sx) sy = (int)(sy) assert(sx >= 0 and sx < data_internal.shape[3] - crop_width + 1) assert(sy >= 0 and sy < data_internal.shape[2] - crop_height + 1) ex = (int)(sx + crop_width - 1) ey = (int)(sy + crop_height - 1) data_internal = data_internal[:, :, sy : ey + 1, sx : ex + 1] label_internal = label_internal[:, :, sy : ey + 1, sx : ex + 1] data['data'] = data_internal label['label'] = label_internal assert (data['data'].shape[2] == crop_height) and (data['data'].shape[3] == crop_width) assert (label['label'].shape[2] == crop_height) and (label['label'].shape[3] == crop_width) return {'data': data, 'label': label}
deepinsight/Deformable-ConvNets
deeplab/core/loader.py
Python
apache-2.0
9,374
// All material copyright ESRI, All Rights Reserved, unless otherwise specified. // See http://js.arcgis.com/3.11/esri/copyright.txt for details. //>>built define("esri/nls/widgets_pl",{"dijit/_editor/nls/commands":{removeFormat:"Usu\u0144 formatowanie",copy:"Kopiuj",paste:"Wklej",selectAll:"Wybierz wszystko",insertOrderedList:"Lista numerowana",insertTable:"Wstaw/edytuj tabel\u0119",print:"Drukuj",underline:"Podkre\u015blenie",foreColor:"Kolor pierwszego planu",htmlToggle:"\u0179r\u00f3d\u0142o HTML",formatBlock:"Styl akapitu",newPage:"Nowa strona",insertHorizontalRule:"Linia pozioma","delete":"Usu\u0144",appleKey:"\u2318${0}",insertUnorderedList:"Lista wypunktowana", tableProp:"W\u0142a\u015bciwo\u015b\u0107 tabeli",insertImage:"Wstaw obraz",superscript:"Indeks g\u00f3rny",subscript:"Indeks dolny",createLink:"Utw\u00f3rz odsy\u0142acz",undo:"Cofnij",fullScreen:"Prze\u0142\u0105cz pe\u0142ny ekran",italic:"Kursywa",fontName:"Nazwa czcionki",justifyLeft:"Wyr\u00f3wnaj do lewej",unlink:"Usu\u0144 odsy\u0142acz",toggleTableBorder:"Prze\u0142\u0105cz ramk\u0119 tabeli",viewSource:"Wy\u015bwietl kod \u017ar\u00f3d\u0142owy HTML",ctrlKey:"Ctrl+${0}",fontSize:"Wielko\u015b\u0107 czcionki", systemShortcut:"Dzia\u0142anie ${0} jest dost\u0119pne w tej przegl\u0105darce wy\u0142\u0105cznie przy u\u017cyciu skr\u00f3tu klawiaturowego. Nale\u017cy u\u017cy\u0107 klawiszy ${1}.",indent:"Wci\u0119cie",redo:"Pon\u00f3w",strikethrough:"Przekre\u015blenie",justifyFull:"Wyr\u00f3wnaj do lewej i prawej",justifyCenter:"Wyr\u00f3wnaj do \u015brodka",hiliteColor:"Kolor t\u0142a",deleteTable:"Usu\u0144 tabel\u0119",outdent:"Usu\u0144 wci\u0119cie",cut:"Wytnij",_localized:{},plainFormatBlock:"Styl akapitu", toggleDir:"Prze\u0142\u0105cz kierunek",bold:"Pogrubienie",tabIndent:"Wci\u0119cie o tabulator",justifyRight:"Wyr\u00f3wnaj do prawej"},"dojo/cldr/nls/islamic":{"dateFormatItem-Ehm":"E h:mm a","days-standAlone-short":"niedz. pon. wt. \u015br. czw. pt. 
sob.".split(" "),"months-format-narrow":"1 2 3 4 5 6 7 8 9 10 11 12".split(" "),"field-second-relative+0":"teraz","quarters-standAlone-narrow":["1","2","3","4"],"field-weekday":"dzie\u0144 tygodnia","field-wed-relative+0":"w t\u0119 \u015brod\u0119", "field-wed-relative+1":"w przysz\u0142\u0105 \u015brod\u0119","dateFormatItem-GyMMMEd":"E, d MMM y G","dateFormatItem-MMMEd":"E, d MMM",eraNarrow:["AH"],"field-tue-relative+-1":"w zesz\u0142y wtorek","days-format-short":"niedz. pon. wt. \u015br. czw. pt. sob.".split(" "),"dateTimeFormats-appendItem-Day-Of-Week":"{0} {1}","dateFormat-long":"d MMMM y G","field-fri-relative+-1":"w zesz\u0142y pi\u0105tek","field-wed-relative+-1":"w zesz\u0142\u0105 \u015brod\u0119","months-format-wide":"Muharram;Safar;Rabi\u02bb I;Rabi\u02bb II;D\u017cumada I;D\u017cumada II;Rad\u017cab;Szaban;Ramadan;Szawwal;Zu al-kada;Zu al-hid\u017cd\u017ca".split(";"), "dateFormatItem-yyyyQQQ":"QQQ y G","dateTimeFormat-medium":"{1}, {0}","dayPeriods-format-wide-pm":"PM","dateFormat-full":"EEEE, d MMMM y G","dateFormatItem-yyyyMEd":"E, d.MM.y G","field-thu-relative+-1":"w zesz\u0142y czwartek","dateFormatItem-Md":"d.MM",_localized:{},"dayPeriods-format-abbr-am":"AM","dateTimeFormats-appendItem-Second":"{0} ({2}: {1})","dayPeriods-format-wide-noon":"w po\u0142udnie","field-era":"era","months-standAlone-wide":"Muharram;Safar;Rabi\u02bb I;Rabi\u02bb II;D\u017cumada I;D\u017cumada II;Rad\u017cab;Szaban;Ramadan;Szawwal;Zu al-kada;Zu al-hid\u017cd\u017ca".split(";"), "timeFormat-short":"HH:mm","quarters-format-wide":["I kwarta\u0142","II kwarta\u0142","III kwarta\u0142","IV kwarta\u0142"],"timeFormat-long":"HH:mm:ss z","field-year":"rok","dateTimeFormats-appendItem-Era":"{1} {0}","field-hour":"godzina","months-format-abbr":"Muh.;Saf.;Rab. I;Rab. II;D\u017cu. I;D\u017cu. 
II;Ra.;Sza.;Ram.;Szaw.;Zu al-k.;Zu al-h.".split(";"),"field-sat-relative+0":"w t\u0119 sobot\u0119","field-sat-relative+1":"w przysz\u0142\u0105 sobot\u0119","timeFormat-full":"HH:mm:ss zzzz", "dateTimeFormats-appendItem-Week":"{0} ({2}: {1})","field-day-relative+0":"dzisiaj","field-thu-relative+0":"w ten czwartek","field-day-relative+1":"jutro","field-thu-relative+1":"w przysz\u0142y czwartek","dateFormatItem-GyMMMd":"d MMM y G","field-day-relative+2":"pojutrze","dateFormatItem-H":"HH","months-standAlone-abbr":"Muh.;Saf.;Rab. I;Rab. II;D\u017cu. I;D\u017cu. II;Ra.;Sza.;Ram.;Szaw.;Zu al-k.;Zu al-h.".split(";"),"quarters-format-abbr":["K1","K2","K3","K4"],"quarters-standAlone-wide":["I kwarta\u0142", "II kwarta\u0142","III kwarta\u0142","IV kwarta\u0142"],"dateFormatItem-Gy":"y G","dateFormatItem-yyyyMMMEd":"E, d MMM y G","dateFormatItem-M":"L","days-standAlone-wide":"niedziela poniedzia\u0142ek wtorek \u015broda czwartek pi\u0105tek sobota".split(" "),"dateFormatItem-yyyyMMM":"LLL y G","dateFormatItem-yyyyMMMd":"d MMM y G","dayPeriods-format-abbr-noon":"noon","timeFormat-medium":"HH:mm:ss","field-sun-relative+0":"w t\u0119 niedziel\u0119","dateFormatItem-Hm":"HH:mm","field-sun-relative+1":"w przysz\u0142\u0105 niedziel\u0119", "quarters-standAlone-abbr":["1 kw.","2 kw.","3 kw.","4 kw."],eraAbbr:["AH"],"field-minute":"minuta","field-dayperiod":"rano / po po\u0142udniu / wieczorem","days-standAlone-abbr":"niedz. pon. wt. \u015br. czw. pt. 
sob.".split(" "),"dateFormatItem-d":"d","dateFormatItem-ms":"mm:ss","quarters-format-narrow":["1","2","3","4"],"field-day-relative+-1":"wczoraj","dateTimeFormat-long":"{1}, {0}","dayPeriods-format-narrow-am":"a","dateFormatItem-h":"hh a","field-day-relative+-2":"przedwczoraj","dateFormatItem-MMMd":"d MMM", "dateFormatItem-MEd":"E, d.MM","dateTimeFormat-full":"{1}, {0}","field-fri-relative+0":"w ten pi\u0105tek","field-fri-relative+1":"w przysz\u0142y pi\u0105tek","field-day":"dzie\u0144","days-format-wide":"niedziela poniedzia\u0142ek wtorek \u015broda czwartek pi\u0105tek sobota".split(" "),"field-zone":"strefa czasowa","months-standAlone-narrow":"1 2 3 4 5 6 7 8 9 10 11 12".split(" "),"dateFormatItem-y":"y G","dateTimeFormats-appendItem-Day":"{0} ({2}: {1})","field-year-relative+-1":"w zesz\u0142ym roku", "field-month-relative+-1":"w zesz\u0142ym miesi\u0105cu","dateTimeFormats-appendItem-Year":"{1} {0}","dateFormatItem-hm":"hh:mm a","dateTimeFormats-appendItem-Hour":"{0} ({2}: {1})","dayPeriods-format-abbr-pm":"PM","days-format-abbr":"niedz. pon. wt. \u015br. czw. pt. 
sob.".split(" "),eraNames:["AH"],"days-format-narrow":"NPW\u015aCPS".split(""),"dateFormatItem-yyyyMd":"d.MM.y G","field-month":"miesi\u0105c","days-standAlone-narrow":"NPW\u015aCPS".split(""),"dateFormatItem-MMM":"LLL","field-tue-relative+0":"w ten wtorek", "field-tue-relative+1":"w przysz\u0142y wtorek","dateTimeFormats-appendItem-Quarter":"{0} ({2}: {1})","dayPeriods-format-wide-am":"AM","dateTimeFormats-appendItem-Month":"{0} ({2}: {1})","dateTimeFormats-appendItem-Minute":"{0} ({2}: {1})","dateFormatItem-EHm":"E HH:mm","field-mon-relative+0":"w ten poniedzia\u0142ek","field-mon-relative+1":"w przysz\u0142y poniedzia\u0142ek","dateFormat-short":"dd.MM.y G","dateFormatItem-EHms":"E HH:mm:ss","dateFormatItem-Ehms":"E h:mm:ss a","dayPeriods-format-narrow-noon":"n", "field-second":"sekunda","field-sat-relative+-1":"w zesz\u0142\u0105 sobot\u0119","field-sun-relative+-1":"w zesz\u0142\u0105 niedziel\u0119","field-month-relative+0":"w tym miesi\u0105cu","field-month-relative+1":"w przysz\u0142ym miesi\u0105cu","dateTimeFormats-appendItem-Timezone":"{0} {1}","dateFormatItem-Ed":"E, d","field-week":"tydzie\u0144","dateFormat-medium":"d MMM y G","field-week-relative+-1":"Zesz\u0142y tydzie\u0144","field-year-relative+0":"w tym roku","dateFormatItem-yyyyM":"MM.y G", "field-year-relative+1":"w przysz\u0142ym roku","dayPeriods-format-narrow-pm":"p","dateFormatItem-yyyyQQQQ":"QQQQ y G","dateTimeFormat-short":"{1}, {0}","dateFormatItem-Hms":"HH:mm:ss","dateFormatItem-hms":"hh:mm:ss a","dateFormatItem-GyMMM":"LLL y G","field-mon-relative+-1":"w zesz\u0142y poniedzia\u0142ek","dateFormatItem-yyyy":"y G","field-week-relative+0":"w tym tygodniu","field-week-relative+1":"w przysz\u0142ym tygodniu"},"dijit/form/nls/ComboBox":{previousMessage:"Poprzednie wybory",_localized:{}, nextMessage:"Wi\u0119cej wybor\u00f3w"}});
aconyteds/Esri-Ozone-Map-Widget
vendor/js/esri/arcgis_js_api/library/3.11/3.11/esri/nls/widgets_pl.js
JavaScript
apache-2.0
8,156
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.sql.gen; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.primitives.Primitives; import io.airlift.bytecode.BytecodeBlock; import io.airlift.bytecode.BytecodeNode; import io.airlift.bytecode.Scope; import io.airlift.bytecode.Variable; import io.airlift.bytecode.control.IfStatement; import io.airlift.bytecode.expression.BytecodeExpression; import io.airlift.bytecode.instruction.LabelNode; import io.airlift.slice.Slice; import io.prestosql.metadata.BoundSignature; import io.prestosql.metadata.FunctionInvoker; import io.prestosql.metadata.FunctionMetadata; import io.prestosql.metadata.Metadata; import io.prestosql.metadata.ResolvedFunction; import io.prestosql.spi.block.BlockBuilder; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.function.InvocationConvention; import io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention; import io.prestosql.spi.type.Type; import io.prestosql.sql.gen.InputReferenceCompiler.InputReferenceNode; import io.prestosql.type.FunctionType; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.function.Function; import static com.google.common.base.Preconditions.checkArgument; import 
static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.airlift.bytecode.OpCode.NOP; import static io.airlift.bytecode.expression.BytecodeExpressions.constantFalse; import static io.airlift.bytecode.expression.BytecodeExpressions.constantTrue; import static io.airlift.bytecode.expression.BytecodeExpressions.invokeDynamic; import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.BLOCK_POSITION; import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.BOXED_NULLABLE; import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.FUNCTION; import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.NEVER_NULL; import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.NULL_FLAG; import static io.prestosql.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL; import static io.prestosql.spi.function.InvocationConvention.InvocationReturnConvention.NULLABLE_RETURN; import static io.prestosql.sql.gen.Bootstrap.BOOTSTRAP_METHOD; import static java.lang.String.format; public final class BytecodeUtils { private BytecodeUtils() {} public static BytecodeNode ifWasNullPopAndGoto(Scope scope, LabelNode label, Class<?> returnType, Class<?>... stackArgsToPop) { return handleNullValue(scope, label, returnType, ImmutableList.copyOf(stackArgsToPop), false); } public static BytecodeNode ifWasNullPopAndGoto(Scope scope, LabelNode label, Class<?> returnType, Iterable<? extends Class<?>> stackArgsToPop) { return handleNullValue(scope, label, returnType, ImmutableList.copyOf(stackArgsToPop), false); } public static BytecodeNode ifWasNullClearPopAndGoto(Scope scope, LabelNode label, Class<?> returnType, Class<?>... 
stackArgsToPop) { return handleNullValue(scope, label, returnType, ImmutableList.copyOf(stackArgsToPop), true); } public static BytecodeNode handleNullValue( Scope scope, LabelNode label, Class<?> returnType, List<Class<?>> stackArgsToPop, boolean clearNullFlag) { Variable wasNull = scope.getVariable("wasNull"); BytecodeBlock nullCheck = new BytecodeBlock() .setDescription("ifWasNullGoto") .append(wasNull); String clearComment = null; if (clearNullFlag) { nullCheck.append(wasNull.set(constantFalse())); clearComment = "clear wasNull"; } BytecodeBlock isNull = new BytecodeBlock(); for (Class<?> parameterType : stackArgsToPop) { isNull.pop(parameterType); } isNull.pushJavaDefault(returnType); String loadDefaultComment; loadDefaultComment = format("loadJavaDefault(%s)", returnType.getName()); isNull.gotoLabel(label); String popComment = null; if (!stackArgsToPop.isEmpty()) { popComment = format("pop(%s)", Joiner.on(", ").join(stackArgsToPop)); } return new IfStatement("if wasNull then %s", Joiner.on(", ").skipNulls().join(clearComment, popComment, loadDefaultComment, "goto " + label.getLabel())) .condition(nullCheck) .ifTrue(isNull); } public static BytecodeNode boxPrimitive(Class<?> type) { BytecodeBlock block = new BytecodeBlock().comment("box primitive"); if (type == long.class) { return block.invokeStatic(Long.class, "valueOf", Long.class, long.class); } if (type == double.class) { return block.invokeStatic(Double.class, "valueOf", Double.class, double.class); } if (type == boolean.class) { return block.invokeStatic(Boolean.class, "valueOf", Boolean.class, boolean.class); } if (type.isPrimitive()) { throw new UnsupportedOperationException("not yet implemented: " + type); } return NOP; } public static BytecodeNode unboxPrimitive(Class<?> unboxedType) { BytecodeBlock block = new BytecodeBlock().comment("unbox primitive"); if (unboxedType == long.class) { return block.invokeVirtual(Long.class, "longValue", long.class); } if (unboxedType == double.class) { return 
block.invokeVirtual(Double.class, "doubleValue", double.class); } if (unboxedType == boolean.class) { return block.invokeVirtual(Boolean.class, "booleanValue", boolean.class); } throw new UnsupportedOperationException("not yet implemented: " + unboxedType); } public static BytecodeExpression loadConstant(CallSiteBinder callSiteBinder, Object constant, Class<?> type) { Binding binding = callSiteBinder.bind(MethodHandles.constant(type, constant)); return loadConstant(binding); } public static BytecodeExpression loadConstant(Binding binding) { return invokeDynamic( BOOTSTRAP_METHOD, ImmutableList.of(binding.getBindingId()), "constant_" + binding.getBindingId(), binding.getType().returnType()); } public static BytecodeNode generateInvocation( Scope scope, ResolvedFunction resolvedFunction, Metadata metadata, List<BytecodeNode> arguments, CallSiteBinder binder) { return generateInvocation( scope, metadata.getFunctionMetadata(resolvedFunction), invocationConvention -> metadata.getScalarFunctionInvoker(resolvedFunction, Optional.of(invocationConvention)), arguments, binder); } public static BytecodeNode generateInvocation( Scope scope, FunctionMetadata functionMetadata, Function<InvocationConvention, FunctionInvoker> functionInvokerProvider, List<BytecodeNode> arguments, CallSiteBinder binder) { return generateFullInvocation( scope, functionMetadata, functionInvokerProvider, instanceFactory -> { throw new IllegalArgumentException("Simple method invocation can not be used with functions that require an instance factory"); }, arguments.stream() .map(BytecodeUtils::simpleArgument) .collect(toImmutableList()), binder); } private static Function<Optional<Class<?>>, BytecodeNode> simpleArgument(BytecodeNode argument) { return lambdaInterface -> { checkArgument(!lambdaInterface.isPresent(), "Simple method invocation can not be used with functions that have lambda arguments"); return argument; }; } public static BytecodeNode generateFullInvocation( Scope scope, ResolvedFunction 
resolvedFunction, Metadata metadata, Function<MethodHandle, BytecodeNode> instanceFactory, List<Function<Optional<Class<?>>, BytecodeNode>> argumentCompilers, CallSiteBinder binder) { return generateFullInvocation( scope, metadata.getFunctionMetadata(resolvedFunction), invocationConvention -> metadata.getScalarFunctionInvoker(resolvedFunction, Optional.of(invocationConvention)), instanceFactory, argumentCompilers, binder); } public static BytecodeNode generateFullInvocation( Scope scope, FunctionMetadata functionMetadata, Function<InvocationConvention, FunctionInvoker> functionInvokerProvider, Function<MethodHandle, BytecodeNode> instanceFactory, List<Function<Optional<Class<?>>, BytecodeNode>> argumentCompilers, CallSiteBinder binder) { List<InvocationArgumentConvention> argumentConventions = new ArrayList<>(); List<BytecodeNode> arguments = new ArrayList<>(); for (int i = 0; i < functionMetadata.getSignature().getArgumentTypes().size(); i++) { if (functionMetadata.getSignature().getArgumentTypes().get(i).getBase().equalsIgnoreCase(FunctionType.NAME)) { argumentConventions.add(FUNCTION); arguments.add(null); } else { BytecodeNode argument = argumentCompilers.get(i).apply(Optional.empty()); if (argument instanceof InputReferenceNode) { argumentConventions.add(BLOCK_POSITION); } else if (functionMetadata.getArgumentDefinitions().get(i).isNullable()) { // a Java function can only have 255 arguments, so if the count is high use boxed nullable instead of the more efficient null flag argumentConventions.add(argumentCompilers.size() > 100 ? BOXED_NULLABLE : NULL_FLAG); } else { argumentConventions.add(NEVER_NULL); } arguments.add(argument); } } InvocationConvention invocationConvention = new InvocationConvention( argumentConventions, functionMetadata.isNullable() ? 
NULLABLE_RETURN : FAIL_ON_NULL, true, true); FunctionInvoker functionInvoker = functionInvokerProvider.apply(invocationConvention); Binding binding = binder.bind(functionInvoker.getMethodHandle()); LabelNode end = new LabelNode("end"); BytecodeBlock block = new BytecodeBlock() .setDescription("invoke " + functionMetadata.getSignature().getName()); Optional<BytecodeNode> instance = functionInvoker.getInstanceFactory() .map(instanceFactory); // Index of current parameter in the MethodHandle int currentParameterIndex = 0; // Index of parameter (without @IsNull) in Presto function int realParameterIndex = 0; MethodType methodType = binding.getType(); Class<?> returnType = methodType.returnType(); Class<?> unboxedReturnType = Primitives.unwrap(returnType); List<Class<?>> stackTypes = new ArrayList<>(); boolean boundInstance = false; while (currentParameterIndex < methodType.parameterArray().length) { Class<?> type = methodType.parameterArray()[currentParameterIndex]; stackTypes.add(type); if (instance.isPresent() && !boundInstance) { checkState(type.equals(functionInvoker.getInstanceFactory().get().type().returnType()), "Mismatched type for instance parameter"); block.append(instance.get()); boundInstance = true; } else if (type == ConnectorSession.class) { block.append(scope.getVariable("session")); } else { switch (invocationConvention.getArgumentConvention(realParameterIndex)) { case NEVER_NULL: block.append(arguments.get(realParameterIndex)); checkArgument(!Primitives.isWrapperType(type), "Non-nullable argument must not be primitive wrapper type"); block.append(ifWasNullPopAndGoto(scope, end, unboxedReturnType, Lists.reverse(stackTypes))); break; case NULL_FLAG: block.append(arguments.get(realParameterIndex)); block.append(scope.getVariable("wasNull")); block.append(scope.getVariable("wasNull").set(constantFalse())); stackTypes.add(boolean.class); currentParameterIndex++; break; case BOXED_NULLABLE: block.append(arguments.get(realParameterIndex)); 
block.append(boxPrimitiveIfNecessary(scope, type)); block.append(scope.getVariable("wasNull").set(constantFalse())); break; case BLOCK_POSITION: InputReferenceNode inputReferenceNode = (InputReferenceNode) arguments.get(realParameterIndex); block.append(inputReferenceNode.produceBlockAndPosition()); stackTypes.add(int.class); if (!functionMetadata.getArgumentDefinitions().get(realParameterIndex).isNullable()) { block.append(scope.getVariable("wasNull").set(inputReferenceNode.blockAndPositionIsNull())); block.append(ifWasNullPopAndGoto(scope, end, unboxedReturnType, Lists.reverse(stackTypes))); } currentParameterIndex++; break; case FUNCTION: Optional<Class<?>> lambdaInterface = functionInvoker.getLambdaInterfaces().get(realParameterIndex); block.append(argumentCompilers.get(realParameterIndex).apply(lambdaInterface)); break; default: throw new UnsupportedOperationException(format("Unsupported argument conventsion type: %s", invocationConvention.getArgumentConvention(realParameterIndex))); } realParameterIndex++; } currentParameterIndex++; } block.append(invoke(binding, functionMetadata.getSignature().getName())); if (functionMetadata.isNullable()) { block.append(unboxPrimitiveIfNecessary(scope, returnType)); } block.visitLabel(end); return block; } public static BytecodeBlock unboxPrimitiveIfNecessary(Scope scope, Class<?> boxedType) { BytecodeBlock block = new BytecodeBlock(); LabelNode end = new LabelNode("end"); Class<?> unboxedType = Primitives.unwrap(boxedType); Variable wasNull = scope.getVariable("wasNull"); if (unboxedType.isPrimitive()) { LabelNode notNull = new LabelNode("notNull"); block.dup(boxedType) .ifNotNullGoto(notNull) .append(wasNull.set(constantTrue())) .comment("swap boxed null with unboxed default") .pop(boxedType) .pushJavaDefault(unboxedType) .gotoLabel(end) .visitLabel(notNull) .append(unboxPrimitive(unboxedType)); } else { block.dup(boxedType) .ifNotNullGoto(end) .append(wasNull.set(constantTrue())); } block.visitLabel(end); return block; 
} public static BytecodeNode boxPrimitiveIfNecessary(Scope scope, Class<?> type) { checkArgument(!type.isPrimitive(), "cannot box into primitive type"); if (!Primitives.isWrapperType(type)) { return NOP; } BytecodeBlock notNull = new BytecodeBlock().comment("box primitive"); Class<?> expectedCurrentStackType; if (type == Long.class) { notNull.invokeStatic(Long.class, "valueOf", Long.class, long.class); expectedCurrentStackType = long.class; } else if (type == Double.class) { notNull.invokeStatic(Double.class, "valueOf", Double.class, double.class); expectedCurrentStackType = double.class; } else if (type == Boolean.class) { notNull.invokeStatic(Boolean.class, "valueOf", Boolean.class, boolean.class); expectedCurrentStackType = boolean.class; } else { throw new UnsupportedOperationException("not yet implemented: " + type); } BytecodeBlock condition = new BytecodeBlock().append(scope.getVariable("wasNull")); BytecodeBlock wasNull = new BytecodeBlock() .pop(expectedCurrentStackType) .pushNull() .checkCast(type); return new IfStatement() .condition(condition) .ifTrue(wasNull) .ifFalse(notNull); } public static BytecodeExpression invoke(Binding binding, String name) { // ensure that name doesn't have a special characters return invokeDynamic(BOOTSTRAP_METHOD, ImmutableList.of(binding.getBindingId()), sanitizeName(name), binding.getType()); } public static BytecodeExpression invoke(Binding binding, BoundSignature signature) { return invoke(binding, signature.getName()); } /** * Replace characters that are not safe to use in a JVM identifier. 
*/ public static String sanitizeName(String name) { return name.replaceAll("[^A-Za-z0-9_$]", "_"); } public static BytecodeNode generateWrite(CallSiteBinder callSiteBinder, Scope scope, Variable wasNullVariable, Type type) { Class<?> valueJavaType = type.getJavaType(); if (!valueJavaType.isPrimitive() && valueJavaType != Slice.class) { valueJavaType = Object.class; } String methodName = "write" + Primitives.wrap(valueJavaType).getSimpleName(); // the stack contains [output, value] // We should be able to insert the code to get the output variable and compute the value // at the right place instead of assuming they are in the stack. We should also not need to // use temp variables to re-shuffle the stack to the right shape before Type.writeXXX is called // Unfortunately, because of the assumptions made by try_cast, we can't get around it yet. // TODO: clean up once try_cast is fixed Variable tempValue = scope.createTempVariable(valueJavaType); Variable tempOutput = scope.createTempVariable(BlockBuilder.class); return new BytecodeBlock() .comment("if (wasNull)") .append(new IfStatement() .condition(wasNullVariable) .ifTrue(new BytecodeBlock() .comment("output.appendNull();") .pop(valueJavaType) .invokeInterface(BlockBuilder.class, "appendNull", BlockBuilder.class) .pop()) .ifFalse(new BytecodeBlock() .comment("%s.%s(output, %s)", type.getTypeSignature(), methodName, valueJavaType.getSimpleName()) .putVariable(tempValue) .putVariable(tempOutput) .append(loadConstant(callSiteBinder.bind(type, Type.class))) .getVariable(tempOutput) .getVariable(tempValue) .invokeInterface(Type.class, methodName, void.class, BlockBuilder.class, valueJavaType))); } }
smartnews/presto
presto-main/src/main/java/io/prestosql/sql/gen/BytecodeUtils.java
Java
apache-2.0
21,345
namespace Amazon.SQS.ExtendedClient.Tests { using System; using System.Threading; using System.Threading.Tasks; using Model; using Moq; using NUnit.Framework; [TestFixture] public class When_Extended_Client_Deletes : ExtendedClientTestBase { #if NET45 [Test] public void Long_Message_It_Is_Deleted_From_s3() { var s3Key = Guid.NewGuid().ToString("N"); var longReceiptHandle = GenerateReceiptHandle(S3_BUCKET_NAME, s3Key, Constants.HandleTail); client.DeleteMessage(new DeleteMessageRequest(SQS_QUEUE_NAME, longReceiptHandle)); s3Mock.Verify(m => m.DeleteObject(It.Is<string>(s => s.Equals(S3_BUCKET_NAME)), It.Is<string>(s => s.Equals(s3Key)))); sqsMock.Verify(m => m.DeleteMessage(It.Is<DeleteMessageRequest>(r => r.QueueUrl.Equals(SQS_QUEUE_NAME) && r.ReceiptHandle.Equals(Constants.HandleTail)))); } #endif [Test] public async Task Long_Message_Async_It_Is_Deleted_From_s3() { var s3Key = Guid.NewGuid().ToString("N"); var longReceiptHandle = GenerateReceiptHandle(S3_BUCKET_NAME, s3Key, Constants.HandleTail); await client.DeleteMessageAsync(new DeleteMessageRequest(SQS_QUEUE_NAME, longReceiptHandle)); s3Mock.Verify(m => m.DeleteObjectAsync(It.Is<string>(s => s.Equals(S3_BUCKET_NAME)), It.Is<string>(s => s.Equals(s3Key)), It.IsAny<CancellationToken>())); sqsMock.Verify(m => m.DeleteMessageAsync(It.Is<DeleteMessageRequest>(r => r.QueueUrl.Equals(SQS_QUEUE_NAME) && r.ReceiptHandle.Equals(Constants.HandleTail)), It.IsAny<CancellationToken>())); } #if NET45 [Test] public void Long_Message_It_Is_NotDeleted_From_s3_When_RetainS3Messages_Is_Set() { var extendedClient = new AmazonSQSExtendedClient( sqsMock.Object, new ExtendedClientConfiguration() .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME) .WithRetainS3Messages(true)); var s3Key = Guid.NewGuid().ToString("N"); var longReceiptHandle = GenerateReceiptHandle(S3_BUCKET_NAME, s3Key, Constants.HandleTail); extendedClient.DeleteMessage(new DeleteMessageRequest(SQS_QUEUE_NAME, longReceiptHandle)); s3Mock.Verify(m => 
m.DeleteObject(It.IsAny<string>(), It.IsAny<string>()), Times.Never); sqsMock.Verify(m => m.DeleteMessage(It.Is<DeleteMessageRequest>(r => r.QueueUrl.Equals(SQS_QUEUE_NAME) && r.ReceiptHandle.Equals(Constants.HandleTail)))); } #endif [Test] public async Task Long_Message_Async_It_Is_Deleted_From_s3_When_RetainS3Messages_Is_Set() { var extendedClient = new AmazonSQSExtendedClient( sqsMock.Object, new ExtendedClientConfiguration() .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME) .WithRetainS3Messages(true)); var s3Key = Guid.NewGuid().ToString("N"); var longReceiptHandle = GenerateReceiptHandle(S3_BUCKET_NAME, s3Key, Constants.HandleTail); await extendedClient.DeleteMessageAsync(new DeleteMessageRequest(SQS_QUEUE_NAME, longReceiptHandle)); s3Mock.Verify(m => m.DeleteObjectAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Never); sqsMock.Verify(m => m.DeleteMessageAsync(It.Is<DeleteMessageRequest>(r => r.QueueUrl.Equals(SQS_QUEUE_NAME) && r.ReceiptHandle.Equals(Constants.HandleTail)), It.IsAny<CancellationToken>())); } #if NET45 [Test] public void Short_Message_It_Is_Deleted_Only_From_Queue() { client.DeleteMessage(new DeleteMessageRequest(SQS_QUEUE_NAME, Constants.HandleTail)); s3Mock.Verify(m => m.DeleteObject(It.IsAny<string>(), It.IsAny<string>()), Times.Never()); sqsMock.Verify(m => m.DeleteMessage(It.Is<DeleteMessageRequest>(r => r.QueueUrl.Equals(SQS_QUEUE_NAME) && r.ReceiptHandle.Equals(Constants.HandleTail)))); } #endif [Test] public async Task Short_Message_Async_It_Is_Deleted_Only_From_Queue() { await client.DeleteMessageAsync(new DeleteMessageRequest(SQS_QUEUE_NAME, Constants.HandleTail)); s3Mock.Verify(m => m.DeleteObjectAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Never()); sqsMock.Verify(m => m.DeleteMessageAsync(It.Is<DeleteMessageRequest>(r => r.QueueUrl.Equals(SQS_QUEUE_NAME) && r.ReceiptHandle.Equals(Constants.HandleTail)), It.IsAny<CancellationToken>())); } } }
raol/amazon-sqs-net-extended-client-lib
src/Amazon.SQS.ExtendedClient.Tests/When_Extended_Client_Deletes.cs
C#
apache-2.0
4,730
/** * Licensed to Cloudera, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Cloudera, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cloudera.flume.handlers.syslog; import java.io.DataInputStream; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import org.apache.log4j.Logger; import com.cloudera.flume.conf.SourceFactory.SourceBuilder; import com.cloudera.flume.core.Event; import com.cloudera.flume.core.EventSource; import com.cloudera.flume.handlers.text.EventExtractException; /** * This source listens for multiple tcp-based syslog data streams. This works * for many concurrent connections, but may run into scaling problems. * * syslog entries sent over tcp are simply delimited by '\n' characters and are * otherwise identical in format to udp-based syslog data. 
 *
 * TODO (jon) setup a limit on the number of threads, find out how to modify
 * filehandle/socket limits on windows/linux
 *
 * TODO(jon) Either do an nio/asynchronous version,
 *
 * TODO (jon) or make separate queues for each thread -- this may cause lock
 * contention
 */
public class SyslogTcpSourceThreads extends EventSource.Base {
  final static Logger LOG = Logger.getLogger(SyslogTcpSourceThreads.class
      .getName());
  final public static int SYSLOG_TCP_PORT = 514;
  final int port;

  // Shared buffer between the per-connection reader threads (producers) and
  // next() (consumer).
  final LinkedBlockingQueue<Event> eventsQ = new LinkedBlockingQueue<Event>();

  // Live reader threads; close() snapshots and joins them.
  final List<ReaderThread> readers = Collections
      .synchronizedList(new ArrayList<ReaderThread>());

  // Count of lines that failed syslog wire-format extraction.
  final AtomicLong rejects = new AtomicLong();

  // Lifecycle flag: set false by open(), true by close(). Volatile because it
  // is read by the server thread, every reader thread, and next().
  volatile boolean closed = true;

  public SyslogTcpSourceThreads(int port) {
    this.port = port;
  }

  public SyslogTcpSourceThreads() {
    this(SYSLOG_TCP_PORT); // this is syslog-ng's default tcp port.
  }

  // must synchronize access to sock via sockLock (open/close/accept race).
  ServerThread svrthread;
  ServerSocket sock = null;
  Object sockLock = new Object();

  /**
   * This thread just waits to accept incoming connections and spawns a reader
   * thread per connection. It exits when the source is closed or the server
   * socket fails.
   */
  class ServerThread extends Thread {
    final int port;

    ServerThread(int port) {
      this.port = port;
    }

    @Override
    public void run() {
      while (!closed) {
        ServerSocket mySock = null;
        // guarantee no NPE at accept
        synchronized (sockLock) {
          mySock = sock; // get a local reference to sock.
        }

        if (mySock == null || mySock.isClosed())
          return;

        try {
          Socket client = mySock.accept();
          // NOTE(review): one thread per connection, unbounded (see class
          // TODO about limiting thread count).
          new ReaderThread(client).start();
        } catch (IOException e) {
          if (!closed) {
            // could be IOException where we run out of file/socket handles.
            LOG.error("accept had a problem", e);
          }
          return;
        }
      }
    }
  };

  /**
   * This thread takes an accepted socket and pulls events out of it until the
   * stream ends, extraction yields null, or the source is closed.
   */
  class ReaderThread extends Thread {
    Socket in;

    ReaderThread(Socket sock) {
      // Register before start so close() can find and join this thread.
      readers.add(this);
      this.in = sock;
    }

    @Override
    public void run() {
      try {
        // process this connection.
        DataInputStream dis = new DataInputStream(in.getInputStream());
        while (!closed) {
          try {
            Event e = SyslogWireExtractor.extractEvent(dis);
            if (e == null)
              break;
            eventsQ.put(e);
          } catch (EventExtractException ex) {
            // Malformed entry: count it and keep reading the stream.
            rejects.incrementAndGet();
          }
        }
        // done.
        in.close();
      } catch (IOException e) {
        LOG.error("IOException with SyslogTcpSources", e);
      } catch (InterruptedException e1) {
        LOG.error("put into Queue interupted" + e1);
      } finally {
        // Best-effort close in case the normal path above didn't reach it.
        if (in != null && in.isConnected()) {
          try {
            in.close();
          } catch (IOException e) {
            e.printStackTrace();
          }
        }
        // Deregister so close() stops waiting on this thread.
        readers.remove(this);
      }
    }
  }

  @Override
  public void close() throws IOException {
    LOG.info("Closing " + this);
    synchronized (sockLock) {
      closed = true;
      if (sock != null) {
        sock.close();
        sock = null;
      }
    }

    // wait for all readers to close (This is not robust!)
    if (readers.size() != 0) {
      // Snapshot to avoid iterating while reader threads remove themselves.
      List<ReaderThread> rs = new ArrayList<ReaderThread>(readers);
      for (ReaderThread r : rs) {
        try {
          r.join();
        } catch (InterruptedException e) {
          LOG.error("Reader threads interrupted, but we are closing", e);
        }
      }
    }

    try {
      if (svrthread != null) {
        svrthread.join();
        svrthread = null;
      }
    } catch (InterruptedException e) {
      LOG.error("Reader threads interrupted, but we are closing", e);
    }
  };

  @Override
  public Event next() throws IOException {
    Event e = null;
    try {
      // Poll with a timeout so a close() is noticed within ~1s even when no
      // events arrive.
      while ((e = eventsQ.poll(1000, TimeUnit.MILLISECONDS)) == null
          && !closed) {
        // Do nothing, just checking variables if nothing has arrived.
      }
    } catch (InterruptedException e1) {
      LOG.error("Tcp source polling interrupted ", e1);
      // Fail by throwing exn
      throw new IOException(e1);
    }
    updateEventProcessingStats(e);
    return e;
  }

  @Override
  public void open() throws IOException {
    LOG.info("Opening " + this);
    synchronized (sockLock) {
      if (!closed) {
        throw new IOException("Attempted to double open socket");
      }
      closed = false;

      if (sock == null) {
        // depending on number of connections, may need to increase backlog
        // value (automatic server socket argument, default is 50)
        try {
          sock = new ServerSocket(port);
          sock.setReuseAddress(true);
        } catch (IOException e) {
          throw new IOException("failed to create serversocket " + e);
        }
      }
    }
    svrthread = new ServerThread(port);
    svrthread.start();
  }

  /**
   * Source builder: accepts zero arguments (default port 514) or one port
   * number argument.
   */
  public static SourceBuilder builder() {
    return new SourceBuilder() {
      @Override
      public EventSource build(String... argv) {
        int port = SYSLOG_TCP_PORT; // default udp port, need root permissions
        // for this.
        if (argv.length > 1) {
          throw new IllegalArgumentException("usage: syslogTcp([port no]) ");
        }

        if (argv.length == 1) {
          port = Integer.parseInt(argv[0]);
        }

        return new SyslogTcpSourceThreads(port);
      }
    };
  }
}
hammer/flume
src/java/com/cloudera/flume/handlers/syslog/SyslogTcpSourceThreads.java
Java
apache-2.0
7,278
/*
 * Copyright 2002-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.util;

import java.io.ByteArrayOutputStream;

/**
 * A {@link java.io.ByteArrayOutputStream} variant that exposes explicit
 * control over its internal buffer:
 * <ul>
 * <li>public {@link #grow(int)} and {@link #resize(int)} methods for managing
 * the buffer's capacity directly</li>
 * <li>a larger default initial capacity of 256 bytes</li>
 * </ul>
 *
 * <p>As of 4.2, this class has been superseded by {@link FastByteArrayOutputStream}
 * for Spring's internal use where no assignability to {@link ByteArrayOutputStream}
 * is needed (since {@link FastByteArrayOutputStream} is more efficient with buffer
 * resize management but doesn't extend the standard {@link ByteArrayOutputStream}).
 *
 * @author Brian Clozel
 * @author Juergen Hoeller
 * @since 4.0.3
 */
public class ResizableByteArrayOutputStream extends ByteArrayOutputStream {

	private static final int DEFAULT_INITIAL_CAPACITY = 256;


	/**
	 * Create a new <code>ResizableByteArrayOutputStream</code>
	 * with the default initial capacity of 256 bytes.
	 */
	public ResizableByteArrayOutputStream() {
		super(DEFAULT_INITIAL_CAPACITY);
	}

	/**
	 * Create a new <code>ResizableByteArrayOutputStream</code>
	 * with the specified initial capacity.
	 * @param initialCapacity the initial buffer size in bytes
	 */
	public ResizableByteArrayOutputStream(int initialCapacity) {
		super(initialCapacity);
	}


	/**
	 * Resize the internal buffer size to a specified capacity.
	 * @param targetCapacity the desired size of the buffer
	 * @throws IllegalArgumentException if the given capacity is smaller than
	 * the actual size of the content stored in the buffer already
	 * @see ResizableByteArrayOutputStream#size()
	 */
	public synchronized void resize(int targetCapacity) {
		Assert.isTrue(targetCapacity >= this.count, "New capacity must not be smaller than current size");
		// Allocate a fresh buffer of the requested size and carry over the
		// bytes written so far.
		byte[] reallocated = new byte[targetCapacity];
		System.arraycopy(this.buf, 0, reallocated, 0, this.count);
		this.buf = reallocated;
	}

	/**
	 * Grow the internal buffer size.
	 * @param additionalCapacity the number of bytes to add to the current buffer size
	 * @see ResizableByteArrayOutputStream#size()
	 */
	public synchronized void grow(int additionalCapacity) {
		Assert.isTrue(additionalCapacity >= 0, "Additional capacity must be 0 or higher");
		int requiredCapacity = this.count + additionalCapacity;
		if (requiredCapacity > this.buf.length) {
			// At least double the buffer so repeated small grows stay cheap.
			resize(Math.max(this.buf.length * 2, requiredCapacity));
		}
	}

	/**
	 * Return the current size of this stream's internal buffer.
	 */
	public synchronized int capacity() {
		return this.buf.length;
	}

}
qobel/esoguproject
spring-framework/spring-core/src/main/java/org/springframework/util/ResizableByteArrayOutputStream.java
Java
apache-2.0
3,366
/** * Copyright IBM Corp. 2016, 2018 * * This source code is licensed under the Apache-2.0 license found in the * LICENSE file in the root directory of this source tree. */ import React, { useState, useCallback, useEffect } from 'react'; import classnames from 'classnames'; import { settings } from 'carbon-components'; import FileUploaderItem from '../FileUploaderItem'; import FileUploaderDropContainer from '../FileUploaderDropContainer'; import FormItem from '../../FormItem'; import uid from '../../../tools/uniqueId'; import '../FileUploader-story.scss'; const { prefix } = settings; const ExampleDropContainerApp = (props) => { const [files, setFiles] = useState([]); const handleDrop = (e) => { e.preventDefault(); }; const handleDragover = (e) => { e.preventDefault(); }; useEffect(() => { document.addEventListener('drop', handleDrop); document.addEventListener('dragover', handleDragover); return () => { document.removeEventListener('drop', handleDrop); document.removeEventListener('dragover', handleDragover); }; }, []); const uploadFile = async (fileToUpload) => { // file size validation if (fileToUpload.filesize > 512000) { const updatedFile = { ...fileToUpload, status: 'edit', iconDescription: 'Delete file', invalid: true, errorSubject: 'File size exceeds limit', errorBody: '500kb max file size. Select a new file and try again.', }; setFiles((files) => files.map((file) => file.uuid === fileToUpload.uuid ? updatedFile : file ) ); return; } // file type validation if (fileToUpload.invalidFileType) { const updatedFile = { ...fileToUpload, status: 'edit', iconDescription: 'Delete file', invalid: true, errorSubject: 'Invalid file type', errorBody: `"${fileToUpload.name}" does not have a valid file type.`, }; setFiles((files) => files.map((file) => file.uuid === fileToUpload.uuid ? 
updatedFile : file ) ); return; } // simulate network request time const rand = Math.random() * 1000; setTimeout(() => { const updatedFile = { ...fileToUpload, status: 'complete', iconDescription: 'Upload complete', }; setFiles((files) => files.map((file) => file.uuid === fileToUpload.uuid ? updatedFile : file ) ); }, rand); // show x icon after 1 second setTimeout(() => { const updatedFile = { ...fileToUpload, status: 'edit', iconDescription: 'Delete file', }; setFiles((files) => files.map((file) => file.uuid === fileToUpload.uuid ? updatedFile : file ) ); }, rand + 1000); }; const onAddFiles = useCallback( (evt, { addedFiles }) => { evt.stopPropagation(); const newFiles = addedFiles.map((file) => ({ uuid: uid(), name: file.name, filesize: file.size, status: 'uploading', iconDescription: 'Uploading', invalidFileType: file.invalidFileType, })); // eslint-disable-next-line react/prop-types if (props.multiple) { setFiles([...files, ...newFiles]); newFiles.forEach(uploadFile); } else if (newFiles[0]) { setFiles([newFiles[0]]); uploadFile(newFiles[0]); } }, // eslint-disable-next-line react/prop-types [files, props.multiple] ); const handleFileUploaderItemClick = useCallback( (_, { uuid: clickedUuid }) => setFiles(files.filter(({ uuid }) => clickedUuid !== uuid)), [files] ); const labelClasses = classnames(`${prefix}--file--label`, { // eslint-disable-next-line react/prop-types [`${prefix}--file--label--disabled`]: props.disabled, }); const helperTextClasses = classnames(`${prefix}--label-description`, { // eslint-disable-next-line react/prop-types [`${prefix}--label-description--disabled`]: props.disabled, }); return ( <FormItem> <p className={labelClasses}>Upload files</p> <p className={helperTextClasses}> Max file size is 500kb. Supported file types are .jpg and .png. 
</p> <FileUploaderDropContainer {...props} onAddFiles={onAddFiles} /> <div className={`${prefix}--file-container`} style={{ width: '100%' }}> {files.map( ({ uuid, name, filesize, status, iconDescription, invalid, ...rest }) => ( <FileUploaderItem key={uid()} uuid={uuid} name={name} filesize={filesize} // eslint-disable-next-line react/prop-types size={props.size} status={status} iconDescription={iconDescription} invalid={invalid} onDelete={handleFileUploaderItemClick} {...rest} /> ) )} </div> </FormItem> ); }; export default ExampleDropContainerApp;
carbon-design-system/carbon-components
packages/react/src/components/FileUploader/stories/drop-container.js
JavaScript
apache-2.0
5,193
//catalan数的应用 形如f(n) = f(n-1) + f(n-2)f(1) + f(n-3)f(2) + … + f(1)f(n-2) + f(n-1)的特征结构 int numTrees(int n) { int a = n+1; int b = n; long ret = 1; for(int i = 1; i <= b; i++) { ret = ret * a; ret = ret / i; a++; } return ret / (n+1); }
MingfeiPan/leetcode
tree/96.c
C
apache-2.0
313
# Pavonia hastata var. hastata VARIETY #### Status ACCEPTED #### According to NUB Generator [autonym] #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Malvales/Malvaceae/Pavonia/Pavonia hastata/Pavonia hastata hastata/README.md
Markdown
apache-2.0
170
using System; using System.Threading; namespace mappingrpc { public class CallResultFuture { public Type resultType; public bool done = false; public bool isExceptionResult = false; public object result; public object monitorLock = new System.Object(); public CallResultFuture(){ } public object getResult(){ lock (monitorLock) { Monitor.Wait (monitorLock); } return result; } public void putResult(object result){ this.result = result; done = true; lock (monitorLock) { Monitor.PulseAll (monitorLock); } } } }
zhoufenglokki/mappingrpc
csharp/mappingrpc/CallResultFuture.cs
C#
apache-2.0
569
#include "envswitch.h"
#include "dynrm.h"
#include "resqueuecommand.h"

#include "miscadmin.h"
#include "communication/rmcomm_QD2RM.h"
#include "utils/linkedlist.h"
#include "catalog/pg_resqueue.h"
#include "utils/resscheduler.h"
#include "commands/defrem.h"

/*******************************************************************************
 * This file contains all functions for creating, altering and dropping resource
 * queue through SQL DDL statement. All statement information is saved in the
 * argument stmt. The QD side only validates the statement locally and then
 * forwards it to the HAWQ resource manager (RM), which applies the change.
 ******************************************************************************/
void validateDDLAttributeOptions(List *options);

/*
 * CREATE RESOURCE QUEUE statement handler.
 */
void createResourceQueue(CreateQueueStmt *stmt)
{
	int res = FUNC_RETURN_OK;
	static char errorbuf[1024];
	Relation pg_resqueue_rel;
	cqContext cqc;

	/* Permission check - only superuser can create queues. */
	if (!superuser())
	{
		ereport(ERROR,
				(errcode(ERRCODE_INSUFFICIENT_PRIVILEGE),
				 errmsg("must be superuser to create resource queues")));
	}

	/*
	 * MPP-7960: We cannot run CREATE RESOURCE QUEUE inside a user transaction
	 * block because the shared memory structures are not cleaned up on abort,
	 * resulting in "leaked", unreachable queues.
	 */
	if (Gp_role == GP_ROLE_DISPATCH)
	{
		PreventTransactionChain((void *) stmt, "CREATE RESOURCE QUEUE");
	}

	/* Validate options. */
	validateDDLAttributeOptions(stmt->options);

	/*
	 * Check for an illegal name ('none' is used to signify no queue in ALTER
	 * ROLE).
	 */
	if (strcmp(stmt->queue, "none") == 0)
	{
		ereport(ERROR,
				(errcode(ERRCODE_RESERVED_NAME),
				 errmsg("resource queue name %s is reserved", stmt->queue),
				 errOmitLocation(true)));
	}

	/*
	 * Check the pg_resqueue relation to be certain the queue doesn't already
	 * exist.
	 */
	pg_resqueue_rel = heap_open(ResQueueRelationId, RowExclusiveLock);

	if (caql_getcount(
			caql_addrel(cqclr(&cqc), pg_resqueue_rel),
			cql("SELECT COUNT(*) FROM pg_resqueue WHERE rsqname = :1",
				CStringGetDatum(stmt->queue))))
	{
		ereport(ERROR,
				(errcode(ERRCODE_DUPLICATE_OBJECT),
				 errmsg("resource queue %s already exists", stmt->queue)));
	}

	heap_close(pg_resqueue_rel, NoLock);

	/*
	 * Build the create resource queue request and send it to HAWQ RM process.
	 * Basically, HAWQ RM runs all necessary logic to verify the statement and
	 * apply the change. Therefore, QD only sends out the original information
	 * and waits for the response.
	 */
	int resourceid = 0;
	res = createNewResourceContext(&resourceid);
	if ( res != FUNC_RETURN_OK )
	{
		Assert( res == COMM2RM_CLIENT_FULL_RESOURCECONTEXT );
		ereport(ERROR,
				(errcode(ERRCODE_INTERNAL_ERROR),
				 errmsg("can not apply CREATE RESOURCE QUEUE, "
						"because too many resource contexts were created.")));
	}

	/* Here, using user oid is more convenient. */
	res = registerConnectionInRMByOID(resourceid,
									  GetUserId(),
									  errorbuf,
									  sizeof(errorbuf));
	if ( res != FUNC_RETURN_OK )
	{
		releaseResourceContextWithErrorReport(resourceid);
		ereport(ERROR,
				(errcode(ERRCODE_INTERNAL_ERROR),
				 errmsg("%s", errorbuf)));
	}

	res = manipulateResourceQueue(resourceid,
								  stmt->queue,
								  MANIPULATE_RESQUEUE_CREATE,
								  stmt->options,
								  errorbuf,
								  sizeof(errorbuf));

	/* We always unregister connection. */
	unregisterConnectionInRMWithErrorReport(resourceid);

	/* We always release resource context. */
	releaseResourceContextWithErrorReport(resourceid);

	if ( res != FUNC_RETURN_OK )
	{
		ereport(ERROR,
				(errcode(IS_TO_RM_RPC_ERROR(res) ?
						 ERRCODE_INTERNAL_ERROR :
						 ERRCODE_INVALID_OBJECT_DEFINITION),
				 errmsg("can not apply CREATE RESOURCE QUEUE because %s",
						errorbuf)));
	}

	elog(LOG, "Complete applying CREATE RESOURCE QUEUE statement.");
}

/*******************************************************************************
 * DROP RESOURCE QUEUE statement handler.
 * stmt[in]		The parsed statement tree.
 ******************************************************************************/
void dropResourceQueue(DropQueueStmt *stmt)
{
	int res = FUNC_RETURN_OK;
	char errorbuf[1024];
	Relation pg_resqueue_rel;
	HeapTuple tuple;
	cqContext cqc;
	cqContext *pcqCtx;
	Oid queueid;

	/* Permission check - only superuser can drop queues. */
	if (!superuser())
	{
		ereport(ERROR,
				(errcode(ERRCODE_INSUFFICIENT_PRIVILEGE),
				 /* Fix: previously reported "create", misleading for DROP. */
				 errmsg("must be superuser to drop resource queues")));
	}

	/* Cannot DROP default and root queue */
	if ( strcmp(stmt->queue, RESOURCE_QUEUE_DEFAULT_QUEUE_NAME) == 0 ||
		 strcmp(stmt->queue, RESOURCE_QUEUE_ROOT_QUEUE_NAME) == 0 )
	{
		ereport(ERROR,
				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
				 errmsg("cannot drop system resource queue %s", stmt->queue)));
	}

	/*
	 * Check the pg_resqueue relation to be certain the queue already
	 * exists.
	 */
	pg_resqueue_rel = heap_open(ResQueueRelationId, RowExclusiveLock);

	pcqCtx = caql_addrel(cqclr(&cqc), pg_resqueue_rel);

	tuple = caql_getfirst(pcqCtx,
						  cql("SELECT * FROM pg_resqueue WHERE rsqname = :1 FOR UPDATE",
							  CStringGetDatum(stmt->queue)));

	if (!HeapTupleIsValid(tuple))
		ereport(ERROR,
				(errcode(ERRCODE_UNDEFINED_OBJECT),
				 errmsg("resource queue %s does not exist", stmt->queue)));

	/* Remember the Oid */
	queueid = HeapTupleGetOid(tuple);

	/*
	 * Check to see if any roles are in this queue.
	 */
	if (caql_getcount(
			NULL,
			cql("SELECT COUNT(*) FROM pg_authid WHERE rolresqueue = :1",
				ObjectIdGetDatum(queueid))))
	{
		ereport(ERROR,
				(errcode(ERRCODE_DEPENDENT_OBJECTS_STILL_EXIST),
				 errmsg("resource queue %s is used by at least one role",
						stmt->queue)));
	}

	heap_close(pg_resqueue_rel, NoLock);

	/*
	 * MPP-7960: We cannot run DROP RESOURCE QUEUE inside a user transaction
	 * block because the shared memory structures are not cleaned up on abort,
	 * resulting in "leaked", unreachable queues.
	 */
	if (Gp_role == GP_ROLE_DISPATCH)
	{
		PreventTransactionChain((void *) stmt, "DROP RESOURCE QUEUE");
	}

	/*
	 * Build the drop resource queue request and send it to HAWQ RM process.
	 * Basically, HAWQ RM runs all necessary logic to verify the statement and
	 * apply the change. Therefore, QD only sends out the original information
	 * and waits for the response.
	 */
	int resourceid = 0;
	res = createNewResourceContext(&resourceid);
	if ( res != FUNC_RETURN_OK )
	{
		Assert( res == COMM2RM_CLIENT_FULL_RESOURCECONTEXT );
		ereport(ERROR,
				(errcode(ERRCODE_INTERNAL_ERROR),
				 errmsg("cannot apply DROP RESOURCE QUEUE, "
						"because too many resource contexts were created.")));
	}

	/* Here, using user oid is more convenient. */
	res = registerConnectionInRMByOID(resourceid,
									  GetUserId(),
									  errorbuf,
									  sizeof(errorbuf));
	if ( res != FUNC_RETURN_OK )
	{
		releaseResourceContextWithErrorReport(resourceid);
		ereport(ERROR,
				(errcode(ERRCODE_INTERNAL_ERROR),
				 errmsg("%s", errorbuf)));
	}

	res = manipulateResourceQueue(resourceid,
								  stmt->queue,
								  MANIPULATE_RESQUEUE_DROP,
								  NULL,
								  errorbuf,
								  sizeof(errorbuf));

	/* We always unregister connection. */
	unregisterConnectionInRMWithErrorReport(resourceid);

	/* We always release resource context. */
	releaseResourceContextWithErrorReport(resourceid);

	if ( res != FUNC_RETURN_OK )
	{
		ereport(ERROR,
				(errcode(IS_TO_RM_RPC_ERROR(res) ?
						 ERRCODE_INTERNAL_ERROR :
						 ERRCODE_INVALID_OBJECT_DEFINITION),
				 errmsg("can not apply DROP RESOURCE QUEUE because %s",
						errorbuf)));
	}

	elog(LOG, "Completed applying DROP RESOURCE QUEUE statement.");
}

/*******************************************************************************
 * ALTER RESOURCE QUEUE statement handler.
 * stmt[in]		The parsed statement tree.
 ******************************************************************************/
void alterResourceQueue(AlterQueueStmt *stmt)
{
	int res = FUNC_RETURN_OK;
	static char errorbuf[1024];
	Relation pg_resqueue_rel;
	cqContext cqc;

	/* Permission check - only superuser can alter queues. */
	if (!superuser())
	{
		ereport(ERROR,
				(errcode(ERRCODE_INSUFFICIENT_PRIVILEGE),
				 /* Fix: previously reported "create", misleading for ALTER. */
				 errmsg("must be superuser to alter resource queues")));
	}

	/* Cannot ALTER the root queue (the default queue may be altered). */
	if ( strcmp(stmt->queue, RESOURCE_QUEUE_ROOT_QUEUE_NAME) == 0 )
	{
		ereport(ERROR,
				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
				 errmsg("cannot alter system resource queue %s", stmt->queue)));
	}

	/*
	 * MPP-7960: We cannot run ALTER RESOURCE QUEUE inside a user transaction
	 * block because the shared memory structures are not cleaned up on abort,
	 * resulting in "leaked", unreachable queues.
	 */
	if (Gp_role == GP_ROLE_DISPATCH)
	{
		PreventTransactionChain((void *) stmt, "ALTER RESOURCE QUEUE");
	}

	/* Validate options. */
	validateDDLAttributeOptions(stmt->options);

	/*
	 * Check if resource queue exists
	 */
	pg_resqueue_rel = heap_open(ResQueueRelationId, RowExclusiveLock);

	if (caql_getcount(
			caql_addrel(cqclr(&cqc), pg_resqueue_rel),
			cql("SELECT COUNT(*) FROM pg_resqueue WHERE rsqname = :1",
				CStringGetDatum(stmt->queue))) == 0)
	{
		/*
		 * Fix: this is an "object missing" error; it previously raised
		 * ERRCODE_DUPLICATE_OBJECT, contradicting the message and the
		 * matching check in dropResourceQueue().
		 */
		ereport(ERROR,
				(errcode(ERRCODE_UNDEFINED_OBJECT),
				 errmsg("resource queue %s does not exist", stmt->queue)));
	}

	heap_close(pg_resqueue_rel, NoLock);

	/*
	 * Build the alter resource queue request and send it to HAWQ RM process.
	 * Basically, HAWQ RM runs all necessary logic to verify the statement and
	 * apply the change. Therefore, QD only sends out the original information
	 * and waits for the response.
	 */
	int resourceid = 0;
	res = createNewResourceContext(&resourceid);
	if (res != FUNC_RETURN_OK)
	{
		Assert(res == COMM2RM_CLIENT_FULL_RESOURCECONTEXT);
		ereport(ERROR,
				(errcode(ERRCODE_INTERNAL_ERROR),
				 errmsg("too many existing resource context.")));
	}

	/* Here, using user oid is more convenient. */
	res = registerConnectionInRMByOID(resourceid,
									  GetUserId(),
									  errorbuf,
									  sizeof(errorbuf));
	if ( res != FUNC_RETURN_OK )
	{
		releaseResourceContextWithErrorReport(resourceid);
		ereport(ERROR,
				(errcode(ERRCODE_INTERNAL_ERROR),
				 errmsg("%s", errorbuf)));
	}

	res = manipulateResourceQueue(resourceid,
								  stmt->queue,
								  MANIPULATE_RESQUEUE_ALTER,
								  stmt->options,
								  errorbuf,
								  sizeof(errorbuf));

	/* We always unregister connection. */
	unregisterConnectionInRMWithErrorReport(resourceid);

	/* We always release resource context. */
	releaseResourceContextWithErrorReport(resourceid);

	if ( res != FUNC_RETURN_OK )
	{
		ereport(ERROR,
				(errcode(IS_TO_RM_RPC_ERROR(res) ?
						 ERRCODE_INTERNAL_ERROR :
						 ERRCODE_INVALID_OBJECT_DEFINITION),
				 errmsg("cannot apply ALTER RESOURCE QUEUE because %s",
						errorbuf)));
	}

	elog(LOG, "Completed applying ALTER RESOURCE QUEUE statement.");
}

/*
 * Match one DDL attribute against a DefElem and record it, rejecting a
 * redundant second occurrence of the same attribute.
 */
#define VALID_DDL_DUP(index, defel, targref)						   \
	if (strcmp((defel)->defname, RSQDDLAttrNames[(index)]) == 0)	   \
	{																   \
		if ((targref) != NULL)										   \
		{															   \
			ereport(ERROR,											   \
					(errcode(ERRCODE_SYNTAX_ERROR),					   \
					 errmsg("redundant attribute %s",				   \
							RSQDDLAttrNames[(index)])));			   \
		}															   \
		(targref) = (defel);										   \
		continue;													   \
	}

/*
 * Local validation of CREATE/ALTER RESOURCE QUEUE options: rejects duplicate
 * attributes and performs range/consistency checks on the values. Full
 * semantic validation is still done by the resource manager.
 */
void validateDDLAttributeOptions(List *options)
{
	DefElem *dparent			 = NULL;
	DefElem *dactivelimit		 = NULL;
	DefElem *dmemorylimit		 = NULL;
	DefElem *dcorelimit			 = NULL;
	DefElem *dvsegresquota		 = NULL;
	DefElem *dallocpolicy		 = NULL;
	DefElem *dresovercommit		 = NULL;
	DefElem *dnvsegupperlimit	 = NULL;
	DefElem *dnvseglowerlimit	 = NULL;
	DefElem *dnvsegupperlimitpseg = NULL;
	DefElem *dnvseglowerlimitpseg = NULL;

	Cost activelimit = INVALID_RES_LIMIT_THRESHOLD;

	ListCell *option = NULL;

	/* Extract options from the statement node tree, check duplicate options. */
	foreach(option, options)
	{
		DefElem *defel = (DefElem *) lfirst(option);

		VALID_DDL_DUP(RSQ_DDL_ATTR_PARENT, defel, dparent)
		VALID_DDL_DUP(RSQ_DDL_ATTR_ACTIVE_STATMENTS, defel, dactivelimit)
		VALID_DDL_DUP(RSQ_DDL_ATTR_MEMORY_LIMIT_CLUSTER, defel, dmemorylimit)
		VALID_DDL_DUP(RSQ_DDL_ATTR_CORE_LIMIT_CLUSTER, defel, dcorelimit)
		VALID_DDL_DUP(RSQ_DDL_ATTR_VSEG_RESOURCE_QUOTA, defel, dvsegresquota)
		VALID_DDL_DUP(RSQ_DDL_ATTR_ALLOCATION_POLICY, defel, dallocpolicy)
		VALID_DDL_DUP(RSQ_DDL_ATTR_RESOURCE_OVERCOMMIT_FACTOR, defel, dresovercommit)
		VALID_DDL_DUP(RSQ_DDL_ATTR_NVSEG_UPPER_LIMIT, defel, dnvsegupperlimit)
		VALID_DDL_DUP(RSQ_DDL_ATTR_NVSEG_LOWER_LIMIT, defel, dnvseglowerlimit)
		VALID_DDL_DUP(RSQ_DDL_ATTR_NVSEG_UPPER_LIMIT_PERSEG, defel, dnvsegupperlimitpseg)
		VALID_DDL_DUP(RSQ_DDL_ATTR_NVSEG_LOWER_LIMIT_PERSEG, defel, dnvseglowerlimitpseg)
	}

	/* Perform range checks on the various thresholds. */
	if (dactivelimit)
	{
		activelimit = (Cost) defGetInt64(dactivelimit);
		if (!(activelimit == INVALID_RES_LIMIT_THRESHOLD || (activelimit > 0)))
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("active threshold cannot be less than %d or equal to 0",
							INVALID_RES_LIMIT_THRESHOLD)));
	}

	/* Memory and core expression must be the same. */
	if (dmemorylimit && dcorelimit)
	{
		bool need_free_mem  = false;
		bool need_free_core = false;
		char *memory_limit = defGetString(dmemorylimit, &need_free_mem);
		char *core_limit   = defGetString(dcorelimit, &need_free_core);

		if (memory_limit != NULL && core_limit != NULL)
		{
			if(strcmp(memory_limit, core_limit) != 0)
			{
				/* Free before ereport longjmps out of this function. */
				if(need_free_mem)
				{
					free(memory_limit);
				}
				if(need_free_core)
				{
					free(core_limit);
				}
				ereport(ERROR,
						(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
						 errmsg("the values of %s and %s must be same",
								RSQDDLAttrNames[RSQ_DDL_ATTR_MEMORY_LIMIT_CLUSTER],
								RSQDDLAttrNames[RSQ_DDL_ATTR_CORE_LIMIT_CLUSTER])));
			}
		}
		else
		{
			if(need_free_mem)
			{
				free(memory_limit);
			}
			if(need_free_core)
			{
				free(core_limit);
			}
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("invalid value of %s or %s",
							RSQDDLAttrNames[RSQ_DDL_ATTR_MEMORY_LIMIT_CLUSTER],
							RSQDDLAttrNames[RSQ_DDL_ATTR_CORE_LIMIT_CLUSTER])));
		}
	}

	/*
	 * NVSEG_UPPER_LIMIT/NVSEG_LOWER_LIMIT has 0 as default value that means the
	 * setting is not effective, otherwise, it must be greater than 0.
	 */
	int64_t nvsegupperlimit = -1;
	int64_t nvseglowerlimit = -1;
	if (dnvsegupperlimit != NULL)
	{
		nvsegupperlimit = defGetInt64(dnvsegupperlimit);
		if (nvsegupperlimit < MINIMUM_RESQUEUE_NVSEG_UPPER_LIMIT_N)
		{
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("%s cannot be less than %s",
							RSQDDLAttrNames[RSQ_DDL_ATTR_NVSEG_UPPER_LIMIT],
							MINIMUM_RESQUEUE_NVSEG_UPPER_LIMIT)));
		}
	}

	if (dnvseglowerlimit != NULL)
	{
		nvseglowerlimit = defGetInt64(dnvseglowerlimit);
		if (nvseglowerlimit < MINIMUM_RESQUEUE_NVSEG_LOWER_LIMIT_N)
		{
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("%s cannot be less than %s",
							RSQDDLAttrNames[RSQ_DDL_ATTR_NVSEG_LOWER_LIMIT],
							MINIMUM_RESQUEUE_NVSEG_LOWER_LIMIT)));
		}
	}

	/*
	 * NVSEG_UPPER_LIMIT_PERSEG/NVSEG_LOWER_LIMIT_PERSEG has 0 as default value
	 * that means the setting is not effective, otherwise, it must be greater
	 * than 0.
	 */
	double nvsegupperlimitpseg = -1.0;
	double nvseglowerlimitpseg = -1.0;
	if (dnvsegupperlimitpseg != NULL)
	{
		nvsegupperlimitpseg = defGetNumeric(dnvsegupperlimitpseg);
		if (nvsegupperlimitpseg < MINIMUM_RESQUEUE_NVSEG_UPPER_PERSEG_LIMIT_N)
		{
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("%s cannot be less than %s",
							RSQDDLAttrNames[RSQ_DDL_ATTR_NVSEG_UPPER_LIMIT_PERSEG],
							MINIMUM_RESQUEUE_NVSEG_UPPER_PERSEG_LIMIT)));
		}
	}

	if (dnvseglowerlimitpseg != NULL)
	{
		nvseglowerlimitpseg = defGetNumeric(dnvseglowerlimitpseg);
		if (nvseglowerlimitpseg < MINIMUM_RESQUEUE_NVSEG_LOWER_PERSEG_LIMIT_N)
		{
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("%s cannot be less than %s",
							RSQDDLAttrNames[RSQ_DDL_ATTR_NVSEG_LOWER_LIMIT_PERSEG],
							MINIMUM_RESQUEUE_NVSEG_LOWER_PERSEG_LIMIT)));
		}
	}

	/* The resource upper factor must be no less than 1. */
	if( dresovercommit != NULL)
	{
		double resovercommit = defGetNumeric(dresovercommit);
		if (resovercommit < MINIMUM_RESQUEUE_OVERCOMMIT_N)
		{
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("%s cannot be less than %s",
							RSQDDLAttrNames[RSQ_DDL_ATTR_RESOURCE_OVERCOMMIT_FACTOR],
							MINIMUM_RESQUEUE_OVERCOMMIT)));
		}
	}
}
hornn/interviews
src/backend/resourcemanager/resqueuecommand.c
C
apache-2.0
16,970
/* 
 * Muhimbi PDF
 *
 * Convert, Merge, Watermark, Secure and OCR files.
 *
 * OpenAPI spec version: 9.15
 * 
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 *
 * NOTE: swagger-codegen-generated model; regenerate rather than hand-edit.
 */

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;

namespace Muhimbi.PDF.Online.Client.Model
{
    /// <summary>
    /// Parameters for CopyMetadata operation
    /// </summary>
    [DataContract]
    public partial class CopyMetadataData :  IEquatable<CopyMetadataData>, IValidatableObject
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="CopyMetadataData" /> class.
        /// </summary>
        // Protected parameterless constructor reserved for JSON deserialization;
        // callers must use the parameterized constructor so required fields are set.
        [JsonConstructorAttribute]
        protected CopyMetadataData() { }
        /// <summary>
        /// Initializes a new instance of the <see cref="CopyMetadataData" /> class.
        /// </summary>
        /// <param name="SiteUrl">SharePoint site url (example: http://contoso.sharepoint.com/sites/mysite) (required).</param>
        /// <param name="SourceFileUrl">Path to the source file (required).</param>
        /// <param name="DestinationFileUrl">Path to the destination file (required).</param>
        /// <param name="Username">User name to log in to the SharePoint site.</param>
        /// <param name="Password">Password to log in to the SharePoint site.</param>
        /// <param name="CopyFields">Optional comma separated list of fields.</param>
        /// <param name="ContentType">Optional content type for the destination file.</param>
        /// <param name="FailOnError">Fail on error (default to true).</param>
        public CopyMetadataData(string SiteUrl = default(string), string SourceFileUrl = default(string), string DestinationFileUrl = default(string), string Username = default(string), string Password = default(string), string CopyFields = default(string), string ContentType = default(string), bool? FailOnError = true)
        {
            // to ensure "SiteUrl" is required (not null)
            if (SiteUrl == null)
            {
                throw new InvalidDataException("SiteUrl is a required property for CopyMetadataData and cannot be null");
            }
            else
            {
                this.SiteUrl = SiteUrl;
            }
            // to ensure "SourceFileUrl" is required (not null)
            if (SourceFileUrl == null)
            {
                throw new InvalidDataException("SourceFileUrl is a required property for CopyMetadataData and cannot be null");
            }
            else
            {
                this.SourceFileUrl = SourceFileUrl;
            }
            // to ensure "DestinationFileUrl" is required (not null)
            if (DestinationFileUrl == null)
            {
                throw new InvalidDataException("DestinationFileUrl is a required property for CopyMetadataData and cannot be null");
            }
            else
            {
                this.DestinationFileUrl = DestinationFileUrl;
            }
            this.Username = Username;
            this.Password = Password;
            this.CopyFields = CopyFields;
            this.ContentType = ContentType;
            // use default value if no "FailOnError" provided
            if (FailOnError == null)
            {
                this.FailOnError = true;
            }
            else
            {
                this.FailOnError = FailOnError;
            }
        }
        
        /// <summary>
        /// SharePoint site url (example: http://contoso.sharepoint.com/sites/mysite)
        /// </summary>
        /// <value>SharePoint site url (example: http://contoso.sharepoint.com/sites/mysite)</value>
        [DataMember(Name="site_url", EmitDefaultValue=false)]
        public string SiteUrl { get; set; }
        /// <summary>
        /// Path to the source file
        /// </summary>
        /// <value>Path to the source file</value>
        [DataMember(Name="source_file_url", EmitDefaultValue=false)]
        public string SourceFileUrl { get; set; }
        /// <summary>
        /// Path to the destination file
        /// </summary>
        /// <value>Path to the destination file</value>
        [DataMember(Name="destination_file_url", EmitDefaultValue=false)]
        public string DestinationFileUrl { get; set; }
        /// <summary>
        /// User name to log in to the SharePoint site
        /// </summary>
        /// <value>User name to log in to the SharePoint site</value>
        [DataMember(Name="username", EmitDefaultValue=false)]
        public string Username { get; set; }
        /// <summary>
        /// Password to log in to the SharePoint site
        /// </summary>
        /// <value>Password to log in to the SharePoint site</value>
        [DataMember(Name="password", EmitDefaultValue=false)]
        public string Password { get; set; }
        /// <summary>
        /// Optional comma separated list of fields
        /// </summary>
        /// <value>Optional comma separated list of fields</value>
        [DataMember(Name="copy_fields", EmitDefaultValue=false)]
        public string CopyFields { get; set; }
        /// <summary>
        /// Optional content type for the destination file
        /// </summary>
        /// <value>Optional content type for the destination file</value>
        [DataMember(Name="content_type", EmitDefaultValue=false)]
        public string ContentType { get; set; }
        /// <summary>
        /// Fail on error
        /// </summary>
        /// <value>Fail on error</value>
        [DataMember(Name="fail_on_error", EmitDefaultValue=false)]
        public bool? FailOnError { get; set; }
        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class CopyMetadataData {\n");
            sb.Append("  SiteUrl: ").Append(SiteUrl).Append("\n");
            sb.Append("  SourceFileUrl: ").Append(SourceFileUrl).Append("\n");
            sb.Append("  DestinationFileUrl: ").Append(DestinationFileUrl).Append("\n");
            sb.Append("  Username: ").Append(Username).Append("\n");
            sb.Append("  Password: ").Append(Password).Append("\n");
            sb.Append("  CopyFields: ").Append(CopyFields).Append("\n");
            sb.Append("  ContentType: ").Append(ContentType).Append("\n");
            sb.Append("  FailOnError: ").Append(FailOnError).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }
  
        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            // credit: http://stackoverflow.com/a/10454552/677735
            return this.Equals(obj as CopyMetadataData);
        }

        /// <summary>
        /// Returns true if CopyMetadataData instances are equal
        /// </summary>
        /// <param name="other">Instance of CopyMetadataData to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(CopyMetadataData other)
        {
            // credit: http://stackoverflow.com/a/10454552/677735
            // Null-tolerant field-by-field comparison of all serialized properties.
            if (other == null)
                return false;

            return 
                (
                    this.SiteUrl == other.SiteUrl ||
                    this.SiteUrl != null &&
                    this.SiteUrl.Equals(other.SiteUrl)
                ) && 
                (
                    this.SourceFileUrl == other.SourceFileUrl ||
                    this.SourceFileUrl != null &&
                    this.SourceFileUrl.Equals(other.SourceFileUrl)
                ) && 
                (
                    this.DestinationFileUrl == other.DestinationFileUrl ||
                    this.DestinationFileUrl != null &&
                    this.DestinationFileUrl.Equals(other.DestinationFileUrl)
                ) && 
                (
                    this.Username == other.Username ||
                    this.Username != null &&
                    this.Username.Equals(other.Username)
                ) && 
                (
                    this.Password == other.Password ||
                    this.Password != null &&
                    this.Password.Equals(other.Password)
                ) && 
                (
                    this.CopyFields == other.CopyFields ||
                    this.CopyFields != null &&
                    this.CopyFields.Equals(other.CopyFields)
                ) && 
                (
                    this.ContentType == other.ContentType ||
                    this.ContentType != null &&
                    this.ContentType.Equals(other.ContentType)
                ) && 
                (
                    this.FailOnError == other.FailOnError ||
                    this.FailOnError != null &&
                    this.FailOnError.Equals(other.FailOnError)
                );
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            // credit: http://stackoverflow.com/a/263416/677735
            // Standard prime-multiplier hash over all non-null properties.
            // NOTE(review): properties are mutable, so the hash can change
            // after insertion into a hashed collection — generated-code caveat.
            unchecked // Overflow is fine, just wrap
            {
                int hash = 41;
                // Suitable nullity checks etc, of course :)
                if (this.SiteUrl != null)
                    hash = hash * 59 + this.SiteUrl.GetHashCode();
                if (this.SourceFileUrl != null)
                    hash = hash * 59 + this.SourceFileUrl.GetHashCode();
                if (this.DestinationFileUrl != null)
                    hash = hash * 59 + this.DestinationFileUrl.GetHashCode();
                if (this.Username != null)
                    hash = hash * 59 + this.Username.GetHashCode();
                if (this.Password != null)
                    hash = hash * 59 + this.Password.GetHashCode();
                if (this.CopyFields != null)
                    hash = hash * 59 + this.CopyFields.GetHashCode();
                if (this.ContentType != null)
                    hash = hash * 59 + this.ContentType.GetHashCode();
                if (this.FailOnError != null)
                    hash = hash * 59 + this.FailOnError.GetHashCode();
                return hash;
            }
        }

        // No additional validation rules beyond the constructor's null checks.
        public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
        { 
            yield break;
        }
    }
}
Muhimbi/PDF-Converter-Services-Online
clients/v1/csharp/client/src/Muhimbi.PDF.Online.Client/Model/CopyMetadataData.cs
C#
apache-2.0
11,226
drop table if exists data_attribute_file_info cascade; drop table if exists data_object_attribute_file_info cascade; -- 데이터 속성 파일 관리 create table data_attribute_file_info( data_attribute_file_info_id bigint, data_id bigint, user_id varchar(32) not null, file_name varchar(100) not null, file_real_name varchar(100) not null, file_path varchar(256) not null, file_size varchar(12) not null, file_ext varchar(10) not null, total_count bigint default 0, parse_success_count bigint default 0, parse_error_count bigint default 0, insert_success_count bigint default 0, insert_error_count bigint default 0, insert_date timestamp with time zone default now(), constraint data_attribute_file_info_pk primary key (data_attribute_file_info_id) ); comment on table data_attribute_file_info is '데이터 속성 파일 관리'; comment on column data_attribute_file_info.data_attribute_file_info_id is '고유번호'; comment on column data_attribute_file_info.data_id is '데이터 고유번호'; comment on column data_attribute_file_info.user_id is '사용자 아이디'; comment on column data_attribute_file_info.file_name is '파일 이름'; comment on column data_attribute_file_info.file_real_name is '파일 실제 이름'; comment on column data_attribute_file_info.file_path is '파일 경로'; comment on column data_attribute_file_info.file_size is '파일 사이즈'; comment on column data_attribute_file_info.file_ext is '파일 확장자'; comment on column data_attribute_file_info.total_count is '전체 데이터 건수'; comment on column data_attribute_file_info.parse_success_count is '파싱 성공 건수'; comment on column data_attribute_file_info.parse_error_count is '파싱 오류'; comment on column data_attribute_file_info.insert_success_count is 'SQL Insert 성공 건수'; comment on column data_attribute_file_info.insert_error_count is 'SQL Insert 실패 건수'; comment on column data_attribute_file_info.insert_date is '등록일'; -- 데이터 Object 속성 파일 관리 create table data_object_attribute_file_info ( data_object_attribute_file_info_id bigint, data_id bigint, user_id varchar(32) not null, file_name varchar(100) not null, 
file_real_name varchar(100) not null, file_path varchar(256) not null, file_size varchar(12) not null, file_ext varchar(10) not null, total_count bigint default 0, parse_success_count bigint default 0, parse_error_count bigint default 0, insert_success_count bigint default 0, insert_error_count bigint default 0, insert_date timestamp with time zone default now(), constraint data_object_attribute_file_info_pk primary key (data_object_attribute_file_info_id) ); comment on table data_object_attribute_file_info is '데이터 Object 속성 파일 관리'; comment on column data_object_attribute_file_info.data_object_attribute_file_info_id is '고유번호'; comment on column data_object_attribute_file_info.data_id is '데이터 고유번호'; comment on column data_object_attribute_file_info.user_id is '사용자 아이디'; comment on column data_object_attribute_file_info.file_name is '파일 이름'; comment on column data_object_attribute_file_info.file_real_name is '파일 실제 이름'; comment on column data_object_attribute_file_info.file_path is '파일 경로'; comment on column data_object_attribute_file_info.file_size is '파일 사이즈'; comment on column data_object_attribute_file_info.file_ext is '파일 확장자'; comment on column data_object_attribute_file_info.total_count is '전체 데이터 건수'; comment on column data_object_attribute_file_info.parse_success_count is '파싱 성공 건수'; comment on column data_object_attribute_file_info.parse_error_count is '파싱 오류'; comment on column data_object_attribute_file_info.insert_success_count is 'SQL Insert 성공 건수'; comment on column data_object_attribute_file_info.insert_error_count is 'SQL Insert 실패 건수'; comment on column data_object_attribute_file_info.insert_date is '등록일';
Gaia3D/mago3d
doc/database/ddl/data_attribute_file_info.sql
SQL
apache-2.0
4,256
/* * Licensed under the Apache License, Version 2.0 (the "License"); * * You may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * * Contributions from 2013-2017 where performed either by US government * employees, or under US Veterans Health Administration contracts. * * US Veterans Health Administration contributions by government employees * are work of the U.S. Government and are not subject to copyright * protection in the United States. Portions contributed by government * employees are USGovWork (17USC §105). Not subject to copyright. * * Contribution by contractors to the US Veterans Health Administration * during this period are contractually contributed under the * Apache License, Version 2.0. * * See: https://www.usa.gov/government-works * * Contributions prior to 2013: * * Copyright (C) International Health Terminology Standards Development Organisation. * Licensed under the Apache License, Version 2.0. * */ package sh.isaac.model.logic.node; //~--- JDK imports ------------------------------------------------------------ //~--- non-JDK imports -------------------------------------------------------- import sh.isaac.api.DataTarget; import sh.isaac.api.externalizable.ByteArrayDataBuffer; import sh.isaac.api.logic.LogicNode; import sh.isaac.model.logic.LogicalExpressionImpl; //~--- classes ---------------------------------------------------------------- /** * Created by kec on 12/12/14. */ public abstract class LiteralNode extends AbstractLogicNode { /** * Instantiates a new literal node. 
* * @param logicGraphVersion the logic graph version */ public LiteralNode(LogicalExpressionImpl logicGraphVersion) { super(logicGraphVersion); } /** * Instantiates a new literal node. * * @param logicGraphVersion the logic graph version * @param dataInputStream the data input stream */ public LiteralNode(LogicalExpressionImpl logicGraphVersion, ByteArrayDataBuffer dataInputStream) { super(logicGraphVersion, dataInputStream); } //~--- methods ------------------------------------------------------------- /** * Adds the children. * * @param children the children */ @Override public final void addChildren(LogicNode... children) { throw new UnsupportedOperationException(); } /** * To string. * * @return the string */ @Override public String toString() { return toString(""); } /** * To string. * * @param nodeIdSuffix the node id suffix * @return the string */ @Override public String toString(String nodeIdSuffix) { return super.toString(nodeIdSuffix); } @Override public String toSimpleString() { return super.toSimpleString(); } /** * Write node data. * * @param dataOutput the data output * @param dataTarget the data target */ @Override protected void writeNodeData(ByteArrayDataBuffer dataOutput, DataTarget dataTarget) { super.writeData(dataOutput, dataTarget); } //~--- get methods --------------------------------------------------------- /** * Gets the children. * * @return the children */ @Override public final AbstractLogicNode[] getChildren() { return new AbstractLogicNode[0]; } @Override public final void removeChild(short childId) { // nothing to do... } }
OSEHRA/ISAAC
core/model/src/main/java/sh/isaac/model/logic/node/LiteralNode.java
Java
apache-2.0
3,914
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flume; import org.apache.flume.lifecycle.LifecycleAware; /** * <p> * A channel connects a {@link Source} to a {@link Sink}. The source * acts as producer while the sink acts as a consumer of events. The channel * itself is the buffer between the two. * </p> * <p> * A channel exposes a {@link Transaction} interface that can be used by * its clients to ensure atomic {@linkplain #put(Event) put} and * {@linkplain #take() take} semantics. * This is necessary to guarantee single hop reliability between agents. * For instance, a source will successfully produce an {@linkplain Event event} * if and only if that event can be committed to the source's associated * channel. Similarly, a sink will consume an event if and * only if its respective endpoint can accept the event. The * extent of transaction support varies for different channel implementations * ranging from strong to best-effort semantics. * </p> * <p> * Channels are associated with unique {@linkplain NamedComponent names} that * can be used for separating configuration and working namespaces. 
* </p> * <p> * Channels must be thread safe, protecting any internal invariants as no * guarantees are given as to when and by how many sources/sinks they may * be simultaneously accessed by. * </p> * * @see org.apache.flume.Source * @see org.apache.flume.Sink * @see org.apache.flume.Transaction */ public interface Channel extends LifecycleAware, NamedComponent { /** * <p>Puts the given event into the channel.</p> * <p><strong>Note</strong>: This method must be invoked within an active * {@link Transaction} boundary. Failure to do so can lead to unpredictable * results.</p> * @param event the event to transport. * @throws ChannelException in case this operation fails. * @see org.apache.flume.Transaction#begin() */ public void put(Event event) throws ChannelException; /** * <p>Returns the next event from the channel if available. If the channel * does not have any events available, this method must return {@code null}. * </p> * <p><strong>Note</strong>: This method must be invoked within an active * {@link Transaction} boundary. Failure to do so can lead to unpredictable * results.</p> * @return the next available event or {@code null} if no events are * available. * @throws ChannelException in case this operation fails. * @see org.apache.flume.Transaction#begin() */ public Event take() throws ChannelException; /** * @return the transaction instance associated with this channel. */ public Transaction getTransaction(); }
lfzCarlosC/test
flume-ng-core/src/main/java/org/apache/flume/Channel.java
Java
apache-2.0
3,442
<!DOCTYPE html> <html> <head> <meta charset='utf-8'> <title>AngularJs</title> <link href="//netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet"> <body ng-app="CrudApp"> <p>hello from Spring boot 5 </p> <div class="container"> <div ng-view></div> </div> <script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/angularjs/1.0.7/angular.min.js"></script> <script type="text/javascript" src="/js/app.js"></script> </body> </html>
amoAHCP/openshift-workshop
spring-boot-CRUD-admin-step5/src/main/resources/static/index.html
HTML
apache-2.0
488
package org.jsmart.zerocode.core.domain; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.jsmart.zerocode.core.kafka.client.BasicKafkaClient; @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @Inherited public @interface UseKafkaClient { /** * @return a Kafka Client implementation class which will override the default implementation */ Class<? extends BasicKafkaClient> value(); }
authorjapps/zerocode
core/src/main/java/org/jsmart/zerocode/core/domain/UseKafkaClient.java
Java
apache-2.0
576
/* * Configuration.java * Created by: Scott A. Roehrig * Created on: Aug 17, 2016 at 9:31:09 PM */ package org.apache.bazaar.web.config; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.util.Properties; import javax.validation.constraints.NotNull; import org.apache.bazaar.Bazaar; import org.apache.bazaar.Bid; import org.apache.bazaar.Bidder; import org.apache.bazaar.Category; import org.apache.bazaar.Item; import org.apache.bazaar.web.BazaarCollectionMessageBodyReaderImpl; import org.apache.bazaar.web.BazaarCollectionMessageBodyWriterImpl; import org.apache.bazaar.web.BazaarMessageBodyReaderImpl; import org.apache.bazaar.web.BazaarMessageBodyWriterImpl; import org.apache.bazaar.web.BidCollectionMessageBodyReaderImpl; import org.apache.bazaar.web.BidCollectionMessageBodyWriterImpl; import org.apache.bazaar.web.BidMessageBodyReaderImpl; import org.apache.bazaar.web.BidMessageBodyWriterImpl; import org.apache.bazaar.web.BidderCollectionMessageBodyReaderImpl; import org.apache.bazaar.web.BidderCollectionMessageBodyWriterImpl; import org.apache.bazaar.web.BidderMessageBodyReaderImpl; import org.apache.bazaar.web.BidderMessageBodyWriterImpl; import org.apache.bazaar.web.CategoryCollectionMessageBodyReaderImpl; import org.apache.bazaar.web.CategoryCollectionMessageBodyWriterImpl; import org.apache.bazaar.web.CategoryMessageBodyReaderImpl; import org.apache.bazaar.web.CategoryMessageBodyWriterImpl; import org.apache.bazaar.web.ItemCollectionMessageBodyReaderImpl; import org.apache.bazaar.web.ItemCollectionMessageBodyWriterImpl; import org.apache.bazaar.web.ItemMessageBodyReaderImpl; import org.apache.bazaar.web.ItemMessageBodyWriterImpl; import org.apache.bazaar.web.ThrowableMessageBodyReaderImpl; import org.apache.bazaar.web.ThrowableMessageBodyWriterImpl; import org.apache.bazaar.web.VersionCollectionMessageBodyReaderImpl; import 
org.apache.bazaar.web.VersionCollectionMessageBodyWriterImpl; import org.apache.bazaar.web.VersionMessageBodyReaderImpl; import org.apache.bazaar.web.VersionMessageBodyWriterImpl; /** * Configuration extends Configuration and declares the methods an * implementation must provide */ public class Configuration extends org.apache.bazaar.config.Configuration { // declare members /** * The key for retrieval of the {@link Bazaar} web service url value */ public static final String BAZAAR_REST_WEB_SERVICE_URL = Bazaar.class.getName() + "." + "restwebservice.url"; /** * The key for retrieval of the {@link Item} web service url value */ public static final String ITEM_REST_WEB_SERVICE_URL = Item.class.getName() + "." + "restwebservice.url"; /** * The key for retrieval of the {@link Category} web service url value */ public static final String CATEGORY_REST_WEB_SERVICE_URL = Category.class.getName() + "." + "restwebservice.url"; /** * The key for retrieval of the {@link Bidder} web service url value */ public static final String BIDDER_REST_WEB_SERVICE_URL = Bidder.class.getName() + "." + "restwebservice.url"; /** * The key for retrieval of the {@link Bid} web service url value */ public static final String BID_REST_WEB_SERVICE_URL = Bid.class.getName() + "." 
+ "restwebservice.url"; /** * List of public provider classes */ public static final Class<?>[] PROVIDER_CLASSES = new Class<?>[] { BazaarMessageBodyReaderImpl.class, BazaarMessageBodyWriterImpl.class, BazaarCollectionMessageBodyReaderImpl.class, BazaarCollectionMessageBodyWriterImpl.class, CategoryMessageBodyReaderImpl.class, CategoryMessageBodyWriterImpl.class, CategoryCollectionMessageBodyReaderImpl.class, CategoryCollectionMessageBodyWriterImpl.class, ItemMessageBodyReaderImpl.class, ItemMessageBodyWriterImpl.class, ItemCollectionMessageBodyReaderImpl.class, ItemCollectionMessageBodyWriterImpl.class, BidderMessageBodyReaderImpl.class, BidderMessageBodyWriterImpl.class, BidderCollectionMessageBodyReaderImpl.class, BidderCollectionMessageBodyWriterImpl.class, BidMessageBodyReaderImpl.class, BidMessageBodyWriterImpl.class, BidCollectionMessageBodyReaderImpl.class, BidCollectionMessageBodyWriterImpl.class, VersionMessageBodyWriterImpl.class, VersionMessageBodyReaderImpl.class, VersionCollectionMessageBodyWriterImpl.class, VersionCollectionMessageBodyReaderImpl.class, ThrowableMessageBodyReaderImpl.class, ThrowableMessageBodyWriterImpl.class }; protected static final Properties PROPERTIES; static { try (final InputStream inputStream = Configuration.class .getResourceAsStream("/org/apache/bazaar/web/config/configuration.properties")) { if (inputStream == null) { throw new ExceptionInInitializerError(); } PROPERTIES = new Properties(org.apache.bazaar.config.Configuration.PROPERTIES); Configuration.PROPERTIES.load(new BufferedReader(new InputStreamReader(inputStream, Charset.forName(org.apache.bazaar.config.Configuration.DEFAULT_ENCODING)))); } catch (final IOException exception) { throw new ExceptionInInitializerError(exception); } } // declare constructors /** * Constructor for Configuration * * @param properties The configuration properties */ protected Configuration(@NotNull final Properties properties) { super(properties); } // declare methods /** * Factory method 
for obtaining instance * * @return Instance of configuration */ public static @NotNull org.apache.bazaar.config.Configuration newInstance() { return new Configuration(Configuration.PROPERTIES); } }
saroehr/Bazaar
BazaarWebClient/src/org/apache/bazaar/web/config/Configuration.java
Java
apache-2.0
5,744
package ${package}.dao.impl.es; import graphene.dao.TransactionDAO; import graphene.model.query.BasicQuery; import graphene.model.view.events.DirectedEventRow; import graphene.util.G_CallBack; import java.util.List; public class TransactionDAOESImpl implements TransactionDAO<Object, BasicQuery> { @Override public List<Object> findByQuery(BasicQuery pq) throws Exception { // TODO Auto-generated method stub return null; } @Override public List<Object> getAll(long offset, long maxResults) throws Exception { // TODO Auto-generated method stub return null; } @Override public long count(BasicQuery q) throws Exception { // TODO Auto-generated method stub return 0; } @Override public boolean isReady() { // TODO Auto-generated method stub return false; } @Override public void setReady(boolean b) { // TODO Auto-generated method stub } @Override public double getReadiness() { // TODO Auto-generated method stub return 0; } @Override public boolean performCallback(long offset, long maxResults, G_CallBack<Object,BasicQuery> cb, BasicQuery q) { // TODO Auto-generated method stub return false; } @Override public long countEdges(String id) throws Exception { // TODO Auto-generated method stub return 0; } @Override public List<DirectedEventRow> getEvents(BasicQuery q) { // TODO Auto-generated method stub return null; } @Override public DirectedEventRow findEventById(String id) { // TODO Auto-generated method stub return null; } }
codeaudit/graphene
graphene-parent/graphene-archetype/src/main/resources/archetype-resources/src/main/java/dao/impl/es/TransactionDAOESImpl.java
Java
apache-2.0
1,522
/* * Licensed to the Apache Software Foundation (ASF) Under one or more * contributor license agreements. See the NOTICE file distributed with * this work for Additional information regarding copyright ownership. * The ASF licenses this file to You Under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed Under the License Is distributed on an "AS Is" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations Under the License. */ /* * Created on May 15, 2005 * */ namespace NPOI.SS.Formula.Functions { using System; using NPOI.SS.Formula.Eval; internal class Npv : Function { [Obsolete] public ValueEval Evaluate(int srcRowIndex, int srcColumnIndex, ValueEval arg0, ValueEval arg1) { double result; try { double rate = NumericFunction.SingleOperandEvaluate(arg0, srcRowIndex, srcColumnIndex); double d1 = NumericFunction.SingleOperandEvaluate(arg1, srcRowIndex, srcColumnIndex); result = Evaluate(rate, d1); NumericFunction.CheckValue(result); } catch (EvaluationException e) { return e.GetErrorEval(); } return new NumberEval(result); } [Obsolete] public ValueEval Evaluate(int srcRowIndex, int srcColumnIndex, ValueEval arg0, ValueEval arg1, ValueEval arg2) { double result; try { double rate = NumericFunction.SingleOperandEvaluate(arg0, srcRowIndex, srcColumnIndex); double d1 = NumericFunction.SingleOperandEvaluate(arg1, srcRowIndex, srcColumnIndex); double d2 = NumericFunction.SingleOperandEvaluate(arg2, srcRowIndex, srcColumnIndex); result = Evaluate(rate, d1, d2); NumericFunction.CheckValue(result); } catch (EvaluationException e) { return e.GetErrorEval(); } return new NumberEval(result); } [Obsolete] public ValueEval Evaluate(int 
srcRowIndex, int srcColumnIndex, ValueEval arg0, ValueEval arg1, ValueEval arg2, ValueEval arg3) { double result; try { double rate = NumericFunction.SingleOperandEvaluate(arg0, srcRowIndex, srcColumnIndex); double d1 = NumericFunction.SingleOperandEvaluate(arg1, srcRowIndex, srcColumnIndex); double d2 = NumericFunction.SingleOperandEvaluate(arg2, srcRowIndex, srcColumnIndex); double d3 = NumericFunction.SingleOperandEvaluate(arg3, srcRowIndex, srcColumnIndex); result = Evaluate(rate, d1, d2, d3); NumericFunction.CheckValue(result); } catch (EvaluationException e) { return e.GetErrorEval(); } return new NumberEval(result); } public ValueEval Evaluate(ValueEval[] args, int srcRowIndex, int srcColumnIndex) { int nArgs = args.Length; if (nArgs < 2) { return ErrorEval.VALUE_INVALID; } try { double rate = NumericFunction.SingleOperandEvaluate(args[0], srcRowIndex, srcColumnIndex); // convert tail arguments into an array of doubles ValueEval[] vargs = new ValueEval[args.Length - 1]; Array.Copy(args, 1, vargs, 0, vargs.Length); double[] values = AggregateFunction.ValueCollector.CollectValues(vargs); double result = FinanceLib.npv(rate, values); NumericFunction.CheckValue(result); return new NumberEval(result); } catch (EvaluationException e) { return e.GetErrorEval(); } } private static double Evaluate(double rate, params double[] ds) { double sum = 0; for (int i = 0; i < ds.Length; i++) { sum += ds[i] / Math.Pow(rate + 1, i); } return sum; } } }
treenew/sofire
src/Core/Sofire.Extends/Excel/NPOI/SS/Formula/Functions/Npv.cs
C#
apache-2.0
4,725
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.artifacts.ivyservice.resolveengine.result; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import org.gradle.api.Describable; import org.gradle.api.artifacts.result.ComponentSelectionCause; import org.gradle.api.artifacts.result.ComponentSelectionDescriptor; import java.util.ArrayDeque; import java.util.Collections; import java.util.Iterator; import java.util.List; public class VersionSelectionReasons { public static final ComponentSelectionDescriptorInternal REQUESTED = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.REQUESTED); public static final ComponentSelectionDescriptorInternal ROOT = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.ROOT); public static final ComponentSelectionDescriptorInternal FORCED = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.FORCED); public static final ComponentSelectionDescriptorInternal CONFLICT_RESOLUTION = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.CONFLICT_RESOLUTION); public static final ComponentSelectionDescriptorInternal SELECTED_BY_RULE = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.SELECTED_BY_RULE); public static final ComponentSelectionDescriptorInternal COMPOSITE_BUILD = new 
DefaultComponentSelectionDescriptor(ComponentSelectionCause.COMPOSITE_BUILD); public static final ComponentSelectionDescriptorInternal CONSTRAINT = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.CONSTRAINT); public static final ComponentSelectionDescriptorInternal REJECTION = new DefaultComponentSelectionDescriptor(ComponentSelectionCause.REJECTION); public static ComponentSelectionReasonInternal requested() { return new DefaultComponentSelectionReason(REQUESTED); } public static ComponentSelectionReasonInternal empty() { return new DefaultComponentSelectionReason(Collections.<ComponentSelectionDescriptor>emptyList()); } public static ComponentSelectionReasonInternal root() { return new DefaultComponentSelectionReason(ROOT); } public static ComponentSelectionReasonInternal of(List<? extends ComponentSelectionDescriptor> descriptions) { return new DefaultComponentSelectionReason(descriptions); } public static ComponentSelectionReasonInternal of(ComponentSelectionDescriptor descriptions) { return new DefaultComponentSelectionReason(descriptions); } public static boolean isCauseExpected(ComponentSelectionDescriptor descriptor) { return descriptor.getCause() == ComponentSelectionCause.REQUESTED || descriptor.getCause() == ComponentSelectionCause.ROOT; } private static class DefaultComponentSelectionReason implements ComponentSelectionReasonInternal { private final ArrayDeque<ComponentSelectionDescriptorInternal> descriptions; private DefaultComponentSelectionReason(ComponentSelectionDescriptor description) { descriptions = new ArrayDeque<ComponentSelectionDescriptorInternal>(1); descriptions.add((ComponentSelectionDescriptorInternal) description); } public DefaultComponentSelectionReason(List<? 
extends ComponentSelectionDescriptor> descriptions) { this.descriptions = new ArrayDeque<ComponentSelectionDescriptorInternal>(1); for (ComponentSelectionDescriptor description : descriptions) { addCause(description); } } public boolean isForced() { return hasCause(ComponentSelectionCause.FORCED); } private boolean hasCause(ComponentSelectionCause cause) { for (ComponentSelectionDescriptor description : descriptions) { if (description.getCause() == cause) { return true; } } return false; } public boolean isConflictResolution() { return hasCause(ComponentSelectionCause.CONFLICT_RESOLUTION); } public boolean isSelectedByRule() { return hasCause(ComponentSelectionCause.SELECTED_BY_RULE); } public boolean isExpected() { return isCauseExpected(Iterables.getLast(descriptions)); } public boolean isCompositeSubstitution() { return hasCause(ComponentSelectionCause.COMPOSITE_BUILD); } public String getDescription() { // for backwards compatibility, we use the last added description return descriptions.getLast().toString(); } public String toString() { return getDescription(); } @Override public ComponentSelectionReasonInternal addCause(ComponentSelectionCause cause, Describable description) { addCause(new DefaultComponentSelectionDescriptor(cause, description)); return this; } @Override public ComponentSelectionReasonInternal setCause(ComponentSelectionDescriptor description) { descriptions.clear(); addCause(description); return this; } @Override public ComponentSelectionReasonInternal addCause(ComponentSelectionDescriptor description) { ComponentSelectionDescriptorInternal descriptor = (ComponentSelectionDescriptorInternal) description; if (!descriptions.contains(descriptor)) { descriptions.add(descriptor); } return this; } @Override public List<ComponentSelectionDescriptor> getDescriptions() { return ImmutableList.<ComponentSelectionDescriptor>copyOf(descriptions); } @Override public boolean isConstrained() { return hasCause(ComponentSelectionCause.CONSTRAINT); } @Override 
public boolean hasCustomDescriptions() { for (ComponentSelectionDescriptorInternal description : descriptions) { if (description.hasCustomDescription()) { return true; } } return false; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DefaultComponentSelectionReason that = (DefaultComponentSelectionReason) o; return sameDescriptorsAs(that); } private boolean sameDescriptorsAs(DefaultComponentSelectionReason that) { if (descriptions.size() != that.descriptions.size()) { return false; } Iterator<ComponentSelectionDescriptorInternal> it1 = descriptions.iterator(); Iterator<ComponentSelectionDescriptorInternal> it2 = descriptions.iterator(); while (it1.hasNext()) { if (!it1.next().equals(it2.next())) { return false; } } return true; } @Override public int hashCode() { return Objects.hashCode(descriptions); } } }
lsmaira/gradle
subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/ivyservice/resolveengine/result/VersionSelectionReasons.java
Java
apache-2.0
7,918
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/chime/model/UpdateVoiceConnectorGroupRequest.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Chime::Model; using namespace Aws::Utils::Json; using namespace Aws::Utils; UpdateVoiceConnectorGroupRequest::UpdateVoiceConnectorGroupRequest() : m_voiceConnectorGroupIdHasBeenSet(false), m_nameHasBeenSet(false), m_voiceConnectorItemsHasBeenSet(false) { } Aws::String UpdateVoiceConnectorGroupRequest::SerializePayload() const { JsonValue payload; if(m_nameHasBeenSet) { payload.WithString("Name", m_name); } if(m_voiceConnectorItemsHasBeenSet) { Array<JsonValue> voiceConnectorItemsJsonList(m_voiceConnectorItems.size()); for(unsigned voiceConnectorItemsIndex = 0; voiceConnectorItemsIndex < voiceConnectorItemsJsonList.GetLength(); ++voiceConnectorItemsIndex) { voiceConnectorItemsJsonList[voiceConnectorItemsIndex].AsObject(m_voiceConnectorItems[voiceConnectorItemsIndex].Jsonize()); } payload.WithArray("VoiceConnectorItems", std::move(voiceConnectorItemsJsonList)); } return payload.View().WriteReadable(); }
cedral/aws-sdk-cpp
aws-cpp-sdk-chime/source/model/UpdateVoiceConnectorGroupRequest.cpp
C++
apache-2.0
1,706
# Celmisia sessiliflora var. sessiliflora VARIETY #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Celmisia sessiliflora/ Syn. Celmisia sessiliflora sessiliflora/README.md
Markdown
apache-2.0
196
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.neptune.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBClusterParameterGroups" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeDBClusterParameterGroupsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name of a specific DB cluster parameter group to return details for. * </p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> * </ul> */ private String dBClusterParameterGroupName; /** * <p> * This parameter is not currently supported. * </p> */ private java.util.List<Filter> filters; /** * <p> * The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved. * </p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. * </p> */ private Integer maxRecords; /** * <p> * An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> request. 
If * this parameter is specified, the response includes only records beyond the marker, up to the value specified by * <code>MaxRecords</code>. * </p> */ private String marker; /** * <p> * The name of a specific DB cluster parameter group to return details for. * </p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> * </ul> * * @param dBClusterParameterGroupName * The name of a specific DB cluster parameter group to return details for.</p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> */ public void setDBClusterParameterGroupName(String dBClusterParameterGroupName) { this.dBClusterParameterGroupName = dBClusterParameterGroupName; } /** * <p> * The name of a specific DB cluster parameter group to return details for. * </p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> * </ul> * * @return The name of a specific DB cluster parameter group to return details for.</p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> */ public String getDBClusterParameterGroupName() { return this.dBClusterParameterGroupName; } /** * <p> * The name of a specific DB cluster parameter group to return details for. * </p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> * </ul> * * @param dBClusterParameterGroupName * The name of a specific DB cluster parameter group to return details for.</p> * <p> * Constraints: * </p> * <ul> * <li> * <p> * If supplied, must match the name of an existing DBClusterParameterGroup. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. 
*/ public DescribeDBClusterParameterGroupsRequest withDBClusterParameterGroupName(String dBClusterParameterGroupName) { setDBClusterParameterGroupName(dBClusterParameterGroupName); return this; } /** * <p> * This parameter is not currently supported. * </p> * * @return This parameter is not currently supported. */ public java.util.List<Filter> getFilters() { return filters; } /** * <p> * This parameter is not currently supported. * </p> * * @param filters * This parameter is not currently supported. */ public void setFilters(java.util.Collection<Filter> filters) { if (filters == null) { this.filters = null; return; } this.filters = new java.util.ArrayList<Filter>(filters); } /** * <p> * This parameter is not currently supported. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setFilters(java.util.Collection)} or {@link #withFilters(java.util.Collection)} if you want to override * the existing values. * </p> * * @param filters * This parameter is not currently supported. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDBClusterParameterGroupsRequest withFilters(Filter... filters) { if (this.filters == null) { setFilters(new java.util.ArrayList<Filter>(filters.length)); } for (Filter ele : filters) { this.filters.add(ele); } return this; } /** * <p> * This parameter is not currently supported. * </p> * * @param filters * This parameter is not currently supported. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDBClusterParameterGroupsRequest withFilters(java.util.Collection<Filter> filters) { setFilters(filters); return this; } /** * <p> * The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved. 
* </p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. * </p> * * @param maxRecords * The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved.</p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. */ public void setMaxRecords(Integer maxRecords) { this.maxRecords = maxRecords; } /** * <p> * The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved. * </p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. * </p> * * @return The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved.</p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. */ public Integer getMaxRecords() { return this.maxRecords; } /** * <p> * The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved. * </p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. * </p> * * @param maxRecords * The maximum number of records to include in the response. If more records exist than the specified * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the * remaining results can be retrieved.</p> * <p> * Default: 100 * </p> * <p> * Constraints: Minimum 20, maximum 100. 
* @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDBClusterParameterGroupsRequest withMaxRecords(Integer maxRecords) { setMaxRecords(maxRecords); return this; } /** * <p> * An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> request. If * this parameter is specified, the response includes only records beyond the marker, up to the value specified by * <code>MaxRecords</code>. * </p> * * @param marker * An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> request. * If this parameter is specified, the response includes only records beyond the marker, up to the value * specified by <code>MaxRecords</code>. */ public void setMarker(String marker) { this.marker = marker; } /** * <p> * An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> request. If * this parameter is specified, the response includes only records beyond the marker, up to the value specified by * <code>MaxRecords</code>. * </p> * * @return An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> * request. If this parameter is specified, the response includes only records beyond the marker, up to the * value specified by <code>MaxRecords</code>. */ public String getMarker() { return this.marker; } /** * <p> * An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> request. If * this parameter is specified, the response includes only records beyond the marker, up to the value specified by * <code>MaxRecords</code>. * </p> * * @param marker * An optional pagination token provided by a previous <code>DescribeDBClusterParameterGroups</code> request. * If this parameter is specified, the response includes only records beyond the marker, up to the value * specified by <code>MaxRecords</code>. 
* @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDBClusterParameterGroupsRequest withMarker(String marker) { setMarker(marker); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getDBClusterParameterGroupName() != null) sb.append("DBClusterParameterGroupName: ").append(getDBClusterParameterGroupName()).append(","); if (getFilters() != null) sb.append("Filters: ").append(getFilters()).append(","); if (getMaxRecords() != null) sb.append("MaxRecords: ").append(getMaxRecords()).append(","); if (getMarker() != null) sb.append("Marker: ").append(getMarker()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeDBClusterParameterGroupsRequest == false) return false; DescribeDBClusterParameterGroupsRequest other = (DescribeDBClusterParameterGroupsRequest) obj; if (other.getDBClusterParameterGroupName() == null ^ this.getDBClusterParameterGroupName() == null) return false; if (other.getDBClusterParameterGroupName() != null && other.getDBClusterParameterGroupName().equals(this.getDBClusterParameterGroupName()) == false) return false; if (other.getFilters() == null ^ this.getFilters() == null) return false; if (other.getFilters() != null && other.getFilters().equals(this.getFilters()) == false) return false; if (other.getMaxRecords() == null ^ this.getMaxRecords() == null) return false; if (other.getMaxRecords() != null && other.getMaxRecords().equals(this.getMaxRecords()) == false) return false; if (other.getMarker() == null ^ this.getMarker() == null) return false; if 
(other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getDBClusterParameterGroupName() == null) ? 0 : getDBClusterParameterGroupName().hashCode()); hashCode = prime * hashCode + ((getFilters() == null) ? 0 : getFilters().hashCode()); hashCode = prime * hashCode + ((getMaxRecords() == null) ? 0 : getMaxRecords().hashCode()); hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode()); return hashCode; } @Override public DescribeDBClusterParameterGroupsRequest clone() { return (DescribeDBClusterParameterGroupsRequest) super.clone(); } }
jentfoo/aws-sdk-java
aws-java-sdk-neptune/src/main/java/com/amazonaws/services/neptune/model/DescribeDBClusterParameterGroupsRequest.java
Java
apache-2.0
15,228
// @flow import React, { Component } from "react"; import PropTypes from "prop-types"; import { bindActionCreators } from "redux"; import { connect } from "react-redux"; import * as actions from "./actions"; import { defaultStyles } from "../../styles/default-styles"; import { StyleSheet, View, TouchableOpacity, Text } from "react-native"; import Toggle from "../../components/toggle/toggle"; import circleTurquoise from "../../assets/images/circle-turquoise.png"; import circleBlue from "../../assets/images/circle-blue.png"; import circleRed from "../../assets/images/circle-red.png"; import circleYellow from "../../assets/images/circle-yellow.png"; import circleGreen from "../../assets/images/circle-green.png"; import circleOrange from "../../assets/images/circle-orange.png"; const myStyles = { toggle: { height: 50 } }; const combinedStyles = Object.assign({}, defaultStyles, myStyles); const styles = StyleSheet.create(combinedStyles); class TrashToggles extends Component { static propTypes = { actions: PropTypes.object, close: PropTypes.func, messages: PropTypes.array, navigation: PropTypes.object, supplyPickupToggle: PropTypes.bool, uncollectedTrashToggle: PropTypes.bool, trashDropOffToggle: PropTypes.bool, myTrashToggle: PropTypes.bool, collectedTrashToggle: PropTypes.bool, cleanAreasToggle: PropTypes.bool }; constructor(props) { super(props); } render() { return ( <View style={ [styles.frame, { paddingTop: 30 }] }> <View style={ [styles.singleButtonHeader, { backgroundColor: "#EEE", marginTop: 10 }] }> <View style={ styles.buttonBar }> <View style={ styles.buttonBarButton }> <TouchableOpacity style={ styles.headerButton } onPress={ this.props.close }> <Text style={ styles.headerButtonText }>{"Close"}</Text> </TouchableOpacity> </View> </View> </View> <View style={ [styles.infoBlockContainer, { height: 300 }] }> <Toggle style={ styles.toggle } icon={ circleYellow } label="My Trash" value={ this.props.myTrashToggle } onValueChange={ () => 
this.props.actions.toggleTrashData("myTrashToggle", !this.props.myTrashToggle) }/> <Toggle style={ styles.toggle } icon={ circleRed } label="Uncollected Trash" value={ this.props.uncollectedTrashToggle } onValueChange={ () => this.props.actions.toggleTrashData("uncollectedTrashToggle", !this.props.uncollectedTrashToggle) }/> <Toggle style={ styles.toggle } icon={ circleBlue } label="Trash Drop-Offs" value={ this.props.trashDropOffToggle } onValueChange={ () => this.props.actions.toggleTrashData("trashDropOffToggle", !this.props.trashDropOffToggle) }/> <Toggle style={ styles.toggle } icon={ circleGreen } label="Supply Pickups" value={ this.props.supplyPickupToggle } onValueChange={ () => this.props.actions.toggleTrashData("supplyPickupToggle", !this.props.supplyPickupToggle) }/> <Toggle style={ styles.toggle } icon={ circleTurquoise } label="Collected Trash" value={ this.props.collectedTrashToggle } onValueChange={ () => this.props.actions.toggleTrashData("collectedTrashToggle", !this.props.collectedTrashToggle) }/> <Toggle style={ styles.toggle } icon={ circleOrange } label="Team Cleaning Areas" value={ this.props.cleanAreasToggle } onValueChange={ () => this.props.actions.toggleTrashData("cleanAreasToggle", !this.props.cleanAreasToggle) }/> </View> </View> ); } } function mapStateToProps(state) { const collectedTrashToggle = state.trashTracker.collectedTrashToggle; const supplyPickupToggle = state.trashTracker.supplyPickupToggle; const uncollectedTrashToggle = state.trashTracker.uncollectedTrashToggle; const trashDropOffToggle = state.trashTracker.trashDropOffToggle; const myTrashToggle = state.trashTracker.myTrashToggle; const cleanAreasToggle = state.trashTracker.cleanAreasToggle; return { collectedTrashToggle, supplyPickupToggle, uncollectedTrashToggle, trashDropOffToggle, myTrashToggle, cleanAreasToggle }; } function mapDispatchToProps(dispatch) { return { actions: bindActionCreators(actions, dispatch) }; } export default connect(mapStateToProps, 
mapDispatchToProps)(TrashToggles);
johnneed/GreenUpVermont
screens/trash-tracker-screen/trash-toggles.js
JavaScript
apache-2.0
5,277
/* Copyright 2018 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using ArcGIS.Desktop.Framework; //added references using ArcGIS.Desktop.Core; using ArcGIS.Desktop.Layouts; using ArcGIS.Desktop.Mapping; using ArcGIS.Desktop.Framework.Threading.Tasks; using ArcGIS.Desktop.Framework.Contracts; using ArcGIS.Core.CIM; using ArcGIS.Core.Geometry; using ArcGIS.Core.Data; namespace ProSnippetsTasks { class Snippets { public async void Examples() { #region Create a Scene Layer var sceneLayerUrl = @"https://myportal.com/server/rest/services/Hosted/SceneLayerServiceName/SceneServer"; //portal items also ok as long as the portal is the current active portal... //var sceneLayerUrl = @"https://myportal.com/home/item.html?id=123456789abcdef1234567890abcdef0"; await QueuedTask.Run(() => { //Create with initial visibility set to false. Add to current scene var createparams = new LayerCreationParams(new Uri(sceneLayerUrl, UriKind.Absolute)) { IsVisible = false }; //cast to specific type of scene layer being created - in this case FeatureSceneLayer var sceneLayer = LayerFactory.Instance.CreateLayer<Layer>(createparams, MapView.Active.Map) as FeatureSceneLayer; //or...specify the cast directly var sceneLayer2 = LayerFactory.Instance.CreateLayer<FeatureSceneLayer>(createparams, MapView.Active.Map); //ditto for BuildingSceneLayer, PointCloudSceneLayer, IntegratedMeshSceneLayer //... }); #endregion } } }
Esri/arcgis-pro-sdk
Examples/SceneLayers/ProSnippets.cs
C#
apache-2.0
2,211
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_03) on Mon Sep 10 14:25:59 CDT 2012 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>Uses of Interface org.eclipse.jetty.annotations.ClassNameResolver (Jetty :: Aggregate :: All core Jetty 8.1.7.v20120910 API)</title> <meta name="date" content="2012-09-10"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.eclipse.jetty.annotations.ClassNameResolver (Jetty :: Aggregate :: All core Jetty 8.1.7.v20120910 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/eclipse/jetty/annotations//class-useClassNameResolver.html" target="_top">Frames</a></li> <li><a href="ClassNameResolver.html" 
target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.eclipse.jetty.annotations.ClassNameResolver" class="title">Uses of Interface<br>org.eclipse.jetty.annotations.ClassNameResolver</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.eclipse.jetty.annotations">org.eclipse.jetty.annotations</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.eclipse.jetty.annotations"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a> in <a href="../../../../../org/eclipse/jetty/annotations/package-summary.html">org.eclipse.jetty.annotations</a></h3> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a 
href="../../../../../org/eclipse/jetty/annotations/package-summary.html">org.eclipse.jetty.annotations</a> with parameters of type <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.lang.Class, org.eclipse.jetty.annotations.ClassNameResolver, boolean)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&nbsp;clazz, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver, boolean&nbsp;visitSuperClasses)</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.lang.ClassLoader, boolean, boolean, org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/lang/ClassLoader.html?is-external=true" title="class or interface in java.lang">ClassLoader</a>&nbsp;loader, boolean&nbsp;visitParents, boolean&nbsp;nullInclusive, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code> <div class="block">Find annotations on classes in the supplied classloader.</div> </td> </tr> <tr 
class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.util.List, org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;classNames, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(org.eclipse.jetty.util.resource.Resource, org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="../../../../../org/eclipse/jetty/util/resource/Resource.html" title="class in org.eclipse.jetty.util.resource">Resource</a>&nbsp;dir, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code>&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.lang.String[], org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;classNames, <a 
href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.lang.String, org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;className, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code>&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.net.URI[], org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/net/URI.html?is-external=true" title="class or interface in java.net">URI</a>[]&nbsp;uris, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code> <div class="block">Find annotations in classes in the supplied url of jar files.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="strong">AnnotationParser.</span><code><strong><a href="../../../../../org/eclipse/jetty/annotations/AnnotationParser.html#parse(java.net.URI, org.eclipse.jetty.annotations.ClassNameResolver)">parse</a></strong>(<a href="http://download.oracle.com/javase/6/docs/api/java/net/URI.html?is-external=true" title="class 
or interface in java.net">URI</a>&nbsp;uri, <a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">ClassNameResolver</a>&nbsp;resolver)</code>&nbsp;</td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/eclipse/jetty/annotations/ClassNameResolver.html" title="interface in org.eclipse.jetty.annotations">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/eclipse/jetty/annotations//class-useClassNameResolver.html" target="_top">Frames</a></li> <li><a href="ClassNameResolver.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 1995-2012 <a href="http://www.mortbay.com">Mort Bay Consulting</a>. 
All Rights Reserved.</small></p> </body> </html>
friendranjith/vizzly
jetty-runtime/javadoc/org/eclipse/jetty/annotations/class-use/ClassNameResolver.html
HTML
apache-2.0
12,063
/*-------------------------------------------------------------------------+
|                                                                          |
| Copyright 2005-2012 the ConQAT Project                                   |
|                                                                          |
| Licensed under the Apache License, Version 2.0 (the "License");          |
| you may not use this file except in compliance with the License.         |
| You may obtain a copy of the License at                                  |
|                                                                          |
|    http://www.apache.org/licenses/LICENSE-2.0                            |
|                                                                          |
| Unless required by applicable law or agreed to in writing, software      |
| distributed under the License is distributed on an "AS IS" BASIS,        |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and      |
| limitations under the License.                                           |
+-------------------------------------------------------------------------*/

goog.provide('conqat.config.LogPage');

goog.require('conqat.config.DashboardPageBase');
goog.require('conqat.config.UnitEditPart');
goog.require('conqat.config.TemplateUtils');
goog.require('goog.dom');
goog.require('goog.dom.classes');
goog.require('goog.dom.dataset');
goog.require('goog.dom.query');
goog.require('goog.events');
goog.require('goog.events.EventType');
goog.require('goog.soy');
goog.require('goog.array');
goog.require('goog.style');
goog.require('goog.string');
goog.require('goog.object');
goog.require('goog.i18n.DateTimeFormat');

/**
 * Base class that handles the workflow of the log page. Saves and manipulates
 * the entries in the dashboard's log.
 *
 * @ConQAT.Rating GREEN Hash: 669B42C2D05EAF19026A09808629C09E
 * @constructor
 * @extends {conqat.config.DashboardPageBase}
 */
conqat.config.LogPage = function() {
	goog.base(this);

	/**
	 * The levels that are not filtered out by #isProcessorVisible().
	 *
	 * @private
	 * @type {Object.<string, boolean>}
	 */
	this.visibleLevels = {
		'fatal' : true,
		'error' : true,
		'warn' : true
	};

	/**
	 * The processor id that is currently displayed. null, if all processors are
	 * shown.
	 *
	 * @type {?number}
	 */
	this.processorFilter = null;
};
goog.inherits(conqat.config.LogPage, conqat.config.DashboardPageBase);

/**
 * The log singleton.
 *
 * @see conqat.config.LogPage.create()
 *
 * @private
 * @type {conqat.config.LogPage}
 */
conqat.config.LogPage.instance;

/**
 * Returns the Log singleton. Creates it if necessary.
 *
 * @private
 * @returns {conqat.config.LogPage} The Log singleton.
 */
conqat.config.LogPage.getInstance = function() {
	if (!conqat.config.LogPage.instance) {
		conqat.config.LogPage.instance = new conqat.config.LogPage();
	}
	return conqat.config.LogPage.instance;
};

/**
 * Opens the log page for the processor with the given ID. The processor id is
 * passed via the URL fragment and picked up by #onHistoryChanged().
 *
 * @public
 * @param {number} id The ID of the processor for which to open the log page.
 */
conqat.config.LogPage.openForProcessor = function(id) {
	location = 'log.html#' + id;
};

/**
 * Creates a log page and renders its initial content.
 *
 * @public
 */
conqat.config.LogPage.create = function() {
	var log = conqat.config.LogPage.getInstance();
	log.update();
};

/**
 * Listens to click events on the last inserted checkbox and filters the log
 * according to its state.
 *
 * @public
 * @param {string} logLevel The level for which the checkbox is responsible.
 */
conqat.config.LogPage.setupFilterCheckbox = function(logLevel) {
	// The template inserts the checkbox right before this is called, so the
	// last <input> on the page is the one belonging to logLevel.
	var filterBoxes = goog.dom.getElementsByTagNameAndClass('input');
	var checkBox = filterBoxes[filterBoxes.length - 1];
	var log = conqat.config.LogPage.getInstance();
	checkBox.checked = log.visibleLevels[logLevel];
	goog.events.listen(checkBox, goog.events.EventType.CLICK, function() {
		log.visibleLevels[logLevel] = checkBox.checked;
		log.update();
	});
};

/**
 * @private
 * @returns {Element} The log table of the current page.
 */
conqat.config.LogPage.getLogTable = function() {
	return goog.dom.getElement('log-table');
};

/** @inheritDoc */
conqat.config.LogPage.prototype.onHistoryChanged = function(event) {
	// An empty history token means "show all processors".
	var token = goog.string.isEmptySafe(event.token) ? null : goog.string
			.toNumber(event.token);
	if (this.processorFilter != token) {
		this.processorFilter = token;
		this.update();
	}
};

/**
 * Checks whether the processor with the given id and log level should be shown.
 *
 * @private
 * @param {number} processorId The id of the processor that logged the entry.
 * @param {string} logLevel The level of the log entry.
 * @returns {boolean} Whether the entry passes both the processor and the
 *          level filter.
 */
conqat.config.LogPage.prototype.isProcessorVisible = function(processorId,
		logLevel) {
	if (this.processorFilter && processorId != this.processorFilter) {
		return false;
	}
	if (!this.visibleLevels[logLevel]) {
		return false;
	}
	return true;
};

/**
 * Updates the log page after settings have changed and when it is first
 * created.
 *
 * @public
 */
conqat.config.LogPage.prototype.update = function() {
	this.updateLogTable();
	this.updateParametersTable();
	this.updateSubtitle();
};

/**
 * Updates the log table after settings have changed.
 *
 * @private
 */
conqat.config.LogPage.prototype.updateLogTable = function() {
	var that = this;
	var logTableHolder = goog.dom.getElement('log-table');
	var dateFormat = new goog.i18n.DateTimeFormat('HH:mm:ss.SSS');

	var data = {
		rows : [],
		tableHeaderClass : conqat.config.DashboardPageBase
				.getCSSClass('tableHeader'),
		evenRowClass : conqat.config.DashboardPageBase.getCSSClass('evenRow'),
		oddRowClass : conqat.config.DashboardPageBase.getCSSClass('oddRow')
	};

	// Collect the processor id of each rendered row so the click handlers
	// below can map a table row back to its processor.
	var processorIds = [];
	goog.array.forEach(conqat.config.DashboardPageBase.log, function(logEntry,
			index) {
		var processorId = logEntry[0];
		var level = logEntry[1];
		if (!that.isProcessorVisible(processorId, level)) {
			return;
		}
		var date = new Date(logEntry[3]);
		var processor = new conqat.config.UnitEditPart(processorId);
		var rowData = {
			name : processor.getName(),
			level : level,
			message : logEntry[2],
			time : dateFormat.format(date)
		};
		data.rows.push(rowData);
		processorIds.push(processorId);
	});

	if (goog.array.isEmpty(data.rows)) {
		goog.soy.renderElement(logTableHolder,
				conqat.config.DashboardTemplate.emptyLogTable);
		return;
	}

	goog.soy.renderElement(logTableHolder,
			conqat.config.DashboardTemplate.logTable, data);

	// Make each processor name a link that restricts the log to that
	// processor.
	var rows = goog.dom.query('tbody tr', logTableHolder);
	goog.array.forEach(rows, function(row, index) {
		var processorId = processorIds[index];
		var filterLink = goog.dom.getElementsByTagNameAndClass('a',
				'processor', row)[0];
		goog.events.listen(filterLink, goog.events.EventType.CLICK, function(
				event) {
			event.preventDefault();
			that.setHistoryToken(processorId);
			// Fixed: this previously assigned `that.filterProcessor`, a typo
			// for the `processorFilter` field, so the immediate update below
			// ran with the stale filter.
			that.processorFilter = processorId;
			that.update();
		});
	});
};

/**
 * Updates the parameters table after settings have changed. Shows the
 * parameters of the filtered processor or clears the table if no filter is
 * set.
 *
 * @private
 */
conqat.config.LogPage.prototype.updateParametersTable = function() {
	conqat.config.TemplateUtils.clearParametersTable();
	if (this.processorFilter) {
		conqat.config.TemplateUtils.renderParametersTable(this.processorFilter);
	}
};

/**
 * Updates the page subtitle after settings have changed.
 *
 * @private
 */
conqat.config.LogPage.prototype.updateSubtitle = function() {
	var subtitle;
	if (this.processorFilter) {
		var processor = new conqat.config.UnitEditPart(this.processorFilter);
		subtitle = "Log messages for processor " + processor.getName();
	} else {
		subtitle = "All log messages generated during ConQAT run.";
	}
	var subtitleElement = goog.dom.getElement("caption-subtitle");
	goog.dom.setTextContent(subtitleElement, subtitle);
};
vimaier/conqat
org.conqat.engine.html_presentation/src/org/conqat/engine/html_presentation/javascript/config/LogPage.js
JavaScript
apache-2.0
7,919
/* @internal */
namespace ts {
    // Selects the module-format transformer that matches the requested module kind.
    function getModuleTransformer(moduleKind: ModuleKind): TransformerFactory<SourceFile | Bundle> {
        switch (moduleKind) {
            case ModuleKind.ESNext:
            case ModuleKind.ES2015:
                return transformES2015Module;
            case ModuleKind.System:
                return transformSystemModule;
            default:
                return transformModule;
        }
    }

    // Lifecycle of a transformation run; several operations below assert the
    // current state to forbid, e.g., hook changes after initialization.
    const enum TransformationState {
        Uninitialized,
        Initialized,
        Completed,
        Disposed
    }

    // Per-SyntaxKind feature bits tracked in `enabledSyntaxKindFeatures`.
    const enum SyntaxKindFeatureFlags {
        Substitution = 1 << 0,
        EmitNotifications = 1 << 1,
    }

    // Builds the ordered transformer pipeline for the given compiler options:
    // custom "before" transforms, TypeScript/JSX erasure, per-target
    // downleveling (newest to oldest), module transform, ES5 substitution,
    // then custom "after" transforms.
    export function getTransformers(compilerOptions: CompilerOptions, customTransformers?: CustomTransformers) {
        const jsx = compilerOptions.jsx;
        const languageVersion = getEmitScriptTarget(compilerOptions);
        const moduleKind = getEmitModuleKind(compilerOptions);
        const transformers: TransformerFactory<SourceFile | Bundle>[] = [];

        addRange(transformers, customTransformers && customTransformers.before);

        transformers.push(transformTypeScript);

        if (jsx === JsxEmit.React) {
            transformers.push(transformJsx);
        }

        if (languageVersion < ScriptTarget.ESNext) {
            transformers.push(transformESNext);
        }

        if (languageVersion < ScriptTarget.ES2019) {
            transformers.push(transformES2019);
        }

        if (languageVersion < ScriptTarget.ES2018) {
            transformers.push(transformES2018);
        }

        if (languageVersion < ScriptTarget.ES2017) {
            transformers.push(transformES2017);
        }

        if (languageVersion < ScriptTarget.ES2016) {
            transformers.push(transformES2016);
        }

        if (languageVersion < ScriptTarget.ES2015) {
            transformers.push(transformES2015);
            transformers.push(transformGenerators);
        }

        transformers.push(getModuleTransformer(moduleKind));

        // The ES5 transformer is last so that it can substitute expressions like `exports.default`
        // for ES3.
        if (languageVersion < ScriptTarget.ES5) {
            transformers.push(transformES5);
        }

        addRange(transformers, customTransformers && customTransformers.after);

        return transformers;
    }

    // Identity substitution hook; default value for `onSubstituteNode`.
    export function noEmitSubstitution(_hint: EmitHint, node: Node) {
        return node;
    }

    // Pass-through notification hook; default value for `onEmitNode`.
    export function noEmitNotification(hint: EmitHint, node: Node, callback: (hint: EmitHint, node: Node) => void) {
        callback(hint, node);
    }

    /**
     * Transforms an array of SourceFiles by passing them through each transformer.
     *
     * @param resolver The emit resolver provided by the checker.
     * @param host The emit host object used to interact with the file system.
     * @param options Compiler options to surface in the `TransformationContext`.
     * @param nodes An array of nodes to transform.
     * @param transformers An array of `TransformerFactory` callbacks.
     * @param allowDtsFiles A value indicating whether to allow the transformation of .d.ts files.
     */
    export function transformNodes<T extends Node>(resolver: EmitResolver | undefined, host: EmitHost | undefined, options: CompilerOptions, nodes: ReadonlyArray<T>, transformers: ReadonlyArray<TransformerFactory<T>>, allowDtsFiles: boolean): TransformationResult<T> {
        const enabledSyntaxKindFeatures = new Array<SyntaxKindFeatureFlags>(SyntaxKind.Count);
        let lexicalEnvironmentVariableDeclarations: VariableDeclaration[];
        let lexicalEnvironmentFunctionDeclarations: FunctionDeclaration[];
        let lexicalEnvironmentVariableDeclarationsStack: VariableDeclaration[][] = [];
        let lexicalEnvironmentFunctionDeclarationsStack: FunctionDeclaration[][] = [];
        let lexicalEnvironmentStackOffset = 0;
        let lexicalEnvironmentSuspended = false;
        let emitHelpers: EmitHelper[] | undefined;
        let onSubstituteNode: TransformationContext["onSubstituteNode"] = noEmitSubstitution;
        let onEmitNode: TransformationContext["onEmitNode"] = noEmitNotification;
        let state = TransformationState.Uninitialized;
        const diagnostics: DiagnosticWithLocation[] = [];

        // The transformation context is provided to each transformer as part of transformer
        // initialization.
        const context: TransformationContext = {
            getCompilerOptions: () => options,
            getEmitResolver: () => resolver!, // TODO: GH#18217
            getEmitHost: () => host!, // TODO: GH#18217
            startLexicalEnvironment,
            suspendLexicalEnvironment,
            resumeLexicalEnvironment,
            endLexicalEnvironment,
            hoistVariableDeclaration,
            hoistFunctionDeclaration,
            requestEmitHelper,
            readEmitHelpers,
            enableSubstitution,
            enableEmitNotification,
            isSubstitutionEnabled,
            isEmitNotificationEnabled,
            get onSubstituteNode() { return onSubstituteNode; },
            set onSubstituteNode(value) {
                Debug.assert(state < TransformationState.Initialized, "Cannot modify transformation hooks after initialization has completed.");
                Debug.assert(value !== undefined, "Value must not be 'undefined'");
                onSubstituteNode = value;
            },
            get onEmitNode() { return onEmitNode; },
            set onEmitNode(value) {
                Debug.assert(state < TransformationState.Initialized, "Cannot modify transformation hooks after initialization has completed.");
                Debug.assert(value !== undefined, "Value must not be 'undefined'");
                onEmitNode = value;
            },
            addDiagnostic(diag) {
                diagnostics.push(diag);
            }
        };

        // Ensure the parse tree is clean before applying transformations
        for (const node of nodes) {
            disposeEmitNodes(getSourceFileOfNode(getParseTreeNode(node)));
        }

        performance.mark("beforeTransform");

        // Chain together and initialize each transformer.
        const transformation = chain(...transformers)(context);

        // prevent modification of transformation hooks.
        state = TransformationState.Initialized;

        // Transform each node.
        const transformed = map(nodes, allowDtsFiles ? transformation : transformRoot);

        // prevent modification of the lexical environment.
        state = TransformationState.Completed;

        performance.mark("afterTransform");
        performance.measure("transformTime", "beforeTransform", "afterTransform");

        return {
            transformed,
            substituteNode,
            emitNodeWithNotification,
            dispose,
            diagnostics
        };

        // Applies the transformation only to non-declaration source files
        // (used when .d.ts transformation is disallowed).
        function transformRoot(node: T) {
            return node && (!isSourceFile(node) || !node.isDeclarationFile) ? transformation(node) : node;
        }

        /**
         * Enables expression substitutions in the pretty printer for the provided SyntaxKind.
         */
        function enableSubstitution(kind: SyntaxKind) {
            Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed.");
            enabledSyntaxKindFeatures[kind] |= SyntaxKindFeatureFlags.Substitution;
        }

        /**
         * Determines whether expression substitutions are enabled for the provided node.
         */
        function isSubstitutionEnabled(node: Node) {
            return (enabledSyntaxKindFeatures[node.kind] & SyntaxKindFeatureFlags.Substitution) !== 0
                && (getEmitFlags(node) & EmitFlags.NoSubstitution) === 0;
        }

        /**
         * Emits a node with possible substitution.
         *
         * @param hint A hint as to the intended usage of the node.
         * @param node The node to emit.
         * @param emitCallback The callback used to emit the node or its substitute.
         */
        function substituteNode(hint: EmitHint, node: Node) {
            Debug.assert(state < TransformationState.Disposed, "Cannot substitute a node after the result is disposed.");
            return node && isSubstitutionEnabled(node) && onSubstituteNode(hint, node) || node;
        }

        /**
         * Enables before/after emit notifications in the pretty printer for the provided SyntaxKind.
         */
        function enableEmitNotification(kind: SyntaxKind) {
            Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed.");
            enabledSyntaxKindFeatures[kind] |= SyntaxKindFeatureFlags.EmitNotifications;
        }

        /**
         * Determines whether before/after emit notifications should be raised in the pretty
         * printer when it emits a node.
         */
        function isEmitNotificationEnabled(node: Node) {
            return (enabledSyntaxKindFeatures[node.kind] & SyntaxKindFeatureFlags.EmitNotifications) !== 0
                || (getEmitFlags(node) & EmitFlags.AdviseOnEmitNode) !== 0;
        }

        /**
         * Emits a node with possible emit notification.
         *
         * @param hint A hint as to the intended usage of the node.
         * @param node The node to emit.
         * @param emitCallback The callback used to emit the node.
         */
        function emitNodeWithNotification(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
            Debug.assert(state < TransformationState.Disposed, "Cannot invoke TransformationResult callbacks after the result is disposed.");
            if (node) {
                if (isEmitNotificationEnabled(node)) {
                    onEmitNode(hint, node, emitCallback);
                }
                else {
                    emitCallback(hint, node);
                }
            }
        }

        /**
         * Records a hoisted variable declaration for the provided name within a lexical environment.
         */
        function hoistVariableDeclaration(name: Identifier): void {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
            const decl = setEmitFlags(createVariableDeclaration(name), EmitFlags.NoNestedSourceMaps);
            if (!lexicalEnvironmentVariableDeclarations) {
                lexicalEnvironmentVariableDeclarations = [decl];
            }
            else {
                lexicalEnvironmentVariableDeclarations.push(decl);
            }
        }

        /**
         * Records a hoisted function declaration within a lexical environment.
         */
        function hoistFunctionDeclaration(func: FunctionDeclaration): void {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
            if (!lexicalEnvironmentFunctionDeclarations) {
                lexicalEnvironmentFunctionDeclarations = [func];
            }
            else {
                lexicalEnvironmentFunctionDeclarations.push(func);
            }
        }

        /**
         * Starts a new lexical environment. Any existing hoisted variable or function declarations
         * are pushed onto a stack, and the related storage variables are reset.
         */
        function startLexicalEnvironment(): void {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
            Debug.assert(!lexicalEnvironmentSuspended, "Lexical environment is suspended.");

            // Save the current lexical environment. Rather than resizing the array we adjust the
            // stack size variable. This allows us to reuse existing array slots we've
            // already allocated between transformations to avoid allocation and GC overhead during
            // transformation.
            lexicalEnvironmentVariableDeclarationsStack[lexicalEnvironmentStackOffset] = lexicalEnvironmentVariableDeclarations;
            lexicalEnvironmentFunctionDeclarationsStack[lexicalEnvironmentStackOffset] = lexicalEnvironmentFunctionDeclarations;
            lexicalEnvironmentStackOffset++;
            lexicalEnvironmentVariableDeclarations = undefined!;
            lexicalEnvironmentFunctionDeclarations = undefined!;
        }

        /** Suspends the current lexical environment, usually after visiting a parameter list. */
        function suspendLexicalEnvironment(): void {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
            Debug.assert(!lexicalEnvironmentSuspended, "Lexical environment is already suspended.");
            lexicalEnvironmentSuspended = true;
        }

        /** Resumes a suspended lexical environment, usually before visiting a function body. */
        function resumeLexicalEnvironment(): void {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
            Debug.assert(lexicalEnvironmentSuspended, "Lexical environment is not suspended.");
            lexicalEnvironmentSuspended = false;
        }

        /**
         * Ends a lexical environment. The previous set of hoisted declarations are restored and
         * any hoisted declarations added in this environment are returned.
         */
        function endLexicalEnvironment(): Statement[] | undefined {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
            Debug.assert(!lexicalEnvironmentSuspended, "Lexical environment is suspended.");

            let statements: Statement[] | undefined;
            if (lexicalEnvironmentVariableDeclarations || lexicalEnvironmentFunctionDeclarations) {
                if (lexicalEnvironmentFunctionDeclarations) {
                    statements = [...lexicalEnvironmentFunctionDeclarations];
                }

                if (lexicalEnvironmentVariableDeclarations) {
                    const statement = createVariableStatement(
                        /*modifiers*/ undefined,
                        createVariableDeclarationList(lexicalEnvironmentVariableDeclarations)
                    );

                    setEmitFlags(statement, EmitFlags.CustomPrologue);

                    if (!statements) {
                        statements = [statement];
                    }
                    else {
                        statements.push(statement);
                    }
                }
            }

            // Restore the previous lexical environment.
            lexicalEnvironmentStackOffset--;
            lexicalEnvironmentVariableDeclarations = lexicalEnvironmentVariableDeclarationsStack[lexicalEnvironmentStackOffset];
            lexicalEnvironmentFunctionDeclarations = lexicalEnvironmentFunctionDeclarationsStack[lexicalEnvironmentStackOffset];
            if (lexicalEnvironmentStackOffset === 0) {
                lexicalEnvironmentVariableDeclarationsStack = [];
                lexicalEnvironmentFunctionDeclarationsStack = [];
            }
            return statements;
        }

        // Queues a non-scoped emit helper to be written out by the printer.
        function requestEmitHelper(helper: EmitHelper): void {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the transformation context during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed.");
            Debug.assert(!helper.scoped, "Cannot request a scoped emit helper.");
            emitHelpers = append(emitHelpers, helper);
        }

        // Returns and clears the set of requested emit helpers.
        function readEmitHelpers(): EmitHelper[] | undefined {
            Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the transformation context during initialization.");
            Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed.");
            const helpers = emitHelpers;
            emitHelpers = undefined;
            return helpers;
        }

        // Releases all resources held by the transformation result; idempotent.
        function dispose() {
            if (state < TransformationState.Disposed) {
                // Clean up emit nodes on parse tree
                for (const node of nodes) {
                    disposeEmitNodes(getSourceFileOfNode(getParseTreeNode(node)));
                }

                // Release references to external entries for GC purposes.
                lexicalEnvironmentVariableDeclarations = undefined!;
                lexicalEnvironmentVariableDeclarationsStack = undefined!;
                lexicalEnvironmentFunctionDeclarations = undefined!;
                lexicalEnvironmentFunctionDeclarationsStack = undefined!;
                onSubstituteNode = undefined!;
                onEmitNode = undefined!;
                emitHelpers = undefined;

                // Prevent further use of the transformation result.
                state = TransformationState.Disposed;
            }
        }
    }
}
weswigham/TypeScript
src/compiler/transformer.ts
TypeScript
apache-2.0
18,593
// Copyright 2020 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel // protoc v3.12.3 // source: proto/google/fhir/proto/r5/core/resources/medication_usage.proto package medication_usage_go_proto import ( any "github.com/golang/protobuf/ptypes/any" _ "github.com/google/fhir/go/proto/google/fhir/proto/annotations_go_proto" codes_go_proto "github.com/google/fhir/go/proto/google/fhir/proto/r5/core/codes_go_proto" datatypes_go_proto "github.com/google/fhir/go/proto/google/fhir/proto/r5/core/datatypes_go_proto" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // Auto-generated from StructureDefinition for MedicationUsage, last updated // 2019-12-31T21:03:40.621+11:00. Record of medication being taken by a patient. 
// See http://hl7.org/fhir/StructureDefinition/MedicationUsage type MedicationUsage struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Logical id of this artifact Id *datatypes_go_proto.Id `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Metadata about the resource Meta *datatypes_go_proto.Meta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"` // A set of rules under which this content was created ImplicitRules *datatypes_go_proto.Uri `protobuf:"bytes,3,opt,name=implicit_rules,json=implicitRules,proto3" json:"implicit_rules,omitempty"` // Language of the resource content Language *datatypes_go_proto.Code `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"` // Text summary of the resource, for human interpretation Text *datatypes_go_proto.Narrative `protobuf:"bytes,5,opt,name=text,proto3" json:"text,omitempty"` // Contained, inline Resources Contained []*any.Any `protobuf:"bytes,6,rep,name=contained,proto3" json:"contained,omitempty"` // Additional content defined by implementations Extension []*datatypes_go_proto.Extension `protobuf:"bytes,8,rep,name=extension,proto3" json:"extension,omitempty"` // Extensions that cannot be ignored ModifierExtension []*datatypes_go_proto.Extension `protobuf:"bytes,9,rep,name=modifier_extension,json=modifierExtension,proto3" json:"modifier_extension,omitempty"` // External identifier Identifier []*datatypes_go_proto.Identifier `protobuf:"bytes,10,rep,name=identifier,proto3" json:"identifier,omitempty"` // Fulfils plan, proposal or order BasedOn []*datatypes_go_proto.Reference `protobuf:"bytes,11,rep,name=based_on,json=basedOn,proto3" json:"based_on,omitempty"` // Part of referenced event PartOf []*datatypes_go_proto.Reference `protobuf:"bytes,12,rep,name=part_of,json=partOf,proto3" json:"part_of,omitempty"` Status *MedicationUsage_StatusCode `protobuf:"bytes,13,opt,name=status,proto3" json:"status,omitempty"` // Reason for 
current status StatusReason []*datatypes_go_proto.CodeableConcept `protobuf:"bytes,14,rep,name=status_reason,json=statusReason,proto3" json:"status_reason,omitempty"` // Type of medication usage Category []*datatypes_go_proto.CodeableConcept `protobuf:"bytes,15,rep,name=category,proto3" json:"category,omitempty"` Medication *MedicationUsage_MedicationX `protobuf:"bytes,16,opt,name=medication,proto3" json:"medication,omitempty"` // Who is/was taking the medication Subject *datatypes_go_proto.Reference `protobuf:"bytes,17,opt,name=subject,proto3" json:"subject,omitempty"` // Encounter associated with MedicationUsage Encounter *datatypes_go_proto.Reference `protobuf:"bytes,18,opt,name=encounter,proto3" json:"encounter,omitempty"` Effective *MedicationUsage_EffectiveX `protobuf:"bytes,19,opt,name=effective,proto3" json:"effective,omitempty"` // When the usage was asserted? DateAsserted *datatypes_go_proto.DateTime `protobuf:"bytes,20,opt,name=date_asserted,json=dateAsserted,proto3" json:"date_asserted,omitempty"` // Person or organization that provided the information about the taking of // this medication InformationSource *datatypes_go_proto.Reference `protobuf:"bytes,21,opt,name=information_source,json=informationSource,proto3" json:"information_source,omitempty"` // Link to information used to derive the MedicationUsage DerivedFrom []*datatypes_go_proto.Reference `protobuf:"bytes,22,rep,name=derived_from,json=derivedFrom,proto3" json:"derived_from,omitempty"` // Reason for why the medication is being/was taken Reason []*datatypes_go_proto.CodeableReference `protobuf:"bytes,23,rep,name=reason,proto3" json:"reason,omitempty"` // Further information about the usage Note []*datatypes_go_proto.Annotation `protobuf:"bytes,24,rep,name=note,proto3" json:"note,omitempty"` // Full representation of the dosage instructions RenderedDosageInstruction *datatypes_go_proto.String `protobuf:"bytes,25,opt,name=rendered_dosage_instruction,json=renderedDosageInstruction,proto3" 
json:"rendered_dosage_instruction,omitempty"` // Details of how medication is/was taken or should be taken Dosage []*datatypes_go_proto.Dosage `protobuf:"bytes,26,rep,name=dosage,proto3" json:"dosage,omitempty"` // Indicates if the medication is being consumed or administered as prescribed TakenAsOrdered *datatypes_go_proto.Boolean `protobuf:"bytes,27,opt,name=taken_as_ordered,json=takenAsOrdered,proto3" json:"taken_as_ordered,omitempty"` } func (x *MedicationUsage) Reset() { *x = MedicationUsage{} if protoimpl.UnsafeEnabled { mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MedicationUsage) String() string { return protoimpl.X.MessageStringOf(x) } func (*MedicationUsage) ProtoMessage() {} func (x *MedicationUsage) ProtoReflect() protoreflect.Message { mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use MedicationUsage.ProtoReflect.Descriptor instead. 
func (*MedicationUsage) Descriptor() ([]byte, []int) {
	// Message index 0 within this file's descriptor.
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0}
}

// NOTE(review): this block appears to be protoc-gen-go output (protoimpl /
// protoreflect machinery, gzip'd raw-descriptor accessors) — regenerate from
// the .proto source rather than editing by hand; TODO confirm the generator
// pipeline.
//
// All Get* accessors below follow the generated nil-receiver convention:
// each returns the field's zero value (nil for message/list fields) when the
// receiver itself is nil, so callers may chain accessors without nil checks.

func (x *MedicationUsage) GetId() *datatypes_go_proto.Id {
	if x != nil {
		return x.Id
	}
	return nil
}

func (x *MedicationUsage) GetMeta() *datatypes_go_proto.Meta {
	if x != nil {
		return x.Meta
	}
	return nil
}

func (x *MedicationUsage) GetImplicitRules() *datatypes_go_proto.Uri {
	if x != nil {
		return x.ImplicitRules
	}
	return nil
}

func (x *MedicationUsage) GetLanguage() *datatypes_go_proto.Code {
	if x != nil {
		return x.Language
	}
	return nil
}

func (x *MedicationUsage) GetText() *datatypes_go_proto.Narrative {
	if x != nil {
		return x.Text
	}
	return nil
}

func (x *MedicationUsage) GetContained() []*any.Any {
	if x != nil {
		return x.Contained
	}
	return nil
}

func (x *MedicationUsage) GetExtension() []*datatypes_go_proto.Extension {
	if x != nil {
		return x.Extension
	}
	return nil
}

func (x *MedicationUsage) GetModifierExtension() []*datatypes_go_proto.Extension {
	if x != nil {
		return x.ModifierExtension
	}
	return nil
}

func (x *MedicationUsage) GetIdentifier() []*datatypes_go_proto.Identifier {
	if x != nil {
		return x.Identifier
	}
	return nil
}

func (x *MedicationUsage) GetBasedOn() []*datatypes_go_proto.Reference {
	if x != nil {
		return x.BasedOn
	}
	return nil
}

func (x *MedicationUsage) GetPartOf() []*datatypes_go_proto.Reference {
	if x != nil {
		return x.PartOf
	}
	return nil
}

func (x *MedicationUsage) GetStatus() *MedicationUsage_StatusCode {
	if x != nil {
		return x.Status
	}
	return nil
}

func (x *MedicationUsage) GetStatusReason() []*datatypes_go_proto.CodeableConcept {
	if x != nil {
		return x.StatusReason
	}
	return nil
}

func (x *MedicationUsage) GetCategory() []*datatypes_go_proto.CodeableConcept {
	if x != nil {
		return x.Category
	}
	return nil
}

func (x *MedicationUsage) GetMedication() *MedicationUsage_MedicationX {
	if x != nil {
		return x.Medication
	}
	return nil
}

func (x *MedicationUsage) GetSubject() *datatypes_go_proto.Reference {
	if x != nil {
		return x.Subject
	}
	return nil
}

func (x *MedicationUsage) GetEncounter() *datatypes_go_proto.Reference {
	if x != nil {
		return x.Encounter
	}
	return nil
}

func (x *MedicationUsage) GetEffective() *MedicationUsage_EffectiveX {
	if x != nil {
		return x.Effective
	}
	return nil
}

func (x *MedicationUsage) GetDateAsserted() *datatypes_go_proto.DateTime {
	if x != nil {
		return x.DateAsserted
	}
	return nil
}

func (x *MedicationUsage) GetInformationSource() *datatypes_go_proto.Reference {
	if x != nil {
		return x.InformationSource
	}
	return nil
}

func (x *MedicationUsage) GetDerivedFrom() []*datatypes_go_proto.Reference {
	if x != nil {
		return x.DerivedFrom
	}
	return nil
}

func (x *MedicationUsage) GetReason() []*datatypes_go_proto.CodeableReference {
	if x != nil {
		return x.Reason
	}
	return nil
}

func (x *MedicationUsage) GetNote() []*datatypes_go_proto.Annotation {
	if x != nil {
		return x.Note
	}
	return nil
}

func (x *MedicationUsage) GetRenderedDosageInstruction() *datatypes_go_proto.String {
	if x != nil {
		return x.RenderedDosageInstruction
	}
	return nil
}

func (x *MedicationUsage) GetDosage() []*datatypes_go_proto.Dosage {
	if x != nil {
		return x.Dosage
	}
	return nil
}

func (x *MedicationUsage) GetTakenAsOrdered() *datatypes_go_proto.Boolean {
	if x != nil {
		return x.TakenAsOrdered
	}
	return nil
}

// active | completed | entered-in-error | intended | stopped | on-hold |
// unknown | not-taken
//
// Primitive-wrapper message for the resource's status code: carries the enum
// value plus the FHIR element's id and extensions.
type MedicationUsage_StatusCode struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Value     codes_go_proto.MedicationUsageStatusCode_Value `protobuf:"varint,1,opt,name=value,proto3,enum=google.fhir.r5.core.MedicationUsageStatusCode_Value" json:"value,omitempty"`
	Id        *datatypes_go_proto.String                     `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
	Extension []*datatypes_go_proto.Extension                `protobuf:"bytes,3,rep,name=extension,proto3" json:"extension,omitempty"`
}

func (x *MedicationUsage_StatusCode) Reset() {
	*x = MedicationUsage_StatusCode{}
	if protoimpl.UnsafeEnabled {
		// Re-attach message info so a reset message still reflects correctly.
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MedicationUsage_StatusCode) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MedicationUsage_StatusCode) ProtoMessage() {}

func (x *MedicationUsage_StatusCode) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			// Lazily cache the message info on first reflective access.
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage_StatusCode.ProtoReflect.Descriptor instead.
func (*MedicationUsage_StatusCode) Descriptor() ([]byte, []int) {
	// Path {0, 0}: first nested message of message 0.
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0, 0}
}

func (x *MedicationUsage_StatusCode) GetValue() codes_go_proto.MedicationUsageStatusCode_Value {
	if x != nil {
		return x.Value
	}
	// Enum zero value for a nil receiver.
	return codes_go_proto.MedicationUsageStatusCode_INVALID_UNINITIALIZED
}

func (x *MedicationUsage_StatusCode) GetId() *datatypes_go_proto.String {
	if x != nil {
		return x.Id
	}
	return nil
}

func (x *MedicationUsage_StatusCode) GetExtension() []*datatypes_go_proto.Extension {
	if x != nil {
		return x.Extension
	}
	return nil
}

// What medication was taken
//
// Generated oneof wrapper: exactly one of the Choice variants below is set.
type MedicationUsage_MedicationX struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to Choice:
	//	*MedicationUsage_MedicationX_CodeableConcept
	//	*MedicationUsage_MedicationX_Reference
	Choice isMedicationUsage_MedicationX_Choice `protobuf_oneof:"choice"`
}

func (x *MedicationUsage_MedicationX) Reset() {
	*x = MedicationUsage_MedicationX{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MedicationUsage_MedicationX) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MedicationUsage_MedicationX) ProtoMessage() {}

func (x *MedicationUsage_MedicationX) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage_MedicationX.ProtoReflect.Descriptor instead.
func (*MedicationUsage_MedicationX) Descriptor() ([]byte, []int) {
	// Path {0, 1}: second nested message of message 0.
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0, 1}
}

func (m *MedicationUsage_MedicationX) GetChoice() isMedicationUsage_MedicationX_Choice {
	if m != nil {
		return m.Choice
	}
	return nil
}

// GetCodeableConcept returns the codeable_concept variant, or nil when the
// oneof holds a different variant (or the receiver is nil).
func (x *MedicationUsage_MedicationX) GetCodeableConcept() *datatypes_go_proto.CodeableConcept {
	if x, ok := x.GetChoice().(*MedicationUsage_MedicationX_CodeableConcept); ok {
		return x.CodeableConcept
	}
	return nil
}

// GetReference returns the reference variant, or nil when the oneof holds a
// different variant (or the receiver is nil).
func (x *MedicationUsage_MedicationX) GetReference() *datatypes_go_proto.Reference {
	if x, ok := x.GetChoice().(*MedicationUsage_MedicationX_Reference); ok {
		return x.Reference
	}
	return nil
}

// Sealed interface implemented only by the oneof wrapper types below.
type isMedicationUsage_MedicationX_Choice interface {
	isMedicationUsage_MedicationX_Choice()
}

type MedicationUsage_MedicationX_CodeableConcept struct {
	CodeableConcept *datatypes_go_proto.CodeableConcept `protobuf:"bytes,1,opt,name=codeable_concept,json=codeableConcept,proto3,oneof"`
}

type MedicationUsage_MedicationX_Reference struct {
	Reference *datatypes_go_proto.Reference `protobuf:"bytes,2,opt,name=reference,proto3,oneof"`
}

func (*MedicationUsage_MedicationX_CodeableConcept) isMedicationUsage_MedicationX_Choice() {}
func (*MedicationUsage_MedicationX_Reference) isMedicationUsage_MedicationX_Choice() {}

// The date/time or interval when the medication is/was/will be taken
//
// Generated oneof wrapper: exactly one of the Choice variants (date_time or
// period) is set. NOTE(review): generated protobuf code — regenerate from the
// .proto source rather than editing by hand.
type MedicationUsage_EffectiveX struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to Choice:
	//	*MedicationUsage_EffectiveX_DateTime
	//	*MedicationUsage_EffectiveX_Period
	Choice isMedicationUsage_EffectiveX_Choice `protobuf_oneof:"choice"`
}

func (x *MedicationUsage_EffectiveX) Reset() {
	*x = MedicationUsage_EffectiveX{}
	if protoimpl.UnsafeEnabled {
		// Re-attach message info so a reset message still reflects correctly.
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MedicationUsage_EffectiveX) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MedicationUsage_EffectiveX) ProtoMessage() {}

func (x *MedicationUsage_EffectiveX) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			// Lazily cache the message info on first reflective access.
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage_EffectiveX.ProtoReflect.Descriptor instead.
func (*MedicationUsage_EffectiveX) Descriptor() ([]byte, []int) { return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0, 2} } func (m *MedicationUsage_EffectiveX) GetChoice() isMedicationUsage_EffectiveX_Choice { if m != nil { return m.Choice } return nil } func (x *MedicationUsage_EffectiveX) GetDateTime() *datatypes_go_proto.DateTime { if x, ok := x.GetChoice().(*MedicationUsage_EffectiveX_DateTime); ok { return x.DateTime } return nil } func (x *MedicationUsage_EffectiveX) GetPeriod() *datatypes_go_proto.Period { if x, ok := x.GetChoice().(*MedicationUsage_EffectiveX_Period); ok { return x.Period } return nil } type isMedicationUsage_EffectiveX_Choice interface { isMedicationUsage_EffectiveX_Choice() } type MedicationUsage_EffectiveX_DateTime struct { DateTime *datatypes_go_proto.DateTime `protobuf:"bytes,1,opt,name=date_time,json=dateTime,proto3,oneof"` } type MedicationUsage_EffectiveX_Period struct { Period *datatypes_go_proto.Period `protobuf:"bytes,2,opt,name=period,proto3,oneof"` } func (*MedicationUsage_EffectiveX_DateTime) isMedicationUsage_EffectiveX_Choice() {} func (*MedicationUsage_EffectiveX_Period) isMedicationUsage_EffectiveX_Choice() {} var File_proto_google_fhir_proto_r5_core_resources_medication_usage_proto protoreflect.FileDescriptor var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc = []byte{ 0x0a, 0x40, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x13, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x29, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd8, 0x15, 0x0a, 0x0f, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x27, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x12, 0x3f, 0x0a, 0x0e, 0x69, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x72, 0x69, 0x52, 0x0d, 0x69, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x52, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x08, 0x6c, 0x61, 
0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x12, 0x32, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x61, 0x72, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x32, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x64, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x64, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x4d, 0x0a, 0x12, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x11, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 
0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x74, 0x0a, 0x08, 0x62, 0x61, 0x73, 0x65, 0x64, 0x5f, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x39, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x11, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x08, 0x43, 0x61, 0x72, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x07, 0x62, 0x61, 0x73, 0x65, 0x64, 0x4f, 0x6e, 0x12, 0xa4, 0x01, 0x0a, 0x07, 0x70, 0x61, 0x72, 0x74, 0x5f, 0x6f, 0x66, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x6b, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x18, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x12, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x73, 0x70, 0x65, 0x6e, 0x73, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0f, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x09, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x64, 0x75, 0x72, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0b, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x70, 0x61, 0x72, 0x74, 0x4f, 0x66, 0x12, 0x4f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 
0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x42, 0x06, 0xf0, 0xd0, 0x87, 0xeb, 0x04, 0x01, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x49, 0x0a, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x52, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x0f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x58, 0x0a, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x58, 0x42, 0x06, 0xf0, 0xd0, 0x87, 0xeb, 0x04, 0x01, 0x52, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x58, 0x0a, 0x07, 0x73, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x1e, 0xf0, 0xd0, 0x87, 0xeb, 0x04, 0x01, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x07, 0x50, 0x61, 0x74, 0x69, 0x65, 0x6e, 0x74, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x05, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x52, 0x07, 0x73, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x4d, 0x0a, 0x09, 0x65, 0x6e, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x0f, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x09, 0x45, 0x6e, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x09, 0x65, 0x6e, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x4d, 0x0a, 0x09, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x58, 0x52, 0x09, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12, 0x42, 0x0a, 0x0d, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x61, 0x73, 0x73, 0x65, 0x72, 0x74, 0x65, 0x64, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x0c, 0x64, 0x61, 0x74, 0x65, 0x41, 0x73, 0x73, 0x65, 0x72, 0x74, 0x65, 0x64, 0x12, 0xa9, 0x01, 0x0a, 0x12, 0x69, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 
0x65, 0x42, 0x5a, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x07, 0x50, 0x61, 0x74, 0x69, 0x65, 0x6e, 0x74, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0c, 0x50, 0x72, 0x61, 0x63, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x10, 0x50, 0x72, 0x61, 0x63, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x52, 0x6f, 0x6c, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0d, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x50, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0c, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x11, 0x69, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x64, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x18, 0x16, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x0e, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0b, 0x64, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x46, 0x72, 0x6f, 0x6d, 0x12, 0x3e, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x04, 0x6e, 0x6f, 0x74, 0x65, 0x18, 0x18, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x6e, 0x6f, 0x74, 0x65, 0x12, 0x5b, 0x0a, 0x1b, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x65, 0x64, 0x5f, 0x64, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x69, 
0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x19, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x65, 0x64, 0x44, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x06, 0x64, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x18, 0x1a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x52, 0x06, 0x64, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x12, 0x46, 0x0a, 0x10, 0x74, 0x61, 0x6b, 0x65, 0x6e, 0x5f, 0x61, 0x73, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x65, 0x64, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x52, 0x0e, 0x74, 0x61, 0x6b, 0x65, 0x6e, 0x41, 0x73, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x65, 0x64, 0x1a, 0xb7, 0x02, 0x0a, 0x0a, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x4a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2b, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x02, 0x69, 0x64, 
0x12, 0x3c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x72, 0xc0, 0x9f, 0xe3, 0xb6, 0x05, 0x01, 0x8a, 0xf9, 0x83, 0xb2, 0x05, 0x34, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x68, 0x6c, 0x37, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x53, 0x65, 0x74, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x75, 0x73, 0x61, 0x67, 0x65, 0x2d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x9a, 0xb5, 0x8e, 0x93, 0x06, 0x2c, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x68, 0x6c, 0x37, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x75, 0x72, 0x65, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x1a, 0xc4, 0x01, 0x0a, 0x0b, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x58, 0x12, 0x51, 0x0a, 0x10, 0x63, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x48, 0x00, 0x52, 0x0f, 0x63, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x12, 0x50, 0x0a, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x10, 0xf2, 0xff, 0xfc, 
0xc2, 0x06, 0x0a, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x3a, 0x06, 0xa0, 0x83, 0x83, 0xe8, 0x06, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x1a, 0x93, 0x01, 0x0a, 0x0a, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x58, 0x12, 0x3c, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x48, 0x00, 0x52, 0x08, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x06, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x48, 0x00, 0x52, 0x06, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x3a, 0x06, 0xa0, 0x83, 0x83, 0xe8, 0x06, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x3a, 0x43, 0xc0, 0x9f, 0xe3, 0xb6, 0x05, 0x03, 0xb2, 0xfe, 0xe4, 0x97, 0x06, 0x37, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x68, 0x6c, 0x37, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x75, 0x72, 0x65, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x4a, 0x04, 0x08, 0x07, 0x10, 0x08, 0x42, 0x80, 0x01, 0x0a, 0x17, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x50, 0x01, 0x5a, 0x5d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x98, 0xc6, 0xb0, 0xb5, 0x07, 0x05, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescOnce sync.Once file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData = file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc ) func file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP() []byte { file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescOnce.Do(func() { file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData) }) return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData } var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes = make([]protoimpl.MessageInfo, 4) var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_goTypes = []interface{}{ (*MedicationUsage)(nil), // 0: google.fhir.r5.core.MedicationUsage (*MedicationUsage_StatusCode)(nil), // 1: google.fhir.r5.core.MedicationUsage.StatusCode (*MedicationUsage_MedicationX)(nil), // 2: google.fhir.r5.core.MedicationUsage.MedicationX (*MedicationUsage_EffectiveX)(nil), // 3: google.fhir.r5.core.MedicationUsage.EffectiveX (*datatypes_go_proto.Id)(nil), // 4: google.fhir.r5.core.Id (*datatypes_go_proto.Meta)(nil), // 5: google.fhir.r5.core.Meta (*datatypes_go_proto.Uri)(nil), // 6: google.fhir.r5.core.Uri (*datatypes_go_proto.Code)(nil), // 7: google.fhir.r5.core.Code 
(*datatypes_go_proto.Narrative)(nil), // 8: google.fhir.r5.core.Narrative (*any.Any)(nil), // 9: google.protobuf.Any (*datatypes_go_proto.Extension)(nil), // 10: google.fhir.r5.core.Extension (*datatypes_go_proto.Identifier)(nil), // 11: google.fhir.r5.core.Identifier (*datatypes_go_proto.Reference)(nil), // 12: google.fhir.r5.core.Reference (*datatypes_go_proto.CodeableConcept)(nil), // 13: google.fhir.r5.core.CodeableConcept (*datatypes_go_proto.DateTime)(nil), // 14: google.fhir.r5.core.DateTime (*datatypes_go_proto.CodeableReference)(nil), // 15: google.fhir.r5.core.CodeableReference (*datatypes_go_proto.Annotation)(nil), // 16: google.fhir.r5.core.Annotation (*datatypes_go_proto.String)(nil), // 17: google.fhir.r5.core.String (*datatypes_go_proto.Dosage)(nil), // 18: google.fhir.r5.core.Dosage (*datatypes_go_proto.Boolean)(nil), // 19: google.fhir.r5.core.Boolean (codes_go_proto.MedicationUsageStatusCode_Value)(0), // 20: google.fhir.r5.core.MedicationUsageStatusCode.Value (*datatypes_go_proto.Period)(nil), // 21: google.fhir.r5.core.Period } var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_depIdxs = []int32{ 4, // 0: google.fhir.r5.core.MedicationUsage.id:type_name -> google.fhir.r5.core.Id 5, // 1: google.fhir.r5.core.MedicationUsage.meta:type_name -> google.fhir.r5.core.Meta 6, // 2: google.fhir.r5.core.MedicationUsage.implicit_rules:type_name -> google.fhir.r5.core.Uri 7, // 3: google.fhir.r5.core.MedicationUsage.language:type_name -> google.fhir.r5.core.Code 8, // 4: google.fhir.r5.core.MedicationUsage.text:type_name -> google.fhir.r5.core.Narrative 9, // 5: google.fhir.r5.core.MedicationUsage.contained:type_name -> google.protobuf.Any 10, // 6: google.fhir.r5.core.MedicationUsage.extension:type_name -> google.fhir.r5.core.Extension 10, // 7: google.fhir.r5.core.MedicationUsage.modifier_extension:type_name -> google.fhir.r5.core.Extension 11, // 8: google.fhir.r5.core.MedicationUsage.identifier:type_name -> 
google.fhir.r5.core.Identifier 12, // 9: google.fhir.r5.core.MedicationUsage.based_on:type_name -> google.fhir.r5.core.Reference 12, // 10: google.fhir.r5.core.MedicationUsage.part_of:type_name -> google.fhir.r5.core.Reference 1, // 11: google.fhir.r5.core.MedicationUsage.status:type_name -> google.fhir.r5.core.MedicationUsage.StatusCode 13, // 12: google.fhir.r5.core.MedicationUsage.status_reason:type_name -> google.fhir.r5.core.CodeableConcept 13, // 13: google.fhir.r5.core.MedicationUsage.category:type_name -> google.fhir.r5.core.CodeableConcept 2, // 14: google.fhir.r5.core.MedicationUsage.medication:type_name -> google.fhir.r5.core.MedicationUsage.MedicationX 12, // 15: google.fhir.r5.core.MedicationUsage.subject:type_name -> google.fhir.r5.core.Reference 12, // 16: google.fhir.r5.core.MedicationUsage.encounter:type_name -> google.fhir.r5.core.Reference 3, // 17: google.fhir.r5.core.MedicationUsage.effective:type_name -> google.fhir.r5.core.MedicationUsage.EffectiveX 14, // 18: google.fhir.r5.core.MedicationUsage.date_asserted:type_name -> google.fhir.r5.core.DateTime 12, // 19: google.fhir.r5.core.MedicationUsage.information_source:type_name -> google.fhir.r5.core.Reference 12, // 20: google.fhir.r5.core.MedicationUsage.derived_from:type_name -> google.fhir.r5.core.Reference 15, // 21: google.fhir.r5.core.MedicationUsage.reason:type_name -> google.fhir.r5.core.CodeableReference 16, // 22: google.fhir.r5.core.MedicationUsage.note:type_name -> google.fhir.r5.core.Annotation 17, // 23: google.fhir.r5.core.MedicationUsage.rendered_dosage_instruction:type_name -> google.fhir.r5.core.String 18, // 24: google.fhir.r5.core.MedicationUsage.dosage:type_name -> google.fhir.r5.core.Dosage 19, // 25: google.fhir.r5.core.MedicationUsage.taken_as_ordered:type_name -> google.fhir.r5.core.Boolean 20, // 26: google.fhir.r5.core.MedicationUsage.StatusCode.value:type_name -> google.fhir.r5.core.MedicationUsageStatusCode.Value 17, // 27: 
google.fhir.r5.core.MedicationUsage.StatusCode.id:type_name -> google.fhir.r5.core.String 10, // 28: google.fhir.r5.core.MedicationUsage.StatusCode.extension:type_name -> google.fhir.r5.core.Extension 13, // 29: google.fhir.r5.core.MedicationUsage.MedicationX.codeable_concept:type_name -> google.fhir.r5.core.CodeableConcept 12, // 30: google.fhir.r5.core.MedicationUsage.MedicationX.reference:type_name -> google.fhir.r5.core.Reference 14, // 31: google.fhir.r5.core.MedicationUsage.EffectiveX.date_time:type_name -> google.fhir.r5.core.DateTime 21, // 32: google.fhir.r5.core.MedicationUsage.EffectiveX.period:type_name -> google.fhir.r5.core.Period 33, // [33:33] is the sub-list for method output_type 33, // [33:33] is the sub-list for method input_type 33, // [33:33] is the sub-list for extension type_name 33, // [33:33] is the sub-list for extension extendee 0, // [0:33] is the sub-list for field type_name } func init() { file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_init() } func file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_init() { if File_proto_google_fhir_proto_r5_core_resources_medication_usage_proto != nil { return } if !protoimpl.UnsafeEnabled { file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MedicationUsage); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MedicationUsage_StatusCode); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MedicationUsage_MedicationX); i { case 0: return &v.state 
case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MedicationUsage_EffectiveX); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2].OneofWrappers = []interface{}{ (*MedicationUsage_MedicationX_CodeableConcept)(nil), (*MedicationUsage_MedicationX_Reference)(nil), } file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3].OneofWrappers = []interface{}{ (*MedicationUsage_EffectiveX_DateTime)(nil), (*MedicationUsage_EffectiveX_Period)(nil), } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc, NumEnums: 0, NumMessages: 4, NumExtensions: 0, NumServices: 0, }, GoTypes: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_goTypes, DependencyIndexes: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_depIdxs, MessageInfos: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes, }.Build() File_proto_google_fhir_proto_r5_core_resources_medication_usage_proto = out.File file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc = nil file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_goTypes = nil file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_depIdxs = nil }
google/fhir
go/proto/google/fhir/proto/r5/core/resources/medication_usage_go_proto/medication_usage.pb.go
GO
apache-2.0
47,253
package Paws::ES::InstanceCountLimits; use Moose; has MaximumInstanceCount => (is => 'ro', isa => 'Int'); has MinimumInstanceCount => (is => 'ro', isa => 'Int'); 1; ### main pod documentation begin ### =head1 NAME Paws::ES::InstanceCountLimits =head1 USAGE This class represents one of two things: =head3 Arguments in a call to a service Use the attributes of this class as arguments to methods. You shouldn't make instances of this class. Each attribute should be used as a named argument in the calls that expect this type of object. As an example, if Att1 is expected to be a Paws::ES::InstanceCountLimits object: $service_obj->Method(Att1 => { MaximumInstanceCount => $value, ..., MinimumInstanceCount => $value }); =head3 Results returned from an API call Use accessors for each attribute. If Att1 is expected to be an Paws::ES::InstanceCountLimits object: $result = $service_obj->Method(...); $result->Att1->MaximumInstanceCount =head1 DESCRIPTION InstanceCountLimits represents the limits on number of instances that be created in Amazon Elasticsearch for given InstanceType. =head1 ATTRIBUTES =head2 MaximumInstanceCount => Int =head2 MinimumInstanceCount => Int =head1 SEE ALSO This class forms part of L<Paws>, describing an object used in L<Paws::ES> =head1 BUGS and CONTRIBUTIONS The source code is located here: https://github.com/pplu/aws-sdk-perl Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues =cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/ES/InstanceCountLimits.pm
Perl
apache-2.0
1,484
/* * Copyright 2013 must-be.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package consulo.xstylesheet.definition.impl; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.lookup.LookupElement; import consulo.xstylesheet.definition.XStyleSheetPropertyValuePart; import consulo.xstylesheet.definition.XStyleSheetPropertyValuePartParser; import consulo.xstylesheet.psi.PsiXStyleSheetPropertyValuePart; import javax.annotation.Nonnull; import java.util.List; /** * @author VISTALL * @since 03.07.13. 
*/ public class XStyleSheetPropertyValuePartImpl implements XStyleSheetPropertyValuePart { private final XStyleSheetPropertyValuePartParser myParser; private final String myValue; public XStyleSheetPropertyValuePartImpl(XStyleSheetPropertyValuePartParser parser, String value) { myParser = parser; myValue = value; } @Override public XStyleSheetPropertyValuePartParser getParser() { return myParser; } @Nonnull @Override public List<HighlightInfo> createHighlights(@Nonnull PsiXStyleSheetPropertyValuePart valuePart) { return myParser.createHighlights(valuePart); } @Override public String getValue() { return myValue; } @Override public Object getNativeValue(PsiXStyleSheetPropertyValuePart part) { return myParser.getNativeValue(part, myValue); } @Override public boolean setNativeValue(@Nonnull XStyleSheetPropertyValuePart part, Object value) { return false; } @Override public List<LookupElement> getLookupElements() { return myParser.getLookupElements(myValue); } }
consulo/consulo-css
xstylesheet-api/src/main/java/consulo/xstylesheet/definition/impl/XStyleSheetPropertyValuePartImpl.java
Java
apache-2.0
2,084
package com.tomorrowhi.thdemo.activitys; import android.os.Bundle; import android.os.Handler; import android.support.v4.content.ContextCompat; import android.view.View; import android.widget.ImageButton; import android.widget.RelativeLayout; import android.widget.TextView; import com.amap.api.maps.AMap; import com.amap.api.maps.CameraUpdateFactory; import com.amap.api.maps.MapView; import com.amap.api.maps.UiSettings; import com.amap.api.maps.model.BitmapDescriptorFactory; import com.amap.api.maps.model.LatLng; import com.amap.api.maps.model.Marker; import com.amap.api.maps.model.MarkerOptions; import com.amap.api.maps.model.PolylineOptions; import com.amap.api.services.core.AMapException; import com.amap.api.services.core.LatLonPoint; import com.amap.api.services.geocoder.GeocodeResult; import com.amap.api.services.geocoder.GeocodeSearch; import com.amap.api.services.geocoder.RegeocodeAddress; import com.amap.api.services.geocoder.RegeocodeQuery; import com.amap.api.services.geocoder.RegeocodeResult; import com.blankj.utilcode.util.LogUtils; import com.tomorrowhi.thdemo.R; import com.tomorrowhi.thdemo.base.BaseActivity; import com.tomorrowhi.thdemo.bean.LocusPointBean; import com.tomorrowhi.thdemo.util.locationUtiils.LocationUtil; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import butterknife.BindView; import butterknife.OnClick; /** * Created by zhaotaotao on 10/01/2017. 
* 高德 历史轨迹 */ public class AMapLocusActivity extends BaseActivity implements GeocodeSearch.OnGeocodeSearchListener, AMap.InfoWindowAdapter { @BindView(R.id.title_return_iv) ImageButton titleReturnIv; @BindView(R.id.amap_map_view) MapView amapMapView; @BindView(R.id.a_map_app_location_desc) TextView aMapAppLocationDesc; @BindView(R.id.a_map_locus_bottom_desc) RelativeLayout aMapBottomDesc; private AMap aMap; private ReadThread playLocus; private MarkerOptions locusMarkerOption; private GeocodeSearch geocoderSearch; private ExecutorService mExecutorService; private PolylineOptions options; private long threadRunTimeIntervalLong = 1700; //绘制线的最长间隔 private long threadRunTimeInterval = 200; //绘制线的间隔差值 private long threadRunTimeIntervalShort = 1000; //绘制线的最短时间间隔 private List<LocusPointBean> locusPointBeanList = new ArrayList<>(); private double addNum = 0.001; private double defaultLat = 22.5714712; private double defaultLng = 113.8619078; private int countMarker = 0; private View markerView; private TextView markerNumTv; private LatLng lastLatlng = null; @Override protected int getLayoutRes() { return R.layout.activity_amap_locus; } @Override protected void initComplete(Bundle savedInstanceState) { } @Override protected void initEvent() { new Handler().postDelayed(new Runnable() { @Override public void run() { startPlayLocus(); } }, 2000); aMap.setOnMarkerClickListener(new AMap.OnMarkerClickListener() { @Override public boolean onMarkerClick(Marker marker) { marker.showInfoWindow(); return false; } }); } @Override protected void initData() { for (int i = 0; i < 10; i++) { defaultLat += addNum; locusPointBeanList.add(new LocusPointBean(defaultLat, defaultLng, false)); } for (int i = 0; i < 10; i++) { defaultLng += addNum; locusPointBeanList.add(new LocusPointBean(defaultLat, defaultLng, false)); } options = new PolylineOptions(); //请求获得坐标所属的地理位置逆编码 getLocationDescribe(); getAddresses(); } @Override protected void initView() { setUpMapIfNeeded(); } @Override protected void 
init(Bundle savedInstanceState) { amapMapView.onCreate(savedInstanceState); myApplication.getEventBus().register(this); } @OnClick({R.id.title_return_iv}) public void onClick(View view) { switch (view.getId()) { case R.id.title_return_iv: this.finish(); break; } } private void setUpMapIfNeeded() { if (aMap == null) { aMap = amapMapView.getMap(); } aMap.setInfoWindowAdapter(this);// 设置自定义InfoWindow样式 //aMap.set; UiSettings mUiSettings = aMap.getUiSettings(); //隐藏地图上的 + - 图标 mUiSettings.setZoomControlsEnabled(false); setLocationIcon(); //获取传递过来的基本信息 aMap.moveCamera(CameraUpdateFactory.newLatLngZoom(new LatLng(defaultLat, defaultLng), LocationUtil.MAP_LEVEL)); } /** * 设置自定义的定位图标 */ private void setLocationIcon() { //初始化手表坐标位置marker locusMarkerOption = new MarkerOptions(); } private void startPlayLocus() { playLocus = new ReadThread("playLocus"); playLocus.start(); } @Override public View getInfoWindow(Marker marker) { return getInfoWindowView(marker); } @Override public View getInfoContents(Marker marker) { return null; } /** * 自定义地图Marker的显示view * * @param marker marker * @return view */ private View getInfoWindowView(Marker marker) { LogUtils.d("getInfoWindowView:" + marker.toString()); View inflate = this.getLayoutInflater().inflate(R.layout.map_marker_popwindow, null); renderLocationWindow(marker, inflate); return inflate; } private void renderLocationWindow(Marker marker, View inflate) { TextView markerWindowLocationDes = (TextView) inflate.findViewById(R.id.marker_window_location_des); markerWindowLocationDes.setText(marker.getSnippet()); } @Override public void onRegeocodeSearched(RegeocodeResult regeocodeResult, int i) { } @Override public void onGeocodeSearched(GeocodeResult geocodeResult, int i) { } /** * 通过子线程进行播放轨迹操作 */ public class ReadThread implements Runnable { Thread thread; private String threadName; private boolean suspended = false; private boolean end = false; ReadThread(String threadName) { this.threadName = threadName; LogUtils.d("Creating " + 
threadName); } /** * 执行内容 */ public void run() { for (int i = 0; i < locusPointBeanList.size(); i++) { try { synchronized (this) { while (suspended) { wait(); } } if (Thread.interrupted()) { LogUtils.d("Thread1" + threadName + " exiting."); break; } if (i == (locusPointBeanList.size() - 1)) { //最后一条数据,发送改变文字标识通知 LogUtils.d("locus play over"); locusPointBeanList.get(i).setOver(true); } myApplication.getEventBus().post(locusPointBeanList.get(i)); Thread.sleep(threadRunTimeIntervalLong); LogUtils.d("threadRunTimeIntervalLong:" + threadRunTimeIntervalLong); if (threadRunTimeIntervalLong > threadRunTimeIntervalShort) { threadRunTimeIntervalLong -= threadRunTimeInterval; } } catch (InterruptedException e) { LogUtils.d("Thread " + threadName + " interrupted."); e.printStackTrace(); Thread.currentThread().interrupt(); } if (end) { break; } } LogUtils.d("Thread " + threadName + " exiting."); } /** * 开始 */ void start() { LogUtils.d("Starting " + threadName); if (thread == null) { thread = new Thread(this, threadName); thread.start(); } } /** * 暂停 */ void suspend() { if (thread != null) { suspended = true; } } /** * 继续 */ synchronized void resume() { if (thread != null && suspended) { suspended = false; notify(); } } void endThread() { end = true; thread.interrupt(); thread = null; } } /** * 请求获得坐标所属的地理位置逆编码 */ private void getLocationDescribe() { geocoderSearch = new GeocodeSearch(this); } /** * 响应逆地理编码的批量请求 */ private void getAddresses() { if (mExecutorService == null) { mExecutorService = Executors.newSingleThreadExecutor(); } for (int i = 0; i < locusPointBeanList.size(); i++) { final LocusPointBean point = locusPointBeanList.get(i); final LatLonPoint latLonPoint = new LatLonPoint(point.getLat(), point.getLng()); mExecutorService.submit(new Runnable() { @Override public void run() { try { RegeocodeQuery query = new RegeocodeQuery(latLonPoint, 10, GeocodeSearch.AMAP);// 第一个参数表示一个Latlng,第二参数表示范围多少米,第三个参数表示是火系坐标系还是GPS原生坐标系 RegeocodeAddress result = 
geocoderSearch.getFromLocation(query);// 设置同步逆地理编码请求 if (result != null && result.getFormatAddress() != null ) { point.setOver(false); point.setLocationDescribe(result.getFormatAddress()); // addWatchMarkersToMap(point, false, 0); LogUtils.d("轨迹界面,批量获取逆地理编码:" + result.getFormatAddress() + ",坐标:" + point.toString()); } } catch (AMapException e) { LogUtils.d("轨迹界面,批量获取逆地理编码异常,异常码:" + e.getErrorCode()); } } }); } } @Subscribe(threadMode = ThreadMode.MAIN, priority = 20) public void getLocusLocation(LocusPointBean result) { if (result != null) { LogUtils.d("event事件:LocusActivity"); countMarker++; LatLng latLng = new LatLng(result.getLat(), result.getLng()); addWatchMarkersToMap(result, true, countMarker); //绘制线 if (lastLatlng != null) { //存在上一点记录,则开始绘制 options.add(lastLatlng); } else { //说明是第一个点,不进行绘制 } options.add(latLng); aMap.addPolyline(options.width(10).color(ContextCompat.getColor(mContext, R.color.purple_2))); aMap.animateCamera(CameraUpdateFactory.newLatLngZoom( latLng, LocationUtil.MAP_LEVEL)); aMapAppLocationDesc.setText(result.getLocationDescribe()); if (result.isOver()) { //如果是最后一条数据,还需要更改按钮文字 result.setOver(false); //播放时间间隔设置为默认值 threadRunTimeIntervalLong = 1700; lastLatlng = null; countMarker = 0; //恢复开始播放前的状态,此处屏蔽,不恢复 // new Handler().postDelayed(new Runnable() { // @Override // public void run() { // clearAllMarkersToMap(); // for (LocationHistoryBean bean : locationHistoryBeanList) { // addWatchMarkersToMap(bean, false, 0); // } // // } // }, threadRunTimeIntervalLong); options = new PolylineOptions(); } } } /** * 在地图上添加marker * * @param playLocus playLocus */ private void addWatchMarkersToMap(LocusPointBean playLocus, boolean isShowNum, int num) { if (isShowNum) { markerView = View.inflate(mContext, R.layout.map_markers_view, null); markerNumTv = (TextView) markerView.findViewById(R.id.marker_num_tv); markerNumTv.setText(String.valueOf(num)); locusMarkerOption.icon(BitmapDescriptorFactory.fromView(markerView)); } else { 
locusMarkerOption.icon(BitmapDescriptorFactory.fromResource(R.drawable.img_current_location)); } locusMarkerOption .snippet(playLocus.getLocationDescribe()) .position(new LatLng(playLocus.getLat(), playLocus.getLng())) .draggable(false); aMap.addMarker(locusMarkerOption); } /** * 清除地图上所有的marker */ public void clearAllMarkersToMap() { aMap.clear(); } @Override protected void onPause() { super.onPause(); amapMapView.onPause(); if (playLocus != null) { playLocus.suspend(); } } @Override protected void onResume() { super.onResume(); amapMapView.onResume(); if (playLocus != null) { playLocus.resume(); } } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); amapMapView.onSaveInstanceState(outState); } @Override protected void onDestroy() { if (playLocus != null) { playLocus.endThread(); playLocus = null; } if (mExecutorService != null) { mExecutorService.shutdown(); mExecutorService.shutdownNow(); mExecutorService = null; } amapMapView.onDestroy(); LogUtils.d("locus destroy 轨迹界面 "); super.onDestroy(); } }
Tomorrowhi/THDemo
THDemo/app/src/main/java/com/tomorrowhi/thdemo/activitys/AMapLocusActivity.java
Java
apache-2.0
14,930
# coding=utf-8 import json from django.utils.translation import ugettext_lazy as _ from django.http import HttpResponse import django.views from django.template import defaultfilters as template_filters from horizon import tables from horizon import exceptions from cloudkittydashboard.api import cloudkitty as api from openstack_dashboard.api import keystone from cloudkittydashboard.dashboards.project.billing_overview import tables as project_tables import time from datetime import date, timedelta, datetime import calendar from django.http import JsonResponse,HttpResponse import json import xlsxwriter import StringIO import logging LOG = logging.getLogger(__name__) def detail(request, org_id): if org_id == None: org_id = get_tenant_id(request) try: details = api.cloudkittyclient(request).billings.list_services_cost(get_month(request), org_id) except Exception: details = [] exceptions.handle(request, _('Unable to retrieve billing list.')) return HttpResponse(json.dumps(details),content_type="application/json") class IndexView(tables.DataTableView): # A very simple class-based view... 
template_name = "project/billing_overview/index.html" table_class = project_tables.BillingOverviewTable page_title = _("Billing Overview") def get_context_data(self, **kwargs): context = super(IndexView, self).get_context_data(**kwargs) context["tenant_id"] = get_tenant_id(self.request) context["selected_month"] = get_month(self.request) context["organizations"] = get_tenant_list(self.request) year = time.strftime("%Y",time.localtime()) month = time.strftime("%m",time.localtime()) if int(month) == 1: last_month = 12 last_year = int(year) - 1 else: last_month = int(month) - 1 last_year = year try: context["year_begin"] = str((int(year)-1)) + "/" + str((int(month))) context["year_end"] = str(last_year) + "/" + str(last_month) # get last 12 months total cost total_year = api.cloudkittyclient(self.request).billings.get_consumer_trends("month", 12, get_tenant_id(self.request)) year_sum = 0 for billing_month in total_year["consumerTrends"]: year_sum += billing_month["cost"] context["billing_year"] = year_sum #get current month cost context["time_current_month"] = year+"/"+month services_rate_list = api.cloudkittyclient(self.request).billings.list_services_cost(year+"-"+month, get_tenant_id(self.request)) current_sum = 0 for rate in services_rate_list["servicesRate"]: current_sum += rate["rate"] context["billing_current_month"] = current_sum #get last month cost context["time_last_month"] = str(last_year)+"/"+str(last_month) context["billing_last_month"] = api.cloudkittyclient(self.request).billings.get_consumer_trends("month", 1, get_tenant_id(self.request))["consumerTrends"][0]["cost"] except Exception: exceptions.handle(self.request,_("Unable to retrieve month cost")) today = date.today() context["last_12_months"] = last_12_months() return context; def get_data(self): try: billings = api.cloudkittyclient(self.request).billings.get_total_cost(get_month(self.request), get_tenant_id(self.request))["totals"] except Exception: billings = [] exceptions.handle(self.request, 
_('Unable to retrieve billing list.')) return billings class ReportView(django.views.generic.TemplateView): def get(self,request,*args,**kwargs): tenant_id = get_tenant_id(self.request) billing_month = get_month(self.request) tenants = get_tenant_list(self.request) for tenant in tenants: if tenant.id == tenant_id: tenant_name = tenant.name break reports = api.cloudkittyclient(self.request).billings.list_month_report(tenant_id,billing_month) output = StringIO.StringIO() workbook = xlsxwriter.Workbook(output) month_sheet = workbook.add_worksheet(tenant_name) #设置列宽度 month_sheet.set_column('A:Z',9) #表头 head = (u'部门',u'资源', u'1月',u'2月',u'3月', u'1Q合计', u'4月',u'5月',u'6月', u'2Q合计', u'上半年计', u'7月',u'8月',u'9月', u'3Q合计', u'10月',u'11月',u'12月',u'4Q合计',u'下半年计',u'全年合计' ) # 设置表头字符串和格式 head_format = workbook.add_format({ 'bold':True, 'font_size':20, 'font_name':'Microsoft YaHei' }) row = 1 col = 0 head_str = billing_month.split('-')[0] + u'年度月别计费一览表' head_str1 = u'资源及使用费用情况' month_sheet.write(row,col,head_str,head_format) row += 1 month_sheet.write(row,col,u'如需查看季、年度合计,请在月份对应位置取消隐藏') row += 2 month_sheet.write(row,col,head_str1,head_format) explain_format = workbook.add_format({'align':'right'}) year_month = billing_month.split('-') if billing_month == template_filters.date(date.today(), "Y-m"): tab_date = u'制表日期:%d月%d日' %(int(year_month[1]),date.today().day-1) else: tab_date = u'制表日期:%d月%d日' %(int(year_month[1]),calendar.monthrange(int(year_month[0]),int(year_month[1]))[1]) month_sheet.write(row,len(head)-1,u'单位:元 ' + tab_date, explain_format) row += 1 col = 0 head2_format = workbook.add_format({ 'bold':True, 'align':'center', 'valign':'vcenter', 'bg_color':'#D8E4BC', 'left':1, 'font_name':'Microsoft YaHei' }) #设置行高 month_sheet.set_row(row,30) for index_str in head: month_sheet.write(row,col,index_str,head2_format) col += 1 row += 1 month_sheet.set_column('A:A',15) #资源和合计所占行数 names = ['Compute','Volume',u'合计'] even_format = workbook.add_format({ 'border':1, 'font_name':'Microsoft 
YaHei', 'num_format': '#,##0.00' }) odd_format=workbook.add_format({ 'border':1, 'font_name':'Microsoft YaHei', 'bg_color':'#D9D9D9', 'num_format': '#,##0.00' }) resource_total_rows = 3 # 处理每个部门 merge_format = workbook.add_format({ 'bold':True, 'font_name':'Microsoft YaHei', 'font_size':14, 'align':'center', 'valign':'vcenter', 'border':1 }) for depart in reports['departs']: col = 1 for index,name in enumerate(names): if index % 2 != 0: month_sheet.set_row(row+index,None,odd_format) else: month_sheet.set_row(row+index,None,even_format) month_sheet.write(row+index,col,name) month_sheet.merge_range(row,0,row+resource_total_rows-1,0,depart['tenant_name'],merge_format) tmp_row = row write_col = col + 1 for month_report in depart['month_reports']: for res_tpye in month_report['res_types']: if res_tpye['res_type'] == "compute": write_row = tmp_row elif res_tpye['res_type'] == "volume": write_row = tmp_row + 1 month_sheet.write(write_row,write_col,res_tpye['rate']) write_col += 1 month = int(month_report["month"].split('-')[1]) if month == 3: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,write_col,'=SUM(C' + str(index_row+1) + ':E' + str(index_row+1) + ')') write_col += 1 elif month == 6: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,write_col,'=SUM(G' + str(index_row+1) + ':I' + str(index_row+1) + ')') month_sheet.write(index_row,write_col+1,'=SUM(F' + str(index_row+1) + '+J' + str(index_row+1) + ')') write_col += 2 elif month == 9: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,write_col,'=SUM(L' + str(index_row+1) + ':N' + str(index_row+1) + ')') write_col += 1 elif month == 12: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,write_col,'=SUM(P' + str(index_row+1) + ':R' + str(index_row+1) + ')') month_sheet.write(index_row,write_col+1,'=SUM(O' + str(index_row+1) + 
'+S' + str(index_row+1) + ')') month_sheet.write(index_row,write_col+2,'=SUM(K' + str(index_row+1) + '+T' + str(index_row+1) + ')') write_col += 3 #处理后面的年统计和季度统计 for month in range(1,13): if month == 3: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,5,'=SUM(C' + str(index_row+1) + ':E' + str(index_row+1) + ')') elif month == 6: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,9,'=SUM(G' + str(index_row+1) + ':I' + str(index_row+1) + ')') month_sheet.write(index_row,10,'=SUM(F' + str(index_row+1) + '+J' + str(index_row+1) + ')') elif month == 9: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,14,'=SUM(L' + str(index_row+1) + ':N' + str(index_row+1) + ')') elif month == 12: for index in range(resource_total_rows-1): index_row = tmp_row + index month_sheet.write(index_row,18,'=SUM(P' + str(index_row+1) + ':R' + str(index_row+1) + ')') month_sheet.write(index_row,19,'=SUM(O' + str(index_row+1) + '+S' + str(index_row+1) + ')') month_sheet.write(index_row,20,'=SUM(K' + str(index_row+1) + '+T' + str(index_row+1) + ')') month_sheet.write_array_formula('C' + str(tmp_row + resource_total_rows) + ':U' + str(tmp_row + resource_total_rows ), '{=C' + str(tmp_row + 1) + ':U' + str(tmp_row + 1) + '+' \ + 'C' + str(tmp_row + resource_total_rows - 1) + ':U' + str(tmp_row + resource_total_rows - 1) + '}') #跳过资源种类数目和合计的行 row = row + resource_total_rows #部门之间中间隔一行 row += 1 month_sheet.print_area(0,0,row,len(head)-1) month_sheet.fit_to_pages(1,1) month_sheet.freeze_panes(0,1) month_sheet.hide_zero() month_sheet.set_column('F:F',None,None,{'hidden':1}) month_sheet.set_column('J:J',None,None,{'hidden':1}) month_sheet.set_column('K:K',None,None,{'hidden':1}) month_sheet.set_column('O:O',None,None,{'hidden':1}) month_sheet.set_column('S:S',None,None,{'hidden':1}) month_sheet.set_column('T:T',None,None,{'hidden':1}) 
month_sheet.set_column('V:XFD',None,None,{'hidden':1}) workbook.close() output.seek(0) response = HttpResponse(output.read()) response['Content-type']="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" response['Content-Disposition'] = "attachment; filename=" + str(billing_month) +"-report.xlsx" return response class TrendsView(django.views.generic.TemplateView): def get(self, request, *args, **kwargs): tenant_id = request.GET.get("tenant_id", request.user.tenant_id) time_series = request.GET.get("time_series", "month") try: trends = api.cloudkittyclient(self.request).billings.get_consumer_trends(time_series, 12, get_tenant_id(self.request)) except Exception: trends = {} exceptions.handle(request,_("Unable to retrieve trend data")) # convert time and cost to x and y for trend in trends["consumerTrends"]: if time_series == u'month': trend.update(x=time.strftime('%Y-%m-%dT%H:%M:%S%Z',time.strptime(trend.pop("time"),"%Y-%m")),y=trend.pop("cost")) elif time_series == u'day': trend.update(x=time.strftime('%Y-%m-%dT%H:%M:%S%Z',time.strptime(trend.pop("time"),"%Y-%m-%d")),y=trend.pop("cost")) ret = {'series': [{ 'name': 'admin', 'unit': 'CNY', 'time_series': time_series, 'data': trends["consumerTrends"] }], 'settings': { 'verbose_date': False }} return HttpResponse(json.dumps(ret), content_type='application/json') def get_month(request): try: month = request.GET.get("month", "%s-%s" % (date.today().year, date.today().month)) return month except Exception: return None def get_tenant_id(request): return request.GET.get("tenant_id", request.user.tenant_id) def get_tenant_list(request): return sorted(request.user.authorized_tenants, reverse=False, key=lambda x: getattr(x, "sortNumber", 0)) def last_12_months(): def back_months(dt, months): month = (dt.month - months) or 12 year = dt.year - month / 12 return dt.replace(year=year, month=month, day=1) date = datetime.today() date_choices = [date] for i in range(1, 12): date = back_months(date, 1) 
date_choices.append(date) return date_choices
FNST-OpenStack/cloudkitty-dashboard
cloudkittydashboard/dashboards/project/billing_overview/views.py
Python
apache-2.0
14,849
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto package com.google.cloud.compute.v1; /** * * * <pre> * A request message for Projects.GetXpnHost. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.GetXpnHostProjectRequest} */ public final class GetXpnHostProjectRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetXpnHostProjectRequest) GetXpnHostProjectRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GetXpnHostProjectRequest.newBuilder() to construct. 
private GetXpnHostProjectRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetXpnHostProjectRequest() { project_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetXpnHostProjectRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetXpnHostProjectRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 1820481738: { java.lang.String s = input.readStringRequireUtf8(); project_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetXpnHostProjectRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetXpnHostProjectRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.cloud.compute.v1.GetXpnHostProjectRequest.class, com.google.cloud.compute.v1.GetXpnHostProjectRequest.Builder.class); } public static final int PROJECT_FIELD_NUMBER = 227560217; private volatile java.lang.Object project_; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.GetXpnHostProjectRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.GetXpnHostProjectRequest other = (com.google.cloud.compute.v1.GetXpnHostProjectRequest) obj; if (!getProject().equals(other.getProject())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.compute.v1.GetXpnHostProjectRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for Projects.GetXpnHost. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.GetXpnHostProjectRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetXpnHostProjectRequest) com.google.cloud.compute.v1.GetXpnHostProjectRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetXpnHostProjectRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetXpnHostProjectRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.GetXpnHostProjectRequest.class, com.google.cloud.compute.v1.GetXpnHostProjectRequest.Builder.class); } // Construct using com.google.cloud.compute.v1.GetXpnHostProjectRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); project_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetXpnHostProjectRequest_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.GetXpnHostProjectRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.GetXpnHostProjectRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.GetXpnHostProjectRequest build() { com.google.cloud.compute.v1.GetXpnHostProjectRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.GetXpnHostProjectRequest buildPartial() { com.google.cloud.compute.v1.GetXpnHostProjectRequest result = new com.google.cloud.compute.v1.GetXpnHostProjectRequest(this); result.project_ = project_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } 
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.GetXpnHostProjectRequest) { return mergeFrom((com.google.cloud.compute.v1.GetXpnHostProjectRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.GetXpnHostProjectRequest other) { if (other == com.google.cloud.compute.v1.GetXpnHostProjectRequest.getDefaultInstance()) return this; if (!other.getProject().isEmpty()) { project_ = other.project_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.compute.v1.GetXpnHostProjectRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.compute.v1.GetXpnHostProjectRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. 
*/ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for project to set. * @return This builder for chaining. 
*/ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetXpnHostProjectRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetXpnHostProjectRequest) private static final com.google.cloud.compute.v1.GetXpnHostProjectRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetXpnHostProjectRequest(); } public static com.google.cloud.compute.v1.GetXpnHostProjectRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GetXpnHostProjectRequest> PARSER = new com.google.protobuf.AbstractParser<GetXpnHostProjectRequest>() { @java.lang.Override public GetXpnHostProjectRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetXpnHostProjectRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<GetXpnHostProjectRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GetXpnHostProjectRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.GetXpnHostProjectRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/java-compute
proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetXpnHostProjectRequest.java
Java
apache-2.0
21,025
<?php use Illuminate\Database\Schema\Blueprint; use Illuminate\Database\Migrations\Migration; class AddScoresTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('scores', function (Blueprint $table) { $table->increments('id'); $table->string('name'); $table->string('email'); $table->integer('score'); $table->timestamps(); }); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::drop('scores'); } }
JohannesSanders/Snake-Angular-Laravel
snake/database/migrations/2015_08_14_144858_add_scores_table.php
PHP
apache-2.0
650
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. /** * AssignPrivateIpAddressesSetItemRequestType.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST) */ package com.amazon.ec2; /** * AssignPrivateIpAddressesSetItemRequestType bean class */ public class AssignPrivateIpAddressesSetItemRequestType implements org.apache.axis2.databinding.ADBBean{ /* This type was generated from the piece of schema that had name = AssignPrivateIpAddressesSetItemRequestType Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/ Namespace Prefix = ns1 */ private static java.lang.String generatePrefix(java.lang.String namespace) { if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){ return "ns1"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * field for PrivateIpAddress */ protected java.lang.String localPrivateIpAddress ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getPrivateIpAddress(){ return localPrivateIpAddress; } /** * Auto generated setter method * @param param PrivateIpAddress */ public void setPrivateIpAddress(java.lang.String param){ this.localPrivateIpAddress=param; } /** * 
isReaderMTOMAware * @return true if the reader supports MTOM */ public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) { boolean isReaderMTOMAware = false; try{ isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE)); }catch(java.lang.IllegalArgumentException e){ isReaderMTOMAware = false; } return isReaderMTOMAware; } /** * * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getOMElement ( final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{ org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,parentQName){ public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { AssignPrivateIpAddressesSetItemRequestType.this.serialize(parentQName,factory,xmlWriter); } }; return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl( parentQName,factory,dataSource); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ serialize(parentQName,factory,xmlWriter,false); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ java.lang.String prefix = null; java.lang.String namespace = null; prefix = parentQName.getPrefix(); namespace = parentQName.getNamespaceURI(); if ((namespace != null) && (namespace.trim().length() > 0)) { java.lang.String 
writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, parentQName.getLocalPart()); } else { if (prefix == null) { prefix = generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } else { xmlWriter.writeStartElement(parentQName.getLocalPart()); } if (serializeType){ java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){ writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", namespacePrefix+":AssignPrivateIpAddressesSetItemRequestType", xmlWriter); } else { writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", "AssignPrivateIpAddressesSetItemRequestType", xmlWriter); } } namespace = "http://ec2.amazonaws.com/doc/2012-08-15/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"privateIpAddress", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"privateIpAddress"); } } else { xmlWriter.writeStartElement("privateIpAddress"); } if (localPrivateIpAddress==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("privateIpAddress cannot be null!!"); }else{ xmlWriter.writeCharacters(localPrivateIpAddress); } xmlWriter.writeEndElement(); xmlWriter.writeEndElement(); } /** * Util method to write an attribute with the ns prefix */ private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (xmlWriter.getPrefix(namespace) == null) { 
xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace,attName,attValue); } /** * Util method to write an attribute without the ns prefix */ private void writeAttribute(java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (namespace.equals("")) { xmlWriter.writeAttribute(attName,attValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace,attName,attValue); } } /** * Util method to write an attribute without the ns prefix */ private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName, javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } } /** * method to handle Qnames */ private void writeQName(javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String namespaceURI = qname.getNamespaceURI(); if (namespaceURI != null) { java.lang.String prefix = xmlWriter.getPrefix(namespaceURI); if (prefix == null) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ 
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } else { // i.e this is the default namespace xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } else { xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } private void writeQNames(javax.xml.namespace.QName[] qnames, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (qnames != null) { // we have to store this data until last moment since it is not possible to write any // namespace data after writing the charactor data java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer(); java.lang.String namespaceURI = null; java.lang.String prefix = null; for (int i = 0; i < qnames.length; i++) { if (i > 0) { stringToWrite.append(" "); } namespaceURI = qnames[i].getNamespaceURI(); if (namespaceURI != null) { prefix = xmlWriter.getPrefix(namespaceURI); if ((prefix == null) || (prefix.length() == 0)) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } xmlWriter.writeCharacters(stringToWrite.toString()); } } /** * Register a namespace prefix */ private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException { java.lang.String prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); while 
(xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) { prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } return prefix; } /** * databinding method to get an XML representation of this object * */ public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException{ java.util.ArrayList elementList = new java.util.ArrayList(); java.util.ArrayList attribList = new java.util.ArrayList(); elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "privateIpAddress")); if (localPrivateIpAddress != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localPrivateIpAddress)); } else { throw new org.apache.axis2.databinding.ADBException("privateIpAddress cannot be null!!"); } return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray()); } /** * Factory class that keeps the parse method */ public static class Factory{ /** * static method to create the object * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element * Postcondition: If this object is an element, the reader is positioned at its end element * If this object is a complex type, the reader is positioned at the end element of its outer element */ public static AssignPrivateIpAddressesSetItemRequestType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{ AssignPrivateIpAddressesSetItemRequestType object = new AssignPrivateIpAddressesSetItemRequestType(); int event; java.lang.String nillableValue = null; java.lang.String prefix =""; java.lang.String namespaceuri =""; 
try { while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){ java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (fullTypeName!=null){ java.lang.String nsPrefix = null; if (fullTypeName.indexOf(":") > -1){ nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":")); } nsPrefix = nsPrefix==null?"":nsPrefix; java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1); if (!"AssignPrivateIpAddressesSetItemRequestType".equals(type)){ //find namespace for the prefix java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix); return (AssignPrivateIpAddressesSetItemRequestType)com.amazon.ec2.ExtensionMapper.getTypeObject( nsUri,type,reader); } } } // Note all attributes that were handled. Used to differ normal attributes // from anyAttributes. java.util.Vector handledAttributes = new java.util.Vector(); reader.next(); while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","privateIpAddress").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setPrivateIpAddress( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement()) // A start element we are not expecting indicates a trailing invalid property throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } catch 
(javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }//end of factory class }
mufaddalq/cloudstack-datera-driver
awsapi/src/com/amazon/ec2/AssignPrivateIpAddressesSetItemRequestType.java
Java
apache-2.0
22,350
<#escape x as x?html> <p> <b>Hello, ${user.getFirstName()} ${user.getLastName()}.</b> </p> <p> Welcome to ${config.get('application.titleCommon')}. </p> <p> Please confirm your email address by following this link in your browser:<br/><br/> <a href="${confirmUrl}">confirm your email</a>. </p> <p> You may also confirm your email by entering the following code on the confirmation page:<br /> <b>${verificationCode}</b> </p> <p> Once your e-mail address has been confirmed, you will be allowed to login into ${config.get('application.titleCommon')}. </p> <p> Thank you.<br/> <a href="${indexUrl}">${config.get('application.titleCommon')}</a> </p> </#escape>
janenik/ninja-sso-um
src/main/java/views/sso/mail/signUpConfirmation.en.ftl.html
HTML
apache-2.0
791
/* * Copyright 2009, The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.commands.monkey; import android.content.Context; import android.os.IPowerManager; import android.os.RemoteException; import android.os.ServiceManager; import android.os.SystemClock; import android.util.Log; import android.view.KeyCharacterMap; import android.view.KeyEvent; import android.view.MotionEvent; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.lang.Integer; import java.lang.NumberFormatException; import java.net.InetAddress; import java.net.ServerSocket; import java.net.Socket; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.StringTokenizer; /** * An Event source for getting Monkey Network Script commands from * over the network. */ public class MonkeySourceNetwork implements MonkeyEventSource { private static final String TAG = "MonkeyStub"; /* The version of the monkey network protocol */ public static final int MONKEY_NETWORK_VERSION = 2; private static DeferredReturn deferredReturn; /** * ReturnValue from the MonkeyCommand that indicates whether the * command was sucessful or not. 
*/ public static class MonkeyCommandReturn { private final boolean success; private final String message; public MonkeyCommandReturn(boolean success) { this.success = success; this.message = null; } public MonkeyCommandReturn(boolean success, String message) { this.success = success; this.message = message; } boolean hasMessage() { return message != null; } String getMessage() { return message; } boolean wasSuccessful() { return success; } } public final static MonkeyCommandReturn OK = new MonkeyCommandReturn(true); public final static MonkeyCommandReturn ERROR = new MonkeyCommandReturn(false); public final static MonkeyCommandReturn EARG = new MonkeyCommandReturn(false, "Invalid Argument"); /** * Interface that MonkeyCommands must implement. */ public interface MonkeyCommand { /** * Translate the command line into a sequence of MonkeyEvents. * * @param command the command line. * @param queue the command queue. * @return MonkeyCommandReturn indicating what happened. */ MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue); } /** * Command to simulate closing and opening the keyboard. */ private static class FlipCommand implements MonkeyCommand { // flip open // flip closed public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() > 1) { String direction = command.get(1); if ("open".equals(direction)) { queue.enqueueEvent(new MonkeyFlipEvent(true)); return OK; } else if ("close".equals(direction)) { queue.enqueueEvent(new MonkeyFlipEvent(false)); return OK; } } return EARG; } } /** * Command to send touch events to the input system. 
*/ private static class TouchCommand implements MonkeyCommand { // touch [down|up|move] [x] [y] // touch down 120 120 // touch move 140 140 // touch up 140 140 public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 4) { String actionName = command.get(1); int x = 0; int y = 0; try { x = Integer.parseInt(command.get(2)); y = Integer.parseInt(command.get(3)); } catch (NumberFormatException e) { // Ok, it wasn't a number Log.e(TAG, "Got something that wasn't a number", e); return EARG; } // figure out the action int action = -1; if ("down".equals(actionName)) { action = MotionEvent.ACTION_DOWN; } else if ("up".equals(actionName)) { action = MotionEvent.ACTION_UP; } else if ("move".equals(actionName)) { action = MotionEvent.ACTION_MOVE; } if (action == -1) { Log.e(TAG, "Got a bad action: " + actionName); return EARG; } queue.enqueueEvent(new MonkeyTouchEvent(action) .addPointer(0, x, y)); return OK; } return EARG; } } /** * Command to send hover events to the input system. 
*/ private static class HoverCommand implements MonkeyCommand { // hover [enter|exit|move] [x] [y] // hover enter 120 120 // hover move 140 140 // hover exit 140 140 public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 4) { String actionName = command.get(1); int x = 0; int y = 0; try { x = Integer.parseInt(command.get(2)); y = Integer.parseInt(command.get(3)); } catch (NumberFormatException e) { // Ok, it wasn't a number Log.e(TAG, "Got something that wasn't a number", e); return EARG; } // figure out the action int action = -1; if ("enter".equals(actionName)) { action = MotionEvent.ACTION_HOVER_ENTER; } else if ("exit".equals(actionName)) { action = MotionEvent.ACTION_HOVER_EXIT; } else if ("move".equals(actionName)) { action = MotionEvent.ACTION_HOVER_MOVE; } if (action == -1) { Log.e(TAG, "Got a bad action: " + actionName); return EARG; } queue.enqueueEvent(new MonkeyHoverEvent(action) .addPointer(0, x, y)); return OK; } return EARG; } } /** * Command to send Trackball events to the input system. */ private static class TrackballCommand implements MonkeyCommand { // trackball [dx] [dy] // trackball 1 0 -- move right // trackball -1 0 -- move left public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 3) { int dx = 0; int dy = 0; try { dx = Integer.parseInt(command.get(1)); dy = Integer.parseInt(command.get(2)); } catch (NumberFormatException e) { // Ok, it wasn't a number Log.e(TAG, "Got something that wasn't a number", e); return EARG; } queue.enqueueEvent(new MonkeyTrackballEvent(MotionEvent.ACTION_MOVE) .addPointer(0, dx, dy)); return OK; } return EARG; } } /** * Command to send Key events to the input system. 
*/ private static class KeyCommand implements MonkeyCommand { // key [down|up] [keycode] // key down 82 // key up 82 public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 3) { int keyCode = getKeyCode(command.get(2)); if (keyCode < 0) { // Ok, you gave us something bad. Log.e(TAG, "Can't find keyname: " + command.get(2)); return EARG; } Log.d(TAG, "keycode: " + keyCode); int action = -1; if ("down".equals(command.get(1))) { action = KeyEvent.ACTION_DOWN; } else if ("up".equals(command.get(1))) { action = KeyEvent.ACTION_UP; } if (action == -1) { Log.e(TAG, "got unknown action."); return EARG; } queue.enqueueEvent(new MonkeyKeyEvent(action, keyCode)); return OK; } return EARG; } } /** * Get an integer keycode value from a given keyname. * * @param keyName the key name to get the code for * @return the integer keycode value, or -1 on error. */ private static int getKeyCode(String keyName) { int keyCode = -1; try { keyCode = Integer.parseInt(keyName); } catch (NumberFormatException e) { // Ok, it wasn't a number, see if we have a // keycode name for it keyCode = MonkeySourceRandom.getKeyCode(keyName); if (keyCode == KeyEvent.KEYCODE_UNKNOWN) { // OK, one last ditch effort to find a match. // Build the KEYCODE_STRING from the string // we've been given and see if that key // exists. This would allow you to do "key // down menu", for example. keyCode = MonkeySourceRandom.getKeyCode("KEYCODE_" + keyName.toUpperCase()); if (keyCode == KeyEvent.KEYCODE_UNKNOWN) { // Still unknown return -1; } } } return keyCode; } /** * Command to put the Monkey to sleep. 
*/ private static class SleepCommand implements MonkeyCommand { // sleep 2000 public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 2) { int sleep = -1; String sleepStr = command.get(1); try { sleep = Integer.parseInt(sleepStr); } catch (NumberFormatException e) { Log.e(TAG, "Not a number: " + sleepStr, e); return EARG; } queue.enqueueEvent(new MonkeyThrottleEvent(sleep)); return OK; } return EARG; } } /** * Command to type a string */ private static class TypeCommand implements MonkeyCommand { // wake public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 2) { String str = command.get(1); char[] chars = str.toString().toCharArray(); // Convert the string to an array of KeyEvent's for // the built in keymap. KeyCharacterMap keyCharacterMap = KeyCharacterMap. load(KeyCharacterMap.VIRTUAL_KEYBOARD); KeyEvent[] events = keyCharacterMap.getEvents(chars); // enqueue all the events we just got. for (KeyEvent event : events) { queue.enqueueEvent(new MonkeyKeyEvent(event)); } return OK; } return EARG; } } /** * Command to wake the device up */ private static class WakeCommand implements MonkeyCommand { // wake public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (!wake()) { return ERROR; } return OK; } } /** * Command to "tap" at a location (Sends a down and up touch * event). 
*/ private static class TapCommand implements MonkeyCommand { // tap x y public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 3) { int x = 0; int y = 0; try { x = Integer.parseInt(command.get(1)); y = Integer.parseInt(command.get(2)); } catch (NumberFormatException e) { // Ok, it wasn't a number Log.e(TAG, "Got something that wasn't a number", e); return EARG; } queue.enqueueEvent(new MonkeyTouchEvent(MotionEvent.ACTION_DOWN) .addPointer(0, x, y)); queue.enqueueEvent(new MonkeyTouchEvent(MotionEvent.ACTION_UP) .addPointer(0, x, y)); return OK; } return EARG; } } /** * Command to "float" at a location (Sends a enter and exit hover * event with t ms sleep between them). */ private static class FloatCommand implements MonkeyCommand { // float x y t public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 4) { int x = 0; int y = 0; long t = 0; try { x = Integer.parseInt(command.get(1)); y = Integer.parseInt(command.get(2)); t = Long.parseLong(command.get(3)); } catch (NumberFormatException e) { // Ok, it wasn't a number Log.e(TAG, "Got something that wasn't a number", e); return EARG; } // Set the default parameters long enterTime = SystemClock.uptimeMillis(); queue.enqueueEvent(new MonkeyHoverEvent(MotionEvent.ACTION_HOVER_ENTER) .setDownTime(enterTime) .setEventTime(enterTime) .addPointer(0, x, y)); queue.enqueueEvent(new MonkeyWaitEvent(t)); queue.enqueueEvent(new MonkeyHoverEvent(MotionEvent.ACTION_HOVER_EXIT) .setDownTime(enterTime + t) .setEventTime(enterTime + t) .addPointer(0, x, y)); return OK; } return EARG; } } /** * Command to "press" a buttons (Sends an up and down key event.) 
*/ private static class PressCommand implements MonkeyCommand { // press keycode public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() == 2) { int keyCode = getKeyCode(command.get(1)); if (keyCode < 0) { // Ok, you gave us something bad. Log.e(TAG, "Can't find keyname: " + command.get(1)); return EARG; } queue.enqueueEvent(new MonkeyKeyEvent(KeyEvent.ACTION_DOWN, keyCode)); queue.enqueueEvent(new MonkeyKeyEvent(KeyEvent.ACTION_UP, keyCode)); return OK; } return EARG; } } /** * Command to defer the return of another command until the given event occurs. * deferreturn takes three arguments. It takes an event to wait for (e.g. waiting for the * device to display a different activity would the "screenchange" event), a * timeout, which is the number of microseconds to wait for the event to occur, and it takes * a command. The command can be any other Monkey command that can be issued over the network * (e.g. press KEYCODE_HOME). deferreturn will then run this command, return an OK, wait for * the event to occur and return the deferred return value when either the event occurs or * when the timeout is reached (whichever occurs first). Note that there is no difference * between an event occurring and the timeout being reached; the client will have to verify * that the change actually occured. * * Example: * deferreturn screenchange 1000 press KEYCODE_HOME * This command will press the home key on the device and then wait for the screen to change * for up to one second. Either the screen will change, and the results fo the key press will * be returned to the client, or the timeout will be reached, and the results for the key * press will be returned to the client. 
*/ private static class DeferReturnCommand implements MonkeyCommand { // deferreturn [event] [timeout (ms)] [command] // deferreturn screenchange 100 tap 10 10 public MonkeyCommandReturn translateCommand(List<String> command, CommandQueue queue) { if (command.size() > 3) { String event = command.get(1); int eventId; if (event.equals("screenchange")) { eventId = DeferredReturn.ON_WINDOW_STATE_CHANGE; } else { return EARG; } long timeout = Long.parseLong(command.get(2)); MonkeyCommand deferredCommand = COMMAND_MAP.get(command.get(3)); if (deferredCommand != null) { List<String> parts = command.subList(3, command.size()); MonkeyCommandReturn ret = deferredCommand.translateCommand(parts, queue); deferredReturn = new DeferredReturn(eventId, ret, timeout); return OK; } } return EARG; } } /** * Force the device to wake up. * * @return true if woken up OK. */ private static final boolean wake() { IPowerManager pm = IPowerManager.Stub.asInterface(ServiceManager.getService(Context.POWER_SERVICE)); try { pm.wakeUp(SystemClock.uptimeMillis()); } catch (RemoteException e) { Log.e(TAG, "Got remote exception", e); return false; } return true; } // This maps from command names to command implementations. 
private static final Map<String, MonkeyCommand> COMMAND_MAP = new HashMap<String, MonkeyCommand>(); static { // Add in all the commands we support COMMAND_MAP.put("flip", new FlipCommand()); COMMAND_MAP.put("touch", new TouchCommand()); COMMAND_MAP.put("hover", new HoverCommand()); COMMAND_MAP.put("trackball", new TrackballCommand()); COMMAND_MAP.put("key", new KeyCommand()); COMMAND_MAP.put("sleep", new SleepCommand()); COMMAND_MAP.put("wake", new WakeCommand()); COMMAND_MAP.put("tap", new TapCommand()); COMMAND_MAP.put("float", new FloatCommand()); COMMAND_MAP.put("press", new PressCommand()); COMMAND_MAP.put("type", new TypeCommand()); COMMAND_MAP.put("listvar", new MonkeySourceNetworkVars.ListVarCommand()); COMMAND_MAP.put("getvar", new MonkeySourceNetworkVars.GetVarCommand()); COMMAND_MAP.put("listviews", new MonkeySourceNetworkViews.ListViewsCommand()); COMMAND_MAP.put("queryview", new MonkeySourceNetworkViews.QueryViewCommand()); COMMAND_MAP.put("getrootview", new MonkeySourceNetworkViews.GetRootViewCommand()); COMMAND_MAP.put("getviewswithtext", new MonkeySourceNetworkViews.GetViewsWithTextCommand()); COMMAND_MAP.put("deferreturn", new DeferReturnCommand()); } // QUIT command private static final String QUIT = "quit"; // DONE command private static final String DONE = "done"; // command response strings private static final String OK_STR = "OK"; private static final String ERROR_STR = "ERROR"; public static interface CommandQueue { /** * Enqueue an event to be returned later. This allows a * command to return multiple events. Commands using the * command queue still have to return a valid event from their * translateCommand method. The returned command will be * executed before anything put into the queue. * * @param e the event to be enqueued. */ public void enqueueEvent(MonkeyEvent e); }; // Queue of Events to be processed. This allows commands to push // multiple events into the queue to be processed. 
private static class CommandQueueImpl implements CommandQueue{ private final Queue<MonkeyEvent> queuedEvents = new LinkedList<MonkeyEvent>(); public void enqueueEvent(MonkeyEvent e) { queuedEvents.offer(e); } /** * Get the next queued event to excecute. * * @return the next event, or null if there aren't any more. */ public MonkeyEvent getNextQueuedEvent() { return queuedEvents.poll(); } }; // A holder class for a deferred return value. This allows us to defer returning the success of // a call until a given event has occurred. private static class DeferredReturn { public static final int ON_WINDOW_STATE_CHANGE = 1; private int event; private MonkeyCommandReturn deferredReturn; private long timeout; public DeferredReturn(int event, MonkeyCommandReturn deferredReturn, long timeout) { this.event = event; this.deferredReturn = deferredReturn; this.timeout = timeout; } /** * Wait until the given event has occurred before returning the value. * @return The MonkeyCommandReturn from the command that was deferred. */ public MonkeyCommandReturn waitForEvent() { switch(event) { case ON_WINDOW_STATE_CHANGE: try { synchronized(MonkeySourceNetworkViews.class) { MonkeySourceNetworkViews.class.wait(timeout); } } catch(InterruptedException e) { Log.d(TAG, "Deferral interrupted: " + e.getMessage()); } } return deferredReturn; } }; private final CommandQueueImpl commandQueue = new CommandQueueImpl(); private BufferedReader input; private PrintWriter output; private boolean started = false; private ServerSocket serverSocket; private Socket clientSocket; public MonkeySourceNetwork(int port) throws IOException { // Only bind this to local host. This means that you can only // talk to the monkey locally, or though adb port forwarding. serverSocket = new ServerSocket(port, 0, // default backlog InetAddress.getLocalHost()); } /** * Start a network server listening on the specified port. 
The * network protocol is a line oriented protocol, where each line * is a different command that can be run. * * @param port the port to listen on */ private void startServer() throws IOException { clientSocket = serverSocket.accept(); // At this point, we have a client connected. // Attach the accessibility listeners so that we can start receiving // view events. Do this before wake so we can catch the wake event // if possible. MonkeySourceNetworkViews.setup(); // Wake the device up in preparation for doing some commands. wake(); input = new BufferedReader(new InputStreamReader(clientSocket.getInputStream())); // auto-flush output = new PrintWriter(clientSocket.getOutputStream(), true); } /** * Stop the server from running so it can reconnect a new client. */ private void stopServer() throws IOException { clientSocket.close(); input.close(); output.close(); started = false; } /** * Helper function for commandLineSplit that replaces quoted * charaters with their real values. * * @param input the string to do replacement on. * @return the results with the characters replaced. */ private static String replaceQuotedChars(String input) { return input.replace("\\\"", "\""); } /** * This function splits the given line into String parts. It obey's quoted * strings and returns them as a single part. 
* * "This is a test" -> returns only one element * This is a test -> returns four elements * * @param line the line to parse * @return the List of elements */ private static List<String> commandLineSplit(String line) { ArrayList<String> result = new ArrayList<String>(); StringTokenizer tok = new StringTokenizer(line); boolean insideQuote = false; StringBuffer quotedWord = new StringBuffer(); while (tok.hasMoreTokens()) { String cur = tok.nextToken(); if (!insideQuote && cur.startsWith("\"")) { // begin quote quotedWord.append(replaceQuotedChars(cur)); insideQuote = true; } else if (insideQuote) { // end quote if (cur.endsWith("\"")) { insideQuote = false; quotedWord.append(" ").append(replaceQuotedChars(cur)); String word = quotedWord.toString(); // trim off the quotes result.add(word.substring(1, word.length() - 1)); } else { quotedWord.append(" ").append(replaceQuotedChars(cur)); } } else { result.add(replaceQuotedChars(cur)); } } return result; } /** * Translate the given command line into a MonkeyEvent. * * @param commandLine the full command line given. */ private void translateCommand(String commandLine) { // don't flood log cat. 
// Log.d(TAG, "translateCommand: " + commandLine); List<String> parts = commandLineSplit(commandLine); if (parts.size() > 0) { MonkeyCommand command = COMMAND_MAP.get(parts.get(0)); if (command != null) { MonkeyCommandReturn ret = command.translateCommand(parts, commandQueue); handleReturn(ret); } } } private void handleReturn(MonkeyCommandReturn ret) { if (ret.wasSuccessful()) { if (ret.hasMessage()) { returnOk(ret.getMessage()); } else { returnOk(); } } else { if (ret.hasMessage()) { returnError(ret.getMessage()); } else { returnError(); } } } private MonkeyEvent batchQueuedMotionEvents(MonkeyMotionEvent oldEvent) { if (oldEvent.getAction() == MotionEvent.ACTION_MOVE || oldEvent.getAction() == MotionEvent.ACTION_HOVER_MOVE) { MonkeyEvent queuedEvent = commandQueue.getNextQueuedEvent(); while (queuedEvent != null) { if (queuedEvent instanceof MonkeyMotionEvent) { MonkeyMotionEvent newEvent = (MonkeyMotionEvent) queuedEvent; if (newEvent.getAction() == oldEvent.getAction() && newEvent.getEventType() == oldEvent.getEventType()) { // TODO add new coordinates to old one queuedEvent = commandQueue.getNextQueuedEvent(); continue; } } return queuedEvent; // found new event type, discard old one } } return oldEvent; } public MonkeyEvent getNextEvent() { if (!started) { try { startServer(); } catch (IOException e) { Log.e(TAG, "Got IOException from server", e); return null; } started = true; } // Now, get the next command. This call may block, but that's OK try { while (true) { // Check to see if we have any events queued up. If // we do, use those until we have no more. Then get // more input from the user. MonkeyEvent queuedEvent = commandQueue.getNextQueuedEvent(); if (queuedEvent != null) { // batching move action events if (queuedEvent instanceof MonkeyMotionEvent) { return batchQueuedMotionEvents((MonkeyMotionEvent) queuedEvent); } // dispatch the event return queuedEvent; } // Check to see if we have any returns that have been deferred. 
If so, now that // we've run the queued commands, wait for the given event to happen (or the timeout // to be reached), and handle the deferred MonkeyCommandReturn. if (deferredReturn != null) { Log.d(TAG, "Waiting for event"); MonkeyCommandReturn ret = deferredReturn.waitForEvent(); deferredReturn = null; handleReturn(ret); } String command = input.readLine(); if (command == null) { Log.d(TAG, "Connection dropped."); // Treat this exactly the same as if the user had // ended the session cleanly with a done commant. command = DONE; } if (DONE.equals(command)) { // stop the server so it can accept new connections try { stopServer(); } catch (IOException e) { Log.e(TAG, "Got IOException shutting down!", e); return null; } // return a noop event so we keep executing the main // loop return new MonkeyNoopEvent(); } // Do quit checking here if (QUIT.equals(command)) { // then we're done Log.d(TAG, "Quit requested"); // let the host know the command ran OK returnOk(); return null; } // Do comment checking here. Comments aren't a // command, so we don't echo anything back to the // user. if (command.startsWith("#")) { // keep going continue; } // Translate the command line. This will handle returning error/ok to the user translateCommand(command); } } catch (IOException e) { Log.e(TAG, "Exception: ", e); return null; } } /** * Returns ERROR to the user. */ private void returnError() { output.println(ERROR_STR); } /** * Returns ERROR to the user. * * @param msg the error message to include */ private void returnError(String msg) { output.print(ERROR_STR); output.print(":"); output.println(msg); } /** * Returns OK to the user. */ private void returnOk() { output.println(OK_STR); } /** * Returns OK to the user. * * @param returnValue the value to return from this command. 
*/ private void returnOk(String returnValue) { output.print(OK_STR); output.print(":"); output.println(returnValue); } public void setVerbose(int verbose) { // We're not particualy verbose } public boolean validate() { // we have no pre-conditions to validate return true; } }
leethree/attentive-ui
SuperMonkey/src/com/android/commands/monkey/MonkeySourceNetwork.java
Java
apache-2.0
33,447
<html> <body> Reports calls to static methods or accesses of static fields on the current class which are qualified with the class name. Such qualification is unnecessary, and may be safely removed. <!-- tooltip end --> <p> <small>Powered by InspectionGadgets</small> </body> </html>
consulo/consulo-java
java-analysis-impl/src/main/resources/inspectionDescriptions/UnnecessarilyQualifiedStaticUsage.html
HTML
apache-2.0
283
package agent import ( "flag" ) func (this *Agent) BindFlags() { flag.BoolVar(&this.selfRegister, "self_register", true, "Registers self with the registry.") flag.IntVar(&this.ListenPort, "port", 25657, "Listening port for agent") flag.StringVar(&this.StatusPubsubTopic, "status_topic", "", "Status pubsub topic") flag.BoolVar(&this.EnableUI, "enable_ui", false, "Enables UI") flag.IntVar(&this.DockerUIPort, "dockerui_port", 25658, "Listening port for dockerui") flag.StringVar(&this.UiDocRoot, "ui_docroot", "", "UI DocRoot") }
infradash/dash
pkg/agent/flags.go
GO
apache-2.0
540
using System.Collections.Generic; using System.Net; using System.Net.Http; using System.Net.Http.Formatting; using System.Threading.Tasks; namespace RestSamples.Service { public class ApiServiceWithHttpClient : IApiService { private HttpMessageHandler _messageHandler; public ApiServiceWithHttpClient(HttpMessageHandler handler) { _messageHandler = handler; } public async Task<IEnumerable<string>> GetAllValues() { using (var httpClient = new HttpClient(_messageHandler)) { var httpMessage = new HttpRequestMessage(new HttpMethod("GET"), "http://localhost:59642/api/values"); var response = await httpClient.SendAsync(httpMessage); return await response.Content.ReadAsAsync<IEnumerable<string>>(); } } public async Task<bool> SaveValue(string value) { using (var httpClient = new HttpClient(_messageHandler)) { var httpMessage = new HttpRequestMessage(new HttpMethod("POST"), "http://localhost:59642/api/values"); httpMessage.Content = new ObjectContent(typeof(string), value, new JsonMediaTypeFormatter()); var response = await httpClient.SendAsync(httpMessage); return response.StatusCode == HttpStatusCode.Created; } } } }
chniotis/PlayGround
RestSamples/RestSamples/Service/ApiServiceWithHttpClient.cs
C#
apache-2.0
1,421
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ #include "engine/engine-internal.h" #include "framing/framing.h" #include "sasl/sasl-internal.h" #include "ssl/ssl-internal.h" #include "autodetect.h" #include "protocol.h" #include "dispatch_actions.h" #include "proton/event.h" #include "platform.h" #include "platform_fmt.h" #include "../log_private.h" #include <stdlib.h> #include <string.h> #include <assert.h> #include <stdarg.h> #include <stdio.h> static ssize_t transport_consume(pn_transport_t *transport); // delivery buffers void pn_delivery_map_init(pn_delivery_map_t *db, pn_sequence_t next) { db->deliveries = pn_hash(PN_WEAKREF, 0, 0.75); db->next = next; } void pn_delivery_map_free(pn_delivery_map_t *db) { pn_free(db->deliveries); } pn_delivery_t *pn_delivery_map_get(pn_delivery_map_t *db, pn_sequence_t id) { return (pn_delivery_t *) pn_hash_get(db->deliveries, id); } static void pn_delivery_state_init(pn_delivery_state_t *ds, pn_delivery_t *delivery, pn_sequence_t id) { ds->id = id; ds->sent = false; ds->init = true; } pn_delivery_state_t *pn_delivery_map_push(pn_delivery_map_t *db, pn_delivery_t *delivery) { pn_delivery_state_t *ds = &delivery->state; pn_delivery_state_init(ds, delivery, db->next++); pn_hash_put(db->deliveries, 
ds->id, delivery); return ds; } void pn_delivery_map_del(pn_delivery_map_t *db, pn_delivery_t *delivery) { if (delivery->state.init) { delivery->state.init = false; delivery->state.sent = false; pn_hash_del(db->deliveries, delivery->state.id); } } void pn_delivery_map_clear(pn_delivery_map_t *dm) { pn_hash_t *hash = dm->deliveries; for (pn_handle_t entry = pn_hash_head(hash); entry; entry = pn_hash_next(hash, entry)) { pn_delivery_t *dlv = (pn_delivery_t *) pn_hash_value(hash, entry); pn_delivery_map_del(dm, dlv); } dm->next = 0; } static void pni_default_tracer(pn_transport_t *transport, const char *message) { fprintf(stderr, "[%p]:%s\n", (void *) transport, message); } static ssize_t pn_io_layer_input_passthru(pn_transport_t *, unsigned int, const char *, size_t ); static ssize_t pn_io_layer_output_passthru(pn_transport_t *, unsigned int, char *, size_t ); static ssize_t pn_io_layer_input_error(pn_transport_t *, unsigned int, const char *, size_t ); static ssize_t pn_io_layer_output_error(pn_transport_t *, unsigned int, char *, size_t ); static ssize_t pn_io_layer_input_setup(pn_transport_t *transport, unsigned int layer, const char *bytes, size_t available); static ssize_t pn_io_layer_output_setup(pn_transport_t *transport, unsigned int layer, char *bytes, size_t available); static ssize_t pn_input_read_amqp_header(pn_transport_t *transport, unsigned int layer, const char *bytes, size_t available); static ssize_t pn_input_read_amqp(pn_transport_t *transport, unsigned int layer, const char *bytes, size_t available); static ssize_t pn_output_write_amqp_header(pn_transport_t *transport, unsigned int layer, char *bytes, size_t available); static ssize_t pn_output_write_amqp(pn_transport_t *transport, unsigned int layer, char *bytes, size_t available); static pn_timestamp_t pn_tick_amqp(pn_transport_t *transport, unsigned int layer, pn_timestamp_t now); static ssize_t pn_io_layer_input_autodetect(pn_transport_t *transport, unsigned int layer, const char *bytes, 
size_t available);
static ssize_t pn_io_layer_output_null(pn_transport_t *transport, unsigned int layer, char *bytes, size_t available);

// I/O layer descriptors: {process_input, process_output, tick, buffered}.
const pn_io_layer_t amqp_header_layer = { pn_input_read_amqp_header, pn_output_write_amqp_header, pn_tick_amqp, NULL };

const pn_io_layer_t amqp_write_header_layer = { pn_input_read_amqp, pn_output_write_amqp_header, pn_tick_amqp, NULL };

const pn_io_layer_t amqp_read_header_layer = { pn_input_read_amqp_header, pn_output_write_amqp, pn_tick_amqp, NULL };

const pn_io_layer_t amqp_layer = { pn_input_read_amqp, pn_output_write_amqp, pn_tick_amqp, NULL };

const pn_io_layer_t pni_setup_layer = { pn_io_layer_input_setup, pn_io_layer_output_setup, NULL, NULL };

const pn_io_layer_t pni_autodetect_layer = { pn_io_layer_input_autodetect, pn_io_layer_output_null, NULL, NULL };

const pn_io_layer_t pni_passthru_layer = { pn_io_layer_input_passthru, pn_io_layer_output_passthru, NULL, NULL };

const pn_io_layer_t pni_error_layer = { pn_io_layer_input_error, pn_io_layer_output_error, NULL, NULL };

/* Set up the transport protocol layers depending on what is configured */
static void pn_io_layer_setup(pn_transport_t *transport, unsigned int layer)
{
  assert(layer == 0);
  // Figure out if we are server or not
  if (transport->server) {
    // XXX: This is currently a large hack to work around the SSL
    // code not handling a connection error before being set up fully
    if (transport->ssl && pn_ssl_allow_unsecured(transport)) {
      transport->io_layers[layer++] = &pni_autodetect_layer;
      return;
    }
  }
  if (transport->ssl) {
    transport->io_layers[layer++] = &ssl_layer;
  }
  if (transport->server) {
    // Servers sniff the peer's protocol header rather than assuming one.
    transport->io_layers[layer++] = &pni_autodetect_layer;
    return;
  }
  if (transport->sasl) {
    transport->io_layers[layer++] = &sasl_header_layer;
  }
  transport->io_layers[layer++] = &amqp_header_layer;
}

// First-input hook: installs the real layer stack, then forwards the bytes.
ssize_t pn_io_layer_input_setup(pn_transport_t *transport, unsigned int layer, const char *bytes, size_t available)
{
  pn_io_layer_setup(transport, layer);
  return transport->io_layers[layer]->process_input(transport, layer, bytes, available);
}

// First-output hook: installs the real layer stack, then forwards.
ssize_t pn_io_layer_output_setup(pn_transport_t *transport, unsigned int layer, char *bytes, size_t available)
{
  pn_io_layer_setup(transport, layer);
  return transport->io_layers[layer]->process_output(transport, layer, bytes, available);
}

// Replace the whole stack with the error layer (both directions return PN_EOS).
static void pni_set_error_layer(pn_transport_t *transport)
{
  transport->io_layers[0] = &pni_error_layer;
}

// Autodetect the layer by reading the protocol header
ssize_t pn_io_layer_input_autodetect(pn_transport_t *transport, unsigned int layer, const char *bytes, size_t available)
{
  const char* error;
  bool eos = pn_transport_capacity(transport)==PN_EOS;
  if (eos && available==0) {
    pn_do_error(transport, "amqp:connection:framing-error", "No valid protocol header found");
    pni_set_error_layer(transport);
    return PN_EOS;
  }
  pni_protocol_type_t protocol = pni_sniff_header(bytes, available);
  if (transport->trace & PN_TRACE_DRV)
    pn_transport_logf(transport, "%s detected", pni_protocol_name(protocol));
  switch (protocol) {
  case PNI_PROTOCOL_SSL:
    if (!transport->ssl) {
      pn_ssl(transport);
    }
    // Insert the SSL layer and re-detect whatever protocol runs inside it.
    transport->io_layers[layer] = &ssl_layer;
    transport->io_layers[layer+1] = &pni_autodetect_layer;
    return ssl_layer.process_input(transport, layer, bytes, available);
  case PNI_PROTOCOL_AMQP_SSL:
    if (!transport->ssl) {
      pn_ssl(transport);
    }
    transport->io_layers[layer] = &ssl_layer;
    transport->io_layers[layer+1] = &pni_autodetect_layer;
    return 8;
  case PNI_PROTOCOL_AMQP_SASL:
    if (!transport->sasl) {
      pn_sasl(transport);
    }
    transport->io_layers[layer] = &sasl_write_header_layer;
    transport->io_layers[layer+1] = &pni_autodetect_layer;
    if (transport->trace & PN_TRACE_FRM)
      pn_transport_logf(transport, " <- %s", "SASL");
    return 8;
  case PNI_PROTOCOL_AMQP1:
    if (transport->sasl && pn_sasl_state((pn_sasl_t *)transport)==PN_SASL_IDLE) {
      if (pn_sasl_skipping_allowed(transport)) {
        pn_sasl_done((pn_sasl_t *)transport, PN_SASL_SKIPPED);
      } else {
        pn_do_error(transport,
"amqp:connection:policy-error", "Client skipped SASL exchange - forbidden");
        pni_set_error_layer(transport);
        return 8;
      }
    }
    transport->io_layers[layer] = &amqp_write_header_layer;
    if (transport->trace & PN_TRACE_FRM)
      pn_transport_logf(transport, " <- %s", "AMQP");
    return 8;
  case PNI_PROTOCOL_INSUFFICIENT:
    if (!eos) return 0;
    error = "End of input stream before protocol detection";
    break;
  case PNI_PROTOCOL_AMQP_OTHER:
    error = "Incompatible AMQP connection detected";
    break;
  case PNI_PROTOCOL_UNKNOWN:
  default:
    error = "Unknown protocol detected";
    break;
  }
  // Unrecognized header: report it (quoted) and switch to the error layer.
  char quoted[1024];
  pn_quote_data(quoted, 1024, bytes, available);
  pn_do_error(transport, "amqp:connection:framing-error",
              "%s: '%s'%s", error, quoted,
              !eos ? "" : " (connection aborted)");
  pni_set_error_layer(transport);
  return 0;
}

// We don't know what the output should be - do nothing
ssize_t pn_io_layer_output_null(pn_transport_t *transport, unsigned int layer, char *bytes, size_t available)
{
  return 0;
}

/** Pass through input handler */
ssize_t pn_io_layer_input_passthru(pn_transport_t *transport, unsigned int layer, const char *data, size_t available)
{
  if (layer+1<PN_IO_LAYER_CT)
    return transport->io_layers[layer+1]->process_input(transport, layer+1, data, available);
  return PN_EOS;
}

/** Pass through output handler */
ssize_t pn_io_layer_output_passthru(pn_transport_t *transport, unsigned int layer, char *data, size_t available)
{
  if (layer+1<PN_IO_LAYER_CT)
    return transport->io_layers[layer+1]->process_output(transport, layer+1, data, available);
  return PN_EOS;
}

/** Input handler after detected error */
ssize_t pn_io_layer_input_error(pn_transport_t *transport, unsigned int layer, const char *data, size_t available)
{
  return PN_EOS;
}

/** Output handler after detected error */
ssize_t pn_io_layer_output_error(pn_transport_t *transport, unsigned int layer, char *data, size_t available)
{
  return PN_EOS;
}

// pn_class initializer: puts a freshly allocated transport into its default
// state (buffers unallocated, no connection bound, tracing from environment).
static void pn_transport_initialize(void *object)
{
  pn_transport_t *transport = (pn_transport_t *)object;
  transport->freed = false;
  transport->output_buf = NULL;
  transport->output_size = PN_DEFAULT_MAX_FRAME_SIZE ? PN_DEFAULT_MAX_FRAME_SIZE : 16 * 1024;
  transport->input_buf = NULL;
  transport->input_size = PN_DEFAULT_MAX_FRAME_SIZE ? PN_DEFAULT_MAX_FRAME_SIZE : 16 * 1024;
  transport->tracer = pni_default_tracer;
  transport->sasl = NULL;
  transport->ssl = NULL;
  transport->scratch = pn_string(NULL);
  transport->args = pn_data(16);
  transport->output_args = pn_data(16);
  transport->frame = pn_buffer(4*1024);
  transport->input_frames_ct = 0;
  transport->output_frames_ct = 0;
  transport->connection = NULL;
  transport->context = pn_record();

  for (int layer=0; layer<PN_IO_LAYER_CT; ++layer) {
    transport->io_layers[layer] = NULL;
  }
  // Defer setting up the layers until the first data arrives or is sent
  transport->io_layers[0] = &pni_setup_layer;

  transport->open_sent = false;
  transport->open_rcvd = false;
  transport->close_sent = false;
  transport->close_rcvd = false;
  transport->tail_closed = false;
  transport->head_closed = false;
  transport->remote_container = NULL;
  transport->remote_hostname = NULL;
  transport->local_max_frame = PN_DEFAULT_MAX_FRAME_SIZE;
  transport->remote_max_frame = 0;
  transport->channel_max = 0;
  transport->remote_channel_max = 0;
  transport->local_idle_timeout = 0;
  transport->dead_remote_deadline = 0;
  transport->last_bytes_input = 0;
  transport->remote_idle_timeout = 0;
  transport->keepalive_deadline = 0;
  transport->last_bytes_output = 0;
  transport->remote_offered_capabilities = pn_data(0);
  transport->remote_desired_capabilities = pn_data(0);
  transport->remote_properties = pn_data(0);
  transport->disp_data = pn_data(0);
  pn_condition_init(&transport->remote_condition);
  pn_condition_init(&transport->condition);
  transport->error = pn_error();
  transport->local_channels = pn_hash(PN_WEAKREF, 0, 0.75);
  transport->remote_channels = pn_hash(PN_WEAKREF, 0, 0.75);
  transport->bytes_input = 0;
  transport->bytes_output = 0;
  transport->input_pending = 0;
  transport->output_pending = 0;
  transport->done_processing = false;
  transport->posted_idle_timeout = false;
  transport->server = false;
  transport->halt = false;
  transport->referenced = true;
  // Trace mask assembled from the PN_TRACE_RAW/FRM/DRV environment toggles.
  transport->trace =
    (pn_env_bool("PN_TRACE_RAW") ? PN_TRACE_RAW : PN_TRACE_OFF) |
    (pn_env_bool("PN_TRACE_FRM") ? PN_TRACE_FRM : PN_TRACE_OFF) |
    (pn_env_bool("PN_TRACE_DRV") ? PN_TRACE_DRV : PN_TRACE_OFF);
}

// Look up the session mapped to a remote channel number (NULL if unmapped).
pn_session_t *pn_channel_state(pn_transport_t *transport, uint16_t channel)
{
  return (pn_session_t *) pn_hash_get(transport->remote_channels, channel);
}

// Record the remote channel the peer chose for this session.
static void pni_map_remote_channel(pn_session_t *session, uint16_t channel)
{
  pn_transport_t *transport = session->connection->transport;
  pn_hash_put(transport->remote_channels, channel, session);
  session->state.remote_channel = channel;
  pn_ep_incref(&session->endpoint);
}

void pni_transport_unbind_handles(pn_hash_t *handles, bool reset_state);

// Undo pni_map_remote_channel when the remote end-of-session arrives.
static void pni_unmap_remote_channel(pn_session_t *ssn)
{
  // XXX: should really update link state also
  pn_delivery_map_clear(&ssn->state.incoming);
  pni_transport_unbind_handles(ssn->state.remote_handles, false);
  pn_transport_t *transport = ssn->connection->transport;
  uint16_t channel = ssn->state.remote_channel;
  ssn->state.remote_channel = -2;
  if (pn_hash_get(transport->remote_channels, channel)) {
    pn_ep_decref(&ssn->endpoint);
  }
  // note: may free the session:
  pn_hash_del(transport->remote_channels, channel);
}

// Refcount bump that redirects to the bound connection when one exists, so
// connection and transport share a single lifetime.
static void pn_transport_incref(void *object)
{
  pn_transport_t *transport = (pn_transport_t *) object;
  if (!transport->referenced) {
    transport->referenced = true;
    if (transport->connection) {
      pn_incref(transport->connection);
    } else {
      pn_object_incref(object);
    }
  } else {
    pn_object_incref(object);
  }
}

static void pn_transport_finalize(void *object);
#define pn_transport_new pn_object_new
#define pn_transport_refcount pn_object_refcount
#define pn_transport_decref pn_object_decref
#define pn_transport_reify pn_object_reify
#define pn_transport_hashcode NULL
#define pn_transport_compare NULL
#define pn_transport_inspect NULL

// Construct a transport and allocate its input/output buffers; returns NULL
// on allocation failure (partially built transports are freed).
pn_transport_t *pn_transport(void)
{
// PN_METACLASS must see the object-system free, not the public API below.
#define pn_transport_free pn_object_free
  static const pn_class_t clazz = PN_METACLASS(pn_transport);
#undef pn_transport_free
  pn_transport_t *transport = (pn_transport_t *) pn_class_new(&clazz, sizeof(pn_transport_t));
  if (!transport) return NULL;
  transport->output_buf = (char *) malloc(transport->output_size);
  if (!transport->output_buf) {
    pn_transport_free(transport);
    return NULL;
  }
  transport->input_buf = (char *) malloc(transport->input_size);
  if (!transport->input_buf) {
    pn_transport_free(transport);
    return NULL;
  }
  transport->capacity = 4*1024;
  transport->available = 0;
  transport->output = (char *) malloc(transport->capacity);
  if (!transport->output) {
    pn_transport_free(transport);
    return NULL;
  }
  return transport;
}

// Mark this transport as the server side of the connection.
void pn_transport_set_server(pn_transport_t *transport)
{
  transport->server = true;
}

// Release the application's reference; actual teardown happens in finalize.
void pn_transport_free(pn_transport_t *transport)
{
  if (!transport) return;
  assert(!transport->freed);
  transport->freed = true;
  pn_decref(transport);
}

static void pn_transport_finalize(void *object)
{
  pn_transport_t *transport = (pn_transport_t *) object;

  // Hand our liveness over to the connection if it still has other owners.
  if (transport->referenced && transport->connection && pn_refcount(transport->connection) > 1) {
    pn_object_incref(transport);
    transport->referenced = false;
    pn_decref(transport->connection);
    return;
  }

  // once the application frees the transport, no further I/O
  // processing can be done to the connection:
  pn_transport_unbind(transport);
  // we may have posted events, so stay alive until they are processed
  if (pn_refcount(transport) > 0) return;

  pn_free(transport->context);
  pn_ssl_free(transport);
  pn_sasl_free(transport);
  free(transport->remote_container);
  free(transport->remote_hostname);
  pn_free(transport->remote_offered_capabilities);
  pn_free(transport->remote_desired_capabilities);
  pn_free(transport->remote_properties);
  pn_free(transport->disp_data);
  pn_condition_tini(&transport->remote_condition);
  pn_condition_tini(&transport->condition);
pn_error_free(transport->error);
  pn_free(transport->local_channels);
  pn_free(transport->remote_channels);
  if (transport->input_buf) free(transport->input_buf);
  if (transport->output_buf) free(transport->output_buf);
  pn_free(transport->scratch);
  pn_data_free(transport->args);
  pn_data_free(transport->output_args);
  pn_buffer_free(transport->frame);
  free(transport->output);
}

// Post PN_CONNECTION_REMOTE_OPEN (plus a PN_TRANSPORT event if the peer
// advertised an idle timeout) to the connection's collector.
static void pni_post_remote_open_events(pn_transport_t *transport, pn_connection_t *connection)
{
  pn_collector_put(connection->collector, PN_OBJECT, connection, PN_CONNECTION_REMOTE_OPEN);
  if (transport->remote_idle_timeout) {
    pn_collector_put(connection->collector, PN_OBJECT, transport, PN_TRANSPORT);
  }
}

// Bind a transport and a connection one-to-one.
// Returns PN_STATE_ERR if either side is already bound.
int pn_transport_bind(pn_transport_t *transport, pn_connection_t *connection)
{
  assert(transport);
  assert(connection);
  if (transport->connection) return PN_STATE_ERR;
  if (connection->transport) return PN_STATE_ERR;
  transport->connection = connection;
  connection->transport = transport;
  pn_incref(connection);
  pn_connection_bound(connection);
  // If the remote open arrived before binding, replay it now.
  if (transport->open_rcvd) {
    PN_SET_REMOTE(connection->endpoint.state, PN_REMOTE_ACTIVE);
    pni_post_remote_open_events(transport, connection);
    transport->halt = false;
    transport_consume(transport);        // blech - testBindAfterOpen
  }
  return 0;
}

// Drop every link in a handle table, optionally resetting the links' bound
// state. NOTE(review): entries are deleted while iterating; assumes
// pn_hash_next stays valid after pn_hash_del of the current entry - confirm
// against the hash implementation.
void pni_transport_unbind_handles(pn_hash_t *handles, bool reset_state)
{
  for (pn_handle_t h = pn_hash_head(handles); h; h = pn_hash_next(handles, h)) {
    uintptr_t key = pn_hash_key(handles, h);
    pn_link_t *link = (pn_link_t *) pn_hash_value(handles, h);
    if (reset_state) {
      pn_link_unbound(link);
    }
    pn_ep_decref(&link->endpoint);
    pn_hash_del(handles, key);
  }
}

// Drop every session in a channel table, clearing each session's delivery
// maps and unbinding its local/remote handle tables first.
void pni_transport_unbind_channels(pn_hash_t *channels)
{
  for (pn_handle_t h = pn_hash_head(channels); h; h = pn_hash_next(channels, h)) {
    uintptr_t key = pn_hash_key(channels, h);
    pn_session_t *ssn = (pn_session_t *) pn_hash_value(channels, h);
    pn_delivery_map_clear(&ssn->state.incoming);
    pn_delivery_map_clear(&ssn->state.outgoing);
    pni_transport_unbind_handles(ssn->state.local_handles, true);
    pni_transport_unbind_handles(ssn->state.remote_handles, true);
    pn_session_unbound(ssn);
    pn_ep_decref(&ssn->endpoint);
    pn_hash_del(channels, key);
  }
}

// Sever the binding made by pn_transport_bind: clear per-session transfer
// state, notify endpoints, and release the connection reference.
int pn_transport_unbind(pn_transport_t *transport)
{
  assert(transport);
  if (!transport->connection) return 0;

  pn_connection_t *conn = transport->connection;
  transport->connection = NULL;
  bool was_referenced = transport->referenced;

  pn_collector_put(conn->collector, PN_OBJECT, conn, PN_CONNECTION_UNBOUND);

  // XXX: what happens if the endpoints are freed before we get here?
  pn_session_t *ssn = pn_session_head(conn, 0);
  while (ssn) {
    pn_delivery_map_clear(&ssn->state.incoming);
    pn_delivery_map_clear(&ssn->state.outgoing);
    ssn = pn_session_next(ssn, 0);
  }

  pn_endpoint_t *endpoint = conn->endpoint_head;
  while (endpoint) {
    pn_condition_clear(&endpoint->remote_condition);
    pn_modified(conn, endpoint, true);
    endpoint = endpoint->endpoint_next;
  }

  pni_transport_unbind_channels(transport->local_channels);
  pni_transport_unbind_channels(transport->remote_channels);

  pn_connection_unbound(conn);
  if (was_referenced) {
    pn_decref(conn);
  }
  return 0;
}

// Render the transport's condition (if any) into its pn_error_t and return it.
pn_error_t *pn_transport_error(pn_transport_t *transport)
{
  assert(transport);
  if (pn_condition_is_set(&transport->condition)) {
    pn_error_format(transport->error, PN_ERR, "%s: %s",
                    pn_condition_get_name(&transport->condition),
                    pn_condition_get_description(&transport->condition));
  } else {
    pn_error_clear(transport->error);
  }
  return transport->error;
}

// Accessor for the transport's local condition object.
pn_condition_t *pn_transport_condition(pn_transport_t *transport)
{
  assert(transport);
  return &transport->condition;
}

// Record the remote handle the peer attached to this link.
static void pni_map_remote_handle(pn_link_t *link, uint32_t handle)
{
  link->state.remote_handle = handle;
  pn_hash_put(link->session->state.remote_handles, handle, link);
  pn_ep_incref(&link->endpoint);
}

// Undo pni_map_remote_handle when the remote detach arrives.
static void pni_unmap_remote_handle(pn_link_t *link)
{
  uintptr_t handle = link->state.remote_handle;
  link->state.remote_handle = -2;
  if
(pn_hash_get(link->session->state.remote_handles, handle)) {
    pn_ep_decref(&link->endpoint);
  }
  // may delete link:
  pn_hash_del(link->session->state.remote_handles, handle);
}

// Look up the link attached to a remote handle (NULL if unattached).
pn_link_t *pn_handle_state(pn_session_t *ssn, uint32_t handle)
{
  return (pn_link_t *) pn_hash_get(ssn->state.remote_handles, handle);
}

// True when this outcome can be coalesced into a ranged disposition frame.
bool pni_disposition_batchable(pn_disposition_t *disposition)
{
  switch (disposition->type) {
  case PN_ACCEPTED:
    return true;
  case PN_RELEASED:
    return true;
  default:
    return false;
  }
}

// Encode a delivery outcome's described value for a disposition frame.
// ACCEPTED/RELEASED carry no payload; unknown types are copied verbatim.
void pni_disposition_encode(pn_disposition_t *disposition, pn_data_t *data)
{
  pn_condition_t *cond = &disposition->condition;
  switch (disposition->type) {
  case PN_RECEIVED:
    pn_data_put_list(data);
    pn_data_enter(data);
    pn_data_put_uint(data, disposition->section_number);
    pn_data_put_ulong(data, disposition->section_offset);
    pn_data_exit(data);
    break;
  case PN_ACCEPTED:
  case PN_RELEASED:
    return;
  case PN_REJECTED:
    pn_data_fill(data, "[?DL[sSC]]", pn_condition_is_set(cond), ERROR,
                 pn_condition_get_name(cond),
                 pn_condition_get_description(cond),
                 pn_condition_info(cond));
    break;
  case PN_MODIFIED:
    pn_data_fill(data, "[ooC]", disposition->failed,
                 disposition->undeliverable,
                 disposition->annotations);
    break;
  default:
    pn_data_copy(data, disposition->data);
    break;
  }
}

// Frame-level trace: when PN_TRACE_FRM is enabled, log the channel,
// direction, inspected args, and a quoted (possibly truncated) payload.
void pn_do_trace(pn_transport_t *transport, uint16_t ch, pn_dir_t dir,
                 pn_data_t *args, const char *payload, size_t size)
{
  if (transport->trace & PN_TRACE_FRM) {
    pn_string_format(transport->scratch, "%u %s ", ch, dir == OUT ? "->" : "<-");
    pn_inspect(args, transport->scratch);
    if (pn_data_size(args)==0) {
      pn_string_addf(transport->scratch, "(EMPTY FRAME)");
    }
    if (size) {
      char buf[1024];
      int e = pn_quote_data(buf, 1024, payload, size);
      pn_string_addf(transport->scratch, " (%" PN_ZU ") \"%s\"%s", size, buf,
                     e == PN_OVERFLOW ? "... (truncated)" : "");
    }
    pn_transport_log(transport, pn_string_get(transport->scratch));
  }
}

// Encode the varargs-described performative and append it to the transport's
// output buffer as one frame. Returns 0 on success, PN_ERR on encode failure.
int pn_post_frame(pn_transport_t *transport, uint8_t type, uint16_t ch, const char *fmt, ...)
{
  pn_buffer_t *frame_buf = transport->frame;
  va_list ap;
  va_start(ap, fmt);
  pn_data_clear(transport->output_args);
  int err = pn_data_vfill(transport->output_args, fmt, ap);
  va_end(ap);
  if (err) {
    pn_transport_logf(transport,
                      "error posting frame: %s, %s: %s", fmt, pn_code(err),
                      pn_error_text(pn_data_error(transport->output_args)));
    return PN_ERR;
  }

  pn_do_trace(transport, ch, OUT, transport->output_args, NULL, 0);

 encode_performatives:
  pn_buffer_clear( frame_buf );
  pn_buffer_memory_t buf = pn_buffer_memory( frame_buf );
  buf.size = pn_buffer_available( frame_buf );

  ssize_t wr = pn_data_encode( transport->output_args, buf.start, buf.size );
  if (wr < 0) {
    if (wr == PN_OVERFLOW) {
      // Encoded form didn't fit: double the scratch frame buffer and retry.
      pn_buffer_ensure( frame_buf, pn_buffer_available( frame_buf ) * 2 );
      goto encode_performatives;
    }
    pn_transport_logf(transport, "error posting frame: %s", pn_code(wr));
    return PN_ERR;
  }

  pn_frame_t frame = {type};
  frame.channel = ch;
  frame.payload = buf.start;
  frame.size = wr;
  size_t n;
  // Grow the output buffer until the framed bytes fit.
  // NOTE(review): realloc return is unchecked; on OOM this leaks the old
  // buffer and dereferences NULL - flag for hardening.
  while (!(n = pn_write_frame(transport->output + transport->available,
                              transport->capacity - transport->available, frame))) {
    transport->capacity *= 2;
    transport->output = (char *) realloc(transport->output, transport->capacity);
  }
  transport->output_frames_ct += 1;
  if (transport->trace & PN_TRACE_RAW) {
    pn_string_set(transport->scratch, "RAW: \"");
    pn_quote(transport->scratch, transport->output + transport->available, n);
    pn_string_addf(transport->scratch, "\"");
    pn_transport_log(transport, pn_string_get(transport->scratch));
  }
  transport->available += n;

  return 0;
}

// Post one or more transfer frames carrying *payload, splitting at the peer's
// max-frame-size and flipping the 'more' flag as needed. Returns the number
// of frames written (payload is advanced in place) or PN_ERR.
int pn_post_amqp_transfer_frame(pn_transport_t *transport, uint16_t ch,
                                uint32_t handle,
                                pn_sequence_t id,
                                pn_bytes_t *payload,
                                const pn_bytes_t *tag,
                                uint32_t message_format,
                                bool settled,
                                bool more,
                                pn_sequence_t frame_limit,
                                uint64_t code,
                                pn_data_t* state)
{
  bool more_flag = more;
  int framecount = 0;
  pn_buffer_t *frame = transport->frame;

  // create preformatives, assuming 'more' flag need not change
 compute_performatives:
pn_data_clear(transport->output_args);
  int err = pn_data_fill(transport->output_args, "DL[IIzIoon?DLC]", TRANSFER,
                         handle, id, tag->size, tag->start, message_format,
                         settled, more_flag, (bool)code, code, state);
  if (err) {
    pn_transport_logf(transport,
                      "error posting transfer frame: %s: %s", pn_code(err),
                      pn_error_text(pn_data_error(transport->output_args)));
    return PN_ERR;
  }

  do { // send as many frames as possible without changing the 'more' flag...

  encode_performatives:
    pn_buffer_clear( frame );
    pn_buffer_memory_t buf = pn_buffer_memory( frame );
    buf.size = pn_buffer_available( frame );

    ssize_t wr = pn_data_encode(transport->output_args, buf.start, buf.size);
    if (wr < 0) {
      if (wr == PN_OVERFLOW) {
        // Encoded performative didn't fit: grow the scratch buffer and retry.
        pn_buffer_ensure( frame, pn_buffer_available( frame ) * 2 );
        goto encode_performatives;
      }
      pn_transport_logf(transport, "error posting frame: %s", pn_code(wr));
      return PN_ERR;
    }
    buf.size = wr;

    // check if we need to break up the outbound frame
    size_t available = payload->size;
    if (transport->remote_max_frame) {
      // 8 bytes = AMQP frame header overhead.
      if ((available + buf.size) > transport->remote_max_frame - 8) {
        available = transport->remote_max_frame - 8 - buf.size;
        if (more_flag == false) {
          more_flag = true;
          goto compute_performatives;  // deal with flag change
        }
      } else if (more_flag == true && more == false) {
        // caller has no more, and this is the last frame
        more_flag = false;
        goto compute_performatives;
      }
    }

    if (pn_buffer_available( frame ) < (available + buf.size)) {
      // not enough room for payload - try again...
      pn_buffer_ensure( frame, available + buf.size );
      goto encode_performatives;
    }

    pn_do_trace(transport, ch, OUT, transport->output_args, payload->start, available);

    memmove( buf.start + buf.size, payload->start, available);
    payload->start += available;
    payload->size -= available;
    buf.size += available;

    // NOTE(review): this pn_frame_t shadows the outer pn_buffer_t *frame -
    // works, but worth renaming for clarity.
    pn_frame_t frame = {AMQP_FRAME_TYPE};
    frame.channel = ch;
    frame.payload = buf.start;
    frame.size = buf.size;
    size_t n;
    // NOTE(review): realloc return is unchecked here as well.
    while (!(n = pn_write_frame(transport->output + transport->available,
                                transport->capacity - transport->available, frame))) {
      transport->capacity *= 2;
      transport->output = (char *) realloc(transport->output, transport->capacity);
    }
    transport->output_frames_ct += 1;
    framecount++;
    if (transport->trace & PN_TRACE_RAW) {
      pn_string_set(transport->scratch, "RAW: \"");
      pn_quote(transport->scratch, transport->output + transport->available, n);
      pn_string_addf(transport->scratch, "\"");
      pn_transport_log(transport, pn_string_get(transport->scratch));
    }
    transport->available += n;
  } while (payload->size > 0 && framecount < frame_limit);

  return framecount;
}

// Post a close frame, using the given condition or, failing that, the bound
// connection's local condition.
int pn_post_close(pn_transport_t *transport, const char *condition, const char *description)
{
  pn_condition_t *cond = NULL;
  if (transport->connection) {
    cond = pn_connection_condition(transport->connection);
  }
  pn_data_t *info = NULL;
  if (!condition && pn_condition_is_set(cond)) {
    condition = pn_condition_get_name(cond);
    description = pn_condition_get_description(cond);
    info = pn_condition_info(cond);
  }

  return pn_post_frame(transport, AMQP_FRAME_TYPE, 0, "DL[?DL[sSC]]", CLOSE,
                       (bool) condition, ERROR, condition, description, info);
}

// Collector to post transport events to, or NULL when unbound.
static pn_collector_t *pni_transport_collector(pn_transport_t *transport)
{
  if (transport->connection && transport->connection->collector) {
    return transport->connection->collector;
  } else {
    return NULL;
  }
}

// Post PN_TRANSPORT_CLOSED once both directions have closed.
static void pni_maybe_post_closed(pn_transport_t *transport)
{
  pn_collector_t *collector = pni_transport_collector(transport);
  if (transport->head_closed && transport->tail_closed)
{
    pn_collector_put(collector, PN_OBJECT, transport, PN_TRANSPORT_CLOSED);
  }
}

// Mark the read side closed, posting PN_TRANSPORT_TAIL_CLOSED (and possibly
// PN_TRANSPORT_CLOSED) at most once.
static void pni_close_tail(pn_transport_t *transport)
{
  if (!transport->tail_closed) {
    transport->tail_closed = true;
    pn_collector_t *collector = pni_transport_collector(transport);
    pn_collector_put(collector, PN_OBJECT, transport, PN_TRANSPORT_TAIL_CLOSED);
    pni_maybe_post_closed(transport);
  }
}

// Enter the error state: send open (if not yet sent) plus close carrying the
// given condition, record/extend the transport condition, post
// PN_TRANSPORT_ERROR, and halt further processing. Always returns PN_ERR.
int pn_do_error(pn_transport_t *transport, const char *condition, const char *fmt, ...)
{
  va_list ap;
  va_start(ap, fmt);
  char buf[1024];
  if (fmt) {
    // XXX: result
    vsnprintf(buf, 1024, fmt, ap);
  } else {
    buf[0] = '\0';
  }
  va_end(ap);
  if (!transport->close_sent) {
    if (!transport->open_sent) {
      // The wire protocol requires an open before a close can be sent.
      pn_post_frame(transport, AMQP_FRAME_TYPE, 0, "DL[S]", OPEN, "");
    }

    pn_post_close(transport, condition, buf);
    transport->close_sent = true;
  }
  transport->halt = true;
  pn_condition_t *cond = &transport->condition;
  if (!pn_condition_is_set(cond)) {
    pn_condition_set_name(cond, condition);
    if (fmt) {
      pn_condition_set_description(cond, buf);
    }
  } else {
    // A condition is already recorded: append this one to its description.
    const char *first = pn_condition_get_description(cond);
    if (first && fmt) {
      char extended[2048];
      snprintf(extended, 2048, "%s (%s)", first, buf);
      pn_condition_set_description(cond, extended);
    } else if (fmt) {
      pn_condition_set_description(cond, buf);
    }
  }
  pn_collector_t *collector = pni_transport_collector(transport);
  pn_collector_put(collector, PN_OBJECT, transport, PN_TRANSPORT_ERROR);
  if (transport->trace & PN_TRACE_DRV) {
    pn_transport_logf(transport, "ERROR %s %s", condition, buf);
  }

  transport->done_processing = true;
  pni_close_tail(transport);
  return PN_ERR;
}

// strndup for a pn_bytes_t.
static char *pn_bytes_strdup(pn_bytes_t str)
{
  return pn_strndup(str.start, str.size);
}

// Handle an incoming open performative: capture the peer's container,
// hostname, limits and capabilities, clamp a too-small max-frame, and mark
// the remote end active.
int pn_do_open(pn_transport_t *transport, uint8_t frame_type, uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  pn_connection_t *conn = transport->connection;
  bool container_q, hostname_q;
  pn_bytes_t remote_container, remote_hostname;
  pn_data_clear(transport->remote_offered_capabilities);
  pn_data_clear(transport->remote_desired_capabilities);
  pn_data_clear(transport->remote_properties);
  int err = pn_data_scan(args, "D.[?S?SIHI..CCC]", &container_q,
                         &remote_container, &hostname_q, &remote_hostname,
                         &transport->remote_max_frame,
                         &transport->remote_channel_max,
                         &transport->remote_idle_timeout,
                         transport->remote_offered_capabilities,
                         transport->remote_desired_capabilities,
                         transport->remote_properties);
  if (err) return err;
  if (transport->remote_max_frame > 0) {
    if (transport->remote_max_frame < AMQP_MIN_MAX_FRAME_SIZE) {
      pn_transport_logf(transport, "Peer advertised bad max-frame (%u), forcing to %u",
                        transport->remote_max_frame, AMQP_MIN_MAX_FRAME_SIZE);
      transport->remote_max_frame = AMQP_MIN_MAX_FRAME_SIZE;
    }
  }
  if (container_q) {
    transport->remote_container = pn_bytes_strdup(remote_container);
  } else {
    transport->remote_container = NULL;
  }
  if (hostname_q) {
    transport->remote_hostname = pn_bytes_strdup(remote_hostname);
  } else {
    transport->remote_hostname = NULL;
  }

  if (conn) {
    PN_SET_REMOTE(conn->endpoint.state, PN_REMOTE_ACTIVE);
    pni_post_remote_open_events(transport, conn);
  } else {
    // Not bound yet: stop processing until pn_transport_bind replays this.
    transport->halt = true;
  }
  transport->open_rcvd = true;
  return 0;
}

// Handle an incoming begin performative: find (reply case) or create the
// session, record the peer's initial transfer count, and map the channel.
int pn_do_begin(pn_transport_t *transport, uint8_t frame_type, uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  bool reply;
  uint16_t remote_channel;
  pn_sequence_t next;
  int err = pn_data_scan(args, "D.[?HI]", &reply, &remote_channel, &next);
  if (err) return err;
  pn_session_t *ssn;
  if (reply) {
    // XXX: what if session is NULL?
ssn = (pn_session_t *) pn_hash_get(transport->local_channels, remote_channel);
  } else {
    ssn = pn_session(transport->connection);
  }
  // NOTE(review): when 'reply' is set and the remote-channel lookup misses,
  // ssn is NULL and the next line crashes - flag for fix.
  ssn->state.incoming_transfer_count = next;
  pni_map_remote_channel(ssn, channel);
  PN_SET_REMOTE(ssn->endpoint.state, PN_REMOTE_ACTIVE);
  pn_collector_put(transport->connection->collector, PN_OBJECT, ssn, PN_SESSION_REMOTE_OPEN);
  return 0;
}

// Search a session's links for one with the given name and direction.
pn_link_t *pn_find_link(pn_session_t *ssn, pn_bytes_t name, bool is_sender)
{
  pn_endpoint_type_t type = is_sender ? SENDER : RECEIVER;

  for (size_t i = 0; i < pn_list_size(ssn->links); i++)
  {
    pn_link_t *link = (pn_link_t *) pn_list_get(ssn->links, i);
    if (link->endpoint.type == type &&
        !strncmp(name.start, pn_string_get(link->name), name.size))
    {
      return link;
    }
  }
  return NULL;
}

// Map a terminus-expiry-policy symbol to the enum; defaults to
// PN_EXPIRE_WITH_SESSION for absent or unrecognized symbols.
// NOTE(review): strncmp bounded by symbol.size matches prefixes of the
// keyword (e.g. "link" would match "link-detach") - confirm intended.
static pn_expiry_policy_t symbol2policy(pn_bytes_t symbol)
{
  if (!symbol.start)
    return PN_EXPIRE_WITH_SESSION;

  if (!strncmp(symbol.start, "link-detach", symbol.size))
    return PN_EXPIRE_WITH_LINK;
  if (!strncmp(symbol.start, "session-end", symbol.size))
    return PN_EXPIRE_WITH_SESSION;
  if (!strncmp(symbol.start, "connection-close", symbol.size))
    return PN_EXPIRE_WITH_CONNECTION;
  if (!strncmp(symbol.start, "never", symbol.size))
    return PN_EXPIRE_NEVER;

  return PN_EXPIRE_WITH_SESSION;
}

// Map a distribution-mode symbol ("move"/"copy") to the enum.
static pn_distribution_mode_t symbol2dist_mode(const pn_bytes_t symbol)
{
  if (!symbol.start)
    return PN_DIST_MODE_UNSPECIFIED;

  if (!strncmp(symbol.start, "move", symbol.size))
    return PN_DIST_MODE_MOVE;
  if (!strncmp(symbol.start, "copy", symbol.size))
    return PN_DIST_MODE_COPY;

  return PN_DIST_MODE_UNSPECIFIED;
}

// Inverse of symbol2dist_mode; NULL for unspecified.
static const char *dist_mode2symbol(const pn_distribution_mode_t mode)
{
  switch (mode)
  {
  case PN_DIST_MODE_COPY:
    return "copy";
  case PN_DIST_MODE_MOVE:
    return "move";
  default:
    return NULL;
  }
}

// Set a terminus address from a non-NUL-terminated byte range.
int pn_terminus_set_address_bytes(pn_terminus_t *terminus, pn_bytes_t address)
{
  assert(terminus);
  return pn_string_setn(terminus->address, address.start, address.size);
}

// Handle an incoming attach performative: find or create the named link,
// populate its remote source/target termini and settle modes, and post
// PN_LINK_REMOTE_OPEN.
int pn_do_attach(pn_transport_t *transport, uint8_t frame_type, uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  pn_bytes_t name;
  uint32_t handle;
  bool is_sender;
  pn_bytes_t source, target;
  pn_durability_t src_dr, tgt_dr;
  pn_bytes_t src_exp, tgt_exp;
  pn_seconds_t src_timeout, tgt_timeout;
  bool src_dynamic, tgt_dynamic;
  pn_sequence_t idc;
  pn_bytes_t dist_mode;
  bool snd_settle, rcv_settle;
  uint8_t snd_settle_mode, rcv_settle_mode;
  int err = pn_data_scan(args, "D.[SIo?B?BD.[SIsIo.s]D.[SIsIo]..I]", &name, &handle,
                         &is_sender, &snd_settle, &snd_settle_mode,
                         &rcv_settle, &rcv_settle_mode, &source, &src_dr,
                         &src_exp, &src_timeout, &src_dynamic, &dist_mode,
                         &target, &tgt_dr, &tgt_exp, &tgt_timeout,
                         &tgt_dynamic, &idc);
  if (err) return err;
  // Copy the link name into a NUL-terminated buffer (heap only when large).
  char strbuf[128];      // avoid malloc for most link names
  char *strheap = (name.size >= sizeof(strbuf)) ? (char *) malloc(name.size + 1) : NULL;
  char *strname = strheap ? strheap : strbuf;
  strncpy(strname, name.start, name.size);
  strname[name.size] = '\0';

  pn_session_t *ssn = pn_channel_state(transport, channel);
  if (!ssn) {
    pn_do_error(transport, "amqp:connection:no-session", "attach without a session");
    if (strheap) free(strheap);
    return PN_EOS;
  }
  pn_link_t *link = pn_find_link(ssn, name, is_sender);
  if (!link) {
    if (is_sender) {
      link = (pn_link_t *) pn_sender(ssn, strname);
    } else {
      link = (pn_link_t *) pn_receiver(ssn, strname);
    }
  }

  if (strheap) {
    free(strheap);
  }

  pni_map_remote_handle(link, handle);
  PN_SET_REMOTE(link->endpoint.state, PN_REMOTE_ACTIVE);
  pn_terminus_t *rsrc = &link->remote_source;
  if (source.start || src_dynamic) {
    pn_terminus_set_type(rsrc, PN_SOURCE);
    pn_terminus_set_address_bytes(rsrc, source);
    pn_terminus_set_durability(rsrc, src_dr);
    pn_terminus_set_expiry_policy(rsrc, symbol2policy(src_exp));
    pn_terminus_set_timeout(rsrc, src_timeout);
    pn_terminus_set_dynamic(rsrc, src_dynamic);
    pn_terminus_set_distribution_mode(rsrc, symbol2dist_mode(dist_mode));
  } else {
    pn_terminus_set_type(rsrc, PN_UNSPECIFIED);
  }
  pn_terminus_t *rtgt = &link->remote_target;
  if (target.start || tgt_dynamic) {
    pn_terminus_set_type(rtgt, PN_TARGET);
    pn_terminus_set_address_bytes(rtgt, target);
    pn_terminus_set_durability(rtgt, tgt_dr);
    pn_terminus_set_expiry_policy(rtgt, symbol2policy(tgt_exp));
    pn_terminus_set_timeout(rtgt, tgt_timeout);
    pn_terminus_set_dynamic(rtgt, tgt_dynamic);
  } else {
    // No target address: check for a transaction coordinator target.
    uint64_t code = 0;
    pn_data_clear(link->remote_target.capabilities);
    err = pn_data_scan(args, "D.[.....D..DL[C]...]", &code,
                       link->remote_target.capabilities);
    if (err) return err;

    if (code == COORDINATOR) {
      pn_terminus_set_type(rtgt, PN_COORDINATOR);
    } else {
      pn_terminus_set_type(rtgt, PN_UNSPECIFIED);
    }
  }

  if (snd_settle)
    link->remote_snd_settle_mode = snd_settle_mode;
  if (rcv_settle)
    link->remote_rcv_settle_mode = rcv_settle_mode;

  pn_data_clear(link->remote_source.properties);
  pn_data_clear(link->remote_source.filter);
  pn_data_clear(link->remote_source.outcomes);
  pn_data_clear(link->remote_source.capabilities);
  pn_data_clear(link->remote_target.properties);
  pn_data_clear(link->remote_target.capabilities);

  err = pn_data_scan(args, "D.[.....D.[.....C.C.CC]D.[.....CC]",
                     link->remote_source.properties,
                     link->remote_source.filter,
                     link->remote_source.outcomes,
                     link->remote_source.capabilities,
                     link->remote_target.properties,
                     link->remote_target.capabilities);
  if (err) return err;

  pn_data_rewind(link->remote_source.properties);
  pn_data_rewind(link->remote_source.filter);
  pn_data_rewind(link->remote_source.outcomes);
  pn_data_rewind(link->remote_source.capabilities);
  pn_data_rewind(link->remote_target.properties);
  pn_data_rewind(link->remote_target.capabilities);

  if (!is_sender) {
    // Receivers adopt the sender's initial delivery count.
    link->state.delivery_count = idc;
  }

  pn_collector_put(transport->connection->collector, PN_OBJECT, link, PN_LINK_REMOTE_OPEN);
  return 0;
}

int pn_post_flow(pn_transport_t *transport, pn_session_t *ssn, pn_link_t *link);

// free the delivery
// (drops the map's reference; the incref/decref pair lets the refcount reach 0)
static void pn_full_settle(pn_delivery_map_t *db, pn_delivery_t *delivery)
{
  assert(!delivery->work);
  pn_clear_tpwork(delivery);
  pn_delivery_map_del(db, delivery);
pn_incref(delivery); pn_decref(delivery); } int pn_do_transfer(pn_transport_t *transport, uint8_t frame_type, uint16_t channel, pn_data_t *args, const pn_bytes_t *payload) { // XXX: multi transfer uint32_t handle; pn_bytes_t tag; bool id_present; pn_sequence_t id; bool settled; bool more; bool has_type; uint64_t type; pn_data_clear(transport->disp_data); int err = pn_data_scan(args, "D.[I?Iz.oo.D?LC]", &handle, &id_present, &id, &tag, &settled, &more, &has_type, &type, transport->disp_data); if (err) return err; pn_session_t *ssn = pn_channel_state(transport, channel); if (!ssn->state.incoming_window) { return pn_do_error(transport, "amqp:session:window-violation", "incoming session window exceeded"); } pn_link_t *link = pn_handle_state(ssn, handle); pn_delivery_t *delivery; if (link->unsettled_tail && !link->unsettled_tail->done) { delivery = link->unsettled_tail; } else { pn_delivery_map_t *incoming = &ssn->state.incoming; if (!ssn->state.incoming_init) { incoming->next = id; ssn->state.incoming_init = true; ssn->incoming_deliveries++; } delivery = pn_delivery(link, pn_dtag(tag.start, tag.size)); pn_delivery_state_t *state = pn_delivery_map_push(incoming, delivery); if (id_present && id != state->id) { return pn_do_error(transport, "amqp:session:invalid-field", "sequencing error, expected delivery-id %u, got %u", state->id, id); } if (has_type) { delivery->remote.type = type; pn_data_copy(delivery->remote.data, transport->disp_data); } link->state.delivery_count++; link->state.link_credit--; link->queued++; // XXX: need to fill in remote state: delivery->remote.state = ...; delivery->remote.settled = settled; if (settled) { delivery->updated = true; pn_work_update(transport->connection, delivery); } } pn_buffer_append(delivery->bytes, payload->start, payload->size); ssn->incoming_bytes += payload->size; delivery->done = !more; ssn->state.incoming_transfer_count++; ssn->state.incoming_window--; // XXX: need better policy for when to refresh window if 
     (!ssn->state.incoming_window && (int32_t) link->state.local_handle >= 0) {
    /* window exhausted and link still attached locally: advertise a
       refreshed window via a FLOW frame */
    pn_post_flow(transport, ssn, link);
  }

  pn_collector_put(transport->connection->collector, PN_OBJECT, delivery,
                   PN_DELIVERY);
  return 0;
}

// Handle an incoming FLOW performative: update the session windows and,
// if a link handle is present, the link's credit/drain state.
int pn_do_flow(pn_transport_t *transport, uint8_t frame_type,
               uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  pn_sequence_t onext, inext, delivery_count;
  uint32_t iwin, owin, link_credit;
  uint32_t handle;
  bool inext_init, handle_init, dcount_init, drain;
  int err = pn_data_scan(args, "D.[?IIII?I?II.o]", &inext_init, &inext, &iwin,
                         &onext, &owin, &handle_init, &handle, &dcount_init,
                         &delivery_count, &link_credit, &drain);
  if (err) return err;

  pn_session_t *ssn = pn_channel_state(transport, channel);

  if (inext_init) {
    ssn->state.remote_incoming_window =
      inext + iwin - ssn->state.outgoing_transfer_count;
  } else {
    ssn->state.remote_incoming_window = iwin;
  }

  if (handle_init) {
    pn_link_t *link = pn_handle_state(ssn, handle);
    if (link->endpoint.type == SENDER) {
      /* receiver is granting us credit */
      pn_sequence_t receiver_count;
      if (dcount_init) {
        receiver_count = delivery_count;
      } else {
        // our initial delivery count
        receiver_count = 0;
      }
      pn_sequence_t old = link->state.link_credit;
      link->state.link_credit =
        receiver_count + link_credit - link->state.delivery_count;
      link->credit += link->state.link_credit - old;
      link->drain = drain;
      pn_delivery_t *delivery = pn_link_current(link);
      if (delivery) pn_work_update(transport->connection, delivery);
    } else {
      /* sender reports drained credit via an advanced delivery-count */
      pn_sequence_t delta = delivery_count - link->state.delivery_count;
      if (delta > 0) {
        link->state.delivery_count += delta;
        link->state.link_credit -= delta;
        link->credit -= delta;
        link->drained += delta;
      }
    }

    pn_collector_put(transport->connection->collector, PN_OBJECT, link,
                     PN_LINK_FLOW);
  }

  return 0;
}

/* pn_data_scan formats used to locate the error section in close/end,
   detach, and disposition performatives respectively. */
#define SCAN_ERROR_DEFAULT ("D.[D.[sSC]")
#define SCAN_ERROR_DETACH ("D.[..D.[sSC]")
#define SCAN_ERROR_DISP ("[D.[sSC]")

// Decode an error condition (symbolic name, description, info map) out of
// *data* using the given scan format, into *condition*.
static int pn_scan_error(pn_data_t *data, pn_condition_t *condition,
                         const char *fmt)
{
  pn_bytes_t cond;
  pn_bytes_t
desc; pn_condition_clear(condition); int err = pn_data_scan(data, fmt, &cond, &desc, condition->info); if (err) return err; pn_string_setn(condition->name, cond.start, cond.size); pn_string_setn(condition->description, desc.start, desc.size); pn_data_rewind(condition->info); return 0; } int pn_do_disposition(pn_transport_t *transport, uint8_t frame_type, uint16_t channel, pn_data_t *args, const pn_bytes_t *payload) { bool role; pn_sequence_t first, last; uint64_t type = 0; bool last_init, settled, type_init; pn_data_clear(transport->disp_data); int err = pn_data_scan(args, "D.[oI?IoD?LC]", &role, &first, &last_init, &last, &settled, &type_init, &type, transport->disp_data); if (err) return err; if (!last_init) last = first; pn_session_t *ssn = pn_channel_state(transport, channel); pn_delivery_map_t *deliveries; if (role) { deliveries = &ssn->state.outgoing; } else { deliveries = &ssn->state.incoming; } pn_data_rewind(transport->disp_data); bool remote_data = (pn_data_next(transport->disp_data) && pn_data_get_list(transport->disp_data) > 0); for (pn_sequence_t id = first; id <= last; id++) { pn_delivery_t *delivery = pn_delivery_map_get(deliveries, id); pn_disposition_t *remote = &delivery->remote; if (delivery) { if (type_init) remote->type = type; if (remote_data) { switch (type) { case PN_RECEIVED: pn_data_rewind(transport->disp_data); pn_data_next(transport->disp_data); pn_data_enter(transport->disp_data); if (pn_data_next(transport->disp_data)) remote->section_number = pn_data_get_uint(transport->disp_data); if (pn_data_next(transport->disp_data)) remote->section_offset = pn_data_get_ulong(transport->disp_data); break; case PN_ACCEPTED: break; case PN_REJECTED: err = pn_scan_error(transport->disp_data, &remote->condition, SCAN_ERROR_DISP); if (err) return err; break; case PN_RELEASED: break; case PN_MODIFIED: pn_data_rewind(transport->disp_data); pn_data_next(transport->disp_data); pn_data_enter(transport->disp_data); if (pn_data_next(transport->disp_data)) 
            remote->failed = pn_data_get_bool(transport->disp_data);
          if (pn_data_next(transport->disp_data))
            remote->undeliverable = pn_data_get_bool(transport->disp_data);
          /* capture the message-annotations element of the modified outcome */
          pn_data_narrow(transport->disp_data);
          pn_data_clear(remote->data);
          pn_data_appendn(remote->annotations, transport->disp_data, 1);
          pn_data_widen(transport->disp_data);
          break;
        default:
          /* unknown outcome: keep the raw encoded state */
          pn_data_copy(remote->data, transport->disp_data);
          break;
        }
      }
      remote->settled = settled;
      delivery->updated = true;
      pn_work_update(transport->connection, delivery);
      pn_collector_put(transport->connection->collector, PN_OBJECT, delivery,
                       PN_DELIVERY);
    }
  }

  return 0;
}

// Handle an incoming DETACH performative: record the remote close/detach
// and unmap the remote handle.
int pn_do_detach(pn_transport_t *transport, uint8_t frame_type,
                 uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  uint32_t handle;
  bool closed;
  int err = pn_data_scan(args, "D.[Io]", &handle, &closed);
  if (err) return err;
  pn_session_t *ssn = pn_channel_state(transport, channel);
  if (!ssn) {
    return pn_do_error(transport, "amqp:invalid-field",
                       "no such channel: %u", channel);
  }
  pn_link_t *link = pn_handle_state(ssn, handle);
  if (!link) {
    return pn_do_error(transport, "amqp:invalid-field",
                       "no such handle: %u", handle);
  }
  err = pn_scan_error(args, &link->endpoint.remote_condition,
                      SCAN_ERROR_DETACH);
  if (err) return err;

  if (closed) {
    PN_SET_REMOTE(link->endpoint.state, PN_REMOTE_CLOSED);
    pn_collector_put(transport->connection->collector, PN_OBJECT, link,
                     PN_LINK_REMOTE_CLOSE);
  } else {
    pn_collector_put(transport->connection->collector, PN_OBJECT, link,
                     PN_LINK_REMOTE_DETACH);
  }

  pni_unmap_remote_handle(link);
  return 0;
}

// Handle an incoming END performative: mark the session remotely closed.
int pn_do_end(pn_transport_t *transport, uint8_t frame_type,
              uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  pn_session_t *ssn = pn_channel_state(transport, channel);
  int err = pn_scan_error(args, &ssn->endpoint.remote_condition,
                          SCAN_ERROR_DEFAULT);
  if (err) return err;
  PN_SET_REMOTE(ssn->endpoint.state, PN_REMOTE_CLOSED);
  pn_collector_put(transport->connection->collector, PN_OBJECT, ssn,
                   PN_SESSION_REMOTE_CLOSE);
  pni_unmap_remote_channel(ssn);
  return 0;
}

// Handle an incoming CLOSE performative: mark the connection remotely
// closed and remember that no more input may follow.
int pn_do_close(pn_transport_t *transport, uint8_t frame_type,
                uint16_t channel, pn_data_t *args, const pn_bytes_t *payload)
{
  pn_connection_t *conn = transport->connection;
  int err = pn_scan_error(args, &transport->remote_condition,
                          SCAN_ERROR_DEFAULT);
  if (err) return err;
  transport->close_rcvd = true;
  PN_SET_REMOTE(conn->endpoint.state, PN_REMOTE_CLOSED);
  pn_collector_put(transport->connection->collector, PN_OBJECT, conn,
                   PN_CONNECTION_REMOTE_CLOSE);
  return 0;
}

// deprecated
// Feed *available* bytes into the transport; returns bytes consumed,
// PN_ARG_ERR on a NULL transport, or a negative error code.
ssize_t pn_transport_input(pn_transport_t *transport, const char *bytes,
                           size_t available)
{
  if (!transport) return PN_ARG_ERR;
  if (available == 0) {
    /* zero bytes signals end-of-stream */
    return pn_transport_close_tail(transport);
  }
  const size_t original = available;
  ssize_t capacity = pn_transport_capacity(transport);
  if (capacity < 0) return capacity;
  while (available && capacity) {
    char *dest = pn_transport_tail(transport);
    assert(dest);
    size_t count = pn_min( (size_t)capacity, available );
    memmove( dest, bytes, count );
    available -= count;
    bytes += count;
    int rc = pn_transport_process( transport, count );
    if (rc < 0) return rc;
    capacity = pn_transport_capacity(transport);
    if (capacity < 0) return capacity;
  }

  return original - available;
}

// process pending input until none remaining or EOS
static ssize_t transport_consume(pn_transport_t *transport)
{
  // This allows whatever is driving the I/O to set the error
  // condition on the transport before doing pn_transport_close_head()
  // or pn_transport_close_tail(). This allows all transport errors to
  // flow to the app the same way, but provides cleaner error messages
  // since we don't try to look for a protocol header when, e.g. the
  // connection was refused.
  if (!transport->bytes_input && transport->tail_closed &&
      pn_condition_is_set(&transport->condition)) {
    pn_do_error(transport, NULL, NULL);
    return PN_EOS;
  }

  size_t consumed = 0;
  /* run the top I/O layer until it stops making progress or signals EOS */
  while (transport->input_pending || transport->tail_closed) {
    ssize_t n;
    n = transport->io_layers[0]->
      process_input( transport, 0,
                     transport->input_buf + consumed,
                     transport->input_pending );
    if (n > 0) {
      consumed += n;
      transport->input_pending -= n;
    } else if (n == 0) {
      break;
    } else {
      assert(n == PN_EOS);
      if (transport->trace & (PN_TRACE_RAW | PN_TRACE_FRM))
        pn_transport_log(transport, "  <- EOS");
      transport->input_pending = 0;  // XXX ???
      return n;
    }
  }

  /* shift the unconsumed remainder to the front of the buffer */
  if (transport->input_pending && consumed) {
    memmove( transport->input_buf,
             &transport->input_buf[consumed], transport->input_pending );
  }

  return consumed;
}

// Read and validate the 8-byte AMQP protocol header, then swap this
// layer for the plain AMQP layer (or the write-header layer if the
// header has not been sent yet).
static ssize_t pn_input_read_amqp_header(pn_transport_t* transport,
                                         unsigned int layer,
                                         const char* bytes, size_t available)
{
  bool eos = pn_transport_capacity(transport)==PN_EOS;
  pni_protocol_type_t protocol = pni_sniff_header(bytes, available);
  switch (protocol) {
  case PNI_PROTOCOL_AMQP1:
    if (transport->io_layers[layer] == &amqp_read_header_layer) {
      transport->io_layers[layer] = &amqp_layer;
    } else {
      transport->io_layers[layer] = &amqp_write_header_layer;
    }
    if (transport->trace & PN_TRACE_FRM)
      pn_transport_logf(transport, "  <- %s", "AMQP");
    return 8;
  case PNI_PROTOCOL_INSUFFICIENT:
    if (!eos) return 0;
    /* Fallthru */
  default:
    break;
  }
  char quoted[1024];
  pn_quote_data(quoted, 1024, bytes, available);
  pn_do_error(transport, "amqp:connection:framing-error",
              "%s header mismatch: %s ['%s']%s", "AMQP",
              pni_protocol_name(protocol), quoted,
              !eos ?
              "" : " (connection aborted)");
  return PN_EOS;
}

// Feed raw AMQP bytes to the frame dispatcher; rejects data arriving
// after a CLOSE and treats a zero-byte read as an aborted connection.
static ssize_t pn_input_read_amqp(pn_transport_t* transport,
                                  unsigned int layer, const char* bytes,
                                  size_t available)
{
  if (transport->close_rcvd) {
    if (available > 0) {
      pn_do_error(transport, "amqp:connection:framing-error",
                  "data after close");
      return PN_EOS;
    }
  }

  if (!available) {
    pn_do_error(transport, "amqp:connection:framing-error",
                "connection aborted");
    return PN_EOS;
  }

  ssize_t n = pn_dispatcher_input(transport, bytes, available, true,
                                  &transport->halt);
  if (n < 0) {
    //return pn_error_set(transport->error, n, "dispatch error");
    return PN_EOS;
  } else if (transport->close_rcvd) {
    return PN_EOS;
  } else {
    return n;
  }
}

/* process AMQP related timer events */
static pn_timestamp_t pn_tick_amqp(pn_transport_t* transport,
                                   unsigned int layer, pn_timestamp_t now)
{
  pn_timestamp_t timeout = 0;

  /* enforce our own idle timeout on the remote peer's traffic */
  if (transport->local_idle_timeout) {
    if (transport->dead_remote_deadline == 0 ||
        transport->last_bytes_input != transport->bytes_input) {
      transport->dead_remote_deadline = now + transport->local_idle_timeout;
      transport->last_bytes_input = transport->bytes_input;
    } else if (transport->dead_remote_deadline <= now) {
      transport->dead_remote_deadline = now + transport->local_idle_timeout;
      if (!transport->posted_idle_timeout) {
        transport->posted_idle_timeout = true;
        // Note: AMQP-1.0 really should define a generic "timeout" error, but does not.
        pn_do_error(transport, "amqp:resource-limit-exceeded",
                    "local-idle-timeout expired");
      }
    }
    timeout = transport->dead_remote_deadline;
  }

  // Prevent remote idle timeout as describe by AMQP 1.0:
  if (transport->remote_idle_timeout && !transport->close_sent) {
    if (transport->keepalive_deadline == 0 ||
        transport->last_bytes_output != transport->bytes_output) {
      /* advertise at half the remote's timeout, per the spec guidance */
      transport->keepalive_deadline =
        now + (pn_timestamp_t)(transport->remote_idle_timeout/2.0);
      transport->last_bytes_output = transport->bytes_output;
    } else if (transport->keepalive_deadline <= now) {
      transport->keepalive_deadline =
        now + (pn_timestamp_t)(transport->remote_idle_timeout/2.0);
      if (transport->available == 0) {    // no outbound data pending
        // so send empty frame (and account for it!)
        pn_post_frame(transport, AMQP_FRAME_TYPE, 0, "");
        transport->last_bytes_output += transport->available;
      }
    }
    timeout = pn_timestamp_min( timeout, transport->keepalive_deadline );
  }

  return timeout;
}

// Emit the OPEN frame once the local connection endpoint is initialized.
int pn_process_conn_setup(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == CONNECTION)
  {
    if (!(endpoint->state & PN_LOCAL_UNINIT) && !transport->open_sent)
    {
      // as per the recommendation in the spec, advertise half our
      // actual timeout to the remote
      const pn_millis_t idle_timeout = transport->local_idle_timeout
          ? (transport->local_idle_timeout/2)
          : 0;
      pn_connection_t *connection = (pn_connection_t *) endpoint;
      const char *cid = pn_string_get(connection->container);
      int err = pn_post_frame(transport, AMQP_FRAME_TYPE, 0,
                              "DL[SS?I?H?InnCCC]", OPEN,
                              cid ?
                              cid : "",
                              pn_string_get(connection->hostname),
                              // if not zero, advertise our max frame size and idle timeout
                              (bool)transport->local_max_frame,
                              transport->local_max_frame,
                              (bool)transport->channel_max,
                              transport->channel_max,
                              (bool)idle_timeout, idle_timeout,
                              connection->offered_capabilities,
                              connection->desired_capabilities,
                              connection->properties);
      if (err) return err;
      transport->open_sent = true;
    }
  }

  return 0;
}

// Return the lowest channel/handle number not already in use in *aliases*.
static uint16_t allocate_alias(pn_hash_t *aliases)
{
  for (uint32_t i = 0; i < 65536; i++) {
    if (!pn_hash_get(aliases, i)) {
      return i;
    }
  }

  assert(false);
  return 0;
}

// Outgoing window: frames needed for the buffered outgoing bytes, but at
// least one per pending outgoing delivery.
size_t pn_session_outgoing_window(pn_session_t *ssn)
{
  uint32_t size = ssn->connection->transport->remote_max_frame;
  if (!size) {
    return ssn->outgoing_deliveries;
  } else {
    pn_sequence_t frames = ssn->outgoing_bytes/size;
    if (ssn->outgoing_bytes % size) {
      frames++;
    }
    return pn_max(frames, ssn->outgoing_deliveries);
  }
}

// Incoming window: remaining buffer capacity expressed in max-size frames.
size_t pn_session_incoming_window(pn_session_t *ssn)
{
  uint32_t size = ssn->connection->transport->local_max_frame;
  if (!size) {
    return 2147483647; // biggest legal value
  } else {
    return (ssn->incoming_capacity - ssn->incoming_bytes)/size;
  }
}

// Bind the session to a fresh local channel number.
static void pni_map_local_channel(pn_session_t *ssn)
{
  pn_transport_t *transport = ssn->connection->transport;
  pn_session_state_t *state = &ssn->state;
  uint16_t channel = allocate_alias(transport->local_channels);
  state->local_channel = channel;
  pn_hash_put(transport->local_channels, channel, ssn);
  pn_ep_incref(&ssn->endpoint);
}

// Emit the BEGIN frame for any locally initialized, not-yet-mapped session.
int pn_process_ssn_setup(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == SESSION && transport->open_sent)
  {
    pn_session_t *ssn = (pn_session_t *) endpoint;
    pn_session_state_t *state = &ssn->state;
    if (!(endpoint->state & PN_LOCAL_UNINIT) && state->local_channel == (uint16_t) -1)
    {
      pni_map_local_channel(ssn);
      state->incoming_window = pn_session_incoming_window(ssn);
      state->outgoing_window = pn_session_outgoing_window(ssn);
      pn_post_frame(transport, AMQP_FRAME_TYPE, state->local_channel,
                    "DL[?HIII]", BEGIN,
                    ((int16_t) state->remote_channel >= 0),
                    state->remote_channel,
                    state->outgoing_transfer_count,
                    state->incoming_window,
                    state->outgoing_window);
    }
  }

  return 0;
}

// Map a terminus expiry policy to its AMQP symbol (NULL for the default,
// session-scoped policy, which is then omitted from the frame).
static const char *expiry_symbol(pn_expiry_policy_t policy)
{
  switch (policy)
  {
  case PN_EXPIRE_WITH_LINK: return "link-detach";
  case PN_EXPIRE_WITH_SESSION: return NULL;
  case PN_EXPIRE_WITH_CONNECTION: return "connection-close";
  case PN_EXPIRE_NEVER: return "never";
  }
  return NULL;
}

// Bind the link to a fresh local handle within its session.
static void pni_map_local_handle(pn_link_t *link) {
  pn_link_state_t *state = &link->state;
  pn_session_state_t *ssn_state = &link->session->state;
  state->local_handle = allocate_alias(ssn_state->local_handles);
  pn_hash_put(ssn_state->local_handles, state->local_handle, link);
  pn_ep_incref(&link->endpoint);
}

// Emit the ATTACH frame for any locally initialized, not-yet-mapped link.
int pn_process_link_setup(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (transport->open_sent && (endpoint->type == SENDER ||
                               endpoint->type == RECEIVER))
  {
    pn_link_t *link = (pn_link_t *) endpoint;
    pn_session_state_t *ssn_state = &link->session->state;
    pn_link_state_t *state = &link->state;
    if (((int16_t) ssn_state->local_channel >= 0) &&
        !(endpoint->state & PN_LOCAL_UNINIT) && state->local_handle == (uint32_t) -1)
    {
      pni_map_local_handle(link);
      const pn_distribution_mode_t dist_mode = link->source.distribution_mode;
      if (link->target.type == PN_COORDINATOR) {
        /* transaction coordinator target uses its own frame layout */
        int err = pn_post_frame(transport, AMQP_FRAME_TYPE,
                                ssn_state->local_channel,
                                "DL[SIoBB?DL[SIsIoC?sCnCC]DL[C]nnI]", ATTACH,
                                pn_string_get(link->name),
                                state->local_handle,
                                endpoint->type == RECEIVER,
                                link->snd_settle_mode,
                                link->rcv_settle_mode,
                                (bool) link->source.type, SOURCE,
                                pn_string_get(link->source.address),
                                link->source.durability,
                                expiry_symbol(link->source.expiry_policy),
                                link->source.timeout,
                                link->source.dynamic,
                                link->source.properties,
                                (dist_mode != PN_DIST_MODE_UNSPECIFIED),
                                dist_mode2symbol(dist_mode),
                                link->source.filter,
                                link->source.outcomes,
                                link->source.capabilities,
                                COORDINATOR,
                                link->target.capabilities,
                                0);
        if (err)
return err; } else { int err = pn_post_frame(transport, AMQP_FRAME_TYPE, ssn_state->local_channel, "DL[SIoBB?DL[SIsIoC?sCnCC]?DL[SIsIoCC]nnI]", ATTACH, pn_string_get(link->name), state->local_handle, endpoint->type == RECEIVER, link->snd_settle_mode, link->rcv_settle_mode, (bool) link->source.type, SOURCE, pn_string_get(link->source.address), link->source.durability, expiry_symbol(link->source.expiry_policy), link->source.timeout, link->source.dynamic, link->source.properties, (dist_mode != PN_DIST_MODE_UNSPECIFIED), dist_mode2symbol(dist_mode), link->source.filter, link->source.outcomes, link->source.capabilities, (bool) link->target.type, TARGET, pn_string_get(link->target.address), link->target.durability, expiry_symbol(link->target.expiry_policy), link->target.timeout, link->target.dynamic, link->target.properties, link->target.capabilities, 0); if (err) return err; } } } return 0; } int pn_post_flow(pn_transport_t *transport, pn_session_t *ssn, pn_link_t *link) { ssn->state.incoming_window = pn_session_incoming_window(ssn); ssn->state.outgoing_window = pn_session_outgoing_window(ssn); bool linkq = (bool) link; pn_link_state_t *state = &link->state; return pn_post_frame(transport, AMQP_FRAME_TYPE, ssn->state.local_channel, "DL[?IIII?I?I?In?o]", FLOW, (int16_t) ssn->state.remote_channel >= 0, ssn->state.incoming_transfer_count, ssn->state.incoming_window, ssn->state.outgoing_transfer_count, ssn->state.outgoing_window, linkq, linkq ? state->local_handle : 0, linkq, linkq ? state->delivery_count : 0, linkq, linkq ? state->link_credit : 0, linkq, linkq ? 
                       link->drain : false);
}

// If a receiver's credit or window state has drifted from what was last
// advertised, post a refreshing FLOW frame.
int pn_process_flow_receiver(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == RECEIVER && endpoint->state & PN_LOCAL_ACTIVE)
  {
    pn_link_t *rcv = (pn_link_t *) endpoint;
    pn_session_t *ssn = rcv->session;
    pn_link_state_t *state = &rcv->state;
    if ((int16_t) ssn->state.local_channel >= 0 &&
        (int32_t) state->local_handle >= 0 &&
        ((rcv->drain || state->link_credit != rcv->credit - rcv->queued) ||
         !ssn->state.incoming_window)) {
      state->link_credit = rcv->credit - rcv->queued;
      return pn_post_flow(transport, ssn, rcv);
    }
  }

  return 0;
}

// Flush the session's batched DISPOSITION range, if one is pending.
int pn_flush_disp(pn_transport_t *transport, pn_session_t *ssn)
{
  uint64_t code = ssn->state.disp_code;
  bool settled = ssn->state.disp_settled;
  if (ssn->state.disp) {
    int err = pn_post_frame(transport, AMQP_FRAME_TYPE, ssn->state.local_channel,
                            "DL[oIIo?DL[]]", DISPOSITION,
                            ssn->state.disp_type,
                            ssn->state.disp_first,
                            ssn->state.disp_last,
                            settled,
                            (bool)code, code);
    if (err) return err;
    ssn->state.disp_type = 0;
    ssn->state.disp_code = 0;
    ssn->state.disp_settled = 0;
    ssn->state.disp_first = 0;
    ssn->state.disp_last = 0;
    ssn->state.disp = false;
  }

  return 0;
}

// Queue (or immediately post) a DISPOSITION for *delivery*, batching
// consecutive delivery-ids with the same outcome into one range.
int pn_post_disp(pn_transport_t *transport, pn_delivery_t *delivery)
{
  pn_link_t *link = delivery->link;
  pn_session_t *ssn = link->session;
  pn_session_state_t *ssn_state = &ssn->state;
  pn_modified(transport->connection, &link->session->endpoint, false);
  pn_delivery_state_t *state = &delivery->state;
  assert(state->init);
  bool role = (link->endpoint.type == RECEIVER);
  uint64_t code = delivery->local.type;

  if (!code && !delivery->local.settled) {
    return 0;
  }

  if (!pni_disposition_batchable(&delivery->local)) {
    /* non-batchable outcomes carry per-delivery state and go out alone */
    pn_data_clear(transport->disp_data);
    pni_disposition_encode(&delivery->local, transport->disp_data);
    return pn_post_frame(transport, AMQP_FRAME_TYPE, ssn->state.local_channel,
                         "DL[oIIo?DLC]", DISPOSITION,
                         role, state->id, state->id,
                         delivery->local.settled,
                         (bool)code, code,
                         transport->disp_data);
  }

  if (ssn_state->disp && code ==
      ssn_state->disp_code &&
      delivery->local.settled == ssn_state->disp_settled &&
      ssn_state->disp_type == role) {
    /* extend the pending range if this id is adjacent on either side */
    if (state->id == ssn_state->disp_first - 1) {
      ssn_state->disp_first = state->id;
      return 0;
    } else if (state->id == ssn_state->disp_last + 1) {
      ssn_state->disp_last = state->id;
      return 0;
    }
  }

  /* incompatible with the pending batch: flush it and start a new one */
  if (ssn_state->disp) {
    int err = pn_flush_disp(transport, ssn);
    if (err) return err;
  }

  ssn_state->disp_type = role;
  ssn_state->disp_code = code;
  ssn_state->disp_settled = delivery->local.settled;
  ssn_state->disp_first = state->id;
  ssn_state->disp_last = state->id;
  ssn_state->disp = true;

  return 0;
}

// Transport work for a sender-side delivery: post TRANSFER frame(s) while
// credit and the remote incoming window allow, then a DISPOSITION if
// needed; *settle indicates the delivery may be fully settled afterwards.
int pn_process_tpwork_sender(pn_transport_t *transport,
                             pn_delivery_t *delivery, bool *settle)
{
  *settle = false;
  pn_link_t *link = delivery->link;
  pn_session_state_t *ssn_state = &link->session->state;
  pn_link_state_t *link_state = &link->state;
  bool xfr_posted = false;
  if ((int16_t) ssn_state->local_channel >= 0 &&
      (int32_t) link_state->local_handle >= 0) {
    pn_delivery_state_t *state = &delivery->state;
    if (!state->sent && (delivery->done || pn_buffer_size(delivery->bytes) > 0) &&
        ssn_state->remote_incoming_window > 0 && link_state->link_credit > 0) {
      if (!state->init) {
        state = pn_delivery_map_push(&ssn_state->outgoing, delivery);
      }

      pn_bytes_t bytes = pn_buffer_bytes(delivery->bytes);
      size_t full_size = bytes.size;
      pn_bytes_t tag = pn_buffer_bytes(delivery->tag);
      pn_data_clear(transport->disp_data);
      pni_disposition_encode(&delivery->local, transport->disp_data);
      int count = pn_post_amqp_transfer_frame(transport,
                                              ssn_state->local_channel,
                                              link_state->local_handle,
                                              state->id, &bytes, &tag,
                                              0, // message-format
                                              delivery->local.settled,
                                              !delivery->done,
                                              ssn_state->remote_incoming_window,
                                              delivery->local.type,
                                              transport->disp_data);
      if (count < 0) return count;
      xfr_posted = true;
      ssn_state->outgoing_transfer_count += count;
      ssn_state->remote_incoming_window -= count;

      /* account for whatever portion of the payload actually went out */
      int sent = full_size - bytes.size;
      pn_buffer_trim(delivery->bytes, sent, 0);
      link->session->outgoing_bytes -=
        sent;

      if (!pn_buffer_size(delivery->bytes) && delivery->done) {
        /* whole delivery sent: consume one unit of credit */
        state->sent = true;
        link_state->delivery_count++;
        link_state->link_credit--;
        link->queued--;
        link->session->outgoing_deliveries--;
      }

      pn_collector_put(transport->connection->collector, PN_OBJECT, link,
                       PN_LINK_FLOW);
    }
  }

  pn_delivery_state_t *state = delivery->state.init ? &delivery->state : NULL;

  if ((int16_t) ssn_state->local_channel >= 0 && !delivery->remote.settled
      && state && state->sent && !xfr_posted) {
    int err = pn_post_disp(transport, delivery);
    if (err) return err;
  }

  *settle = delivery->local.settled && state && state->sent;
  return 0;
}

// Transport work for a receiver-side delivery: post a DISPOSITION and
// refresh the incoming window when it hits zero.
int pn_process_tpwork_receiver(pn_transport_t *transport,
                               pn_delivery_t *delivery, bool *settle)
{
  *settle = false;
  pn_link_t *link = delivery->link;
  // XXX: need to prevent duplicate disposition sending
  pn_session_t *ssn = link->session;
  if ((int16_t) ssn->state.local_channel >= 0 && !delivery->remote.settled
      && delivery->state.init) {
    int err = pn_post_disp(transport, delivery);
    if (err) return err;
  }

  // XXX: need to centralize this policy and improve it
  if (!ssn->state.incoming_window) {
    int err = pn_post_flow(transport, ssn, link);
    if (err) return err;
  }

  *settle = delivery->local.settled;
  return 0;
}

// Walk the connection's transport-work list and process each delivery,
// settling those that are completely done.
int pn_process_tpwork(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == CONNECTION && !transport->close_sent)
  {
    pn_connection_t *conn = (pn_connection_t *) endpoint;
    pn_delivery_t *delivery = conn->tpwork_head;
    while (delivery)
    {
      /* grab the next pointer first: processing may unlink the delivery */
      pn_delivery_t *tp_next = delivery->tpwork_next;
      bool settle = false;

      pn_link_t *link = delivery->link;
      pn_delivery_map_t *dm = NULL;
      if (pn_link_is_sender(link)) {
        dm = &link->session->state.outgoing;
        int err = pn_process_tpwork_sender(transport, delivery, &settle);
        if (err) return err;
      } else {
        dm = &link->session->state.incoming;
        int err = pn_process_tpwork_receiver(transport, delivery, &settle);
        if (err) return err;
      }

      if (settle) {
        pn_full_settle(dm, delivery);
      } else if (!pn_delivery_buffered(delivery)) {
        pn_clear_tpwork(delivery);
      }

      delivery = tp_next;
    }
  }

  return 0;
}

// Flush any batched disposition range on each open session.
int pn_process_flush_disp(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == SESSION) {
    pn_session_t *session = (pn_session_t *) endpoint;
    pn_session_state_t *state = &session->state;
    if ((int16_t) state->local_channel >= 0 && !transport->close_sent)
    {
      int err = pn_flush_disp(transport, session);
      if (err) return err;
    }
  }

  return 0;
}

// When a draining sender has nothing buffered, report the drained credit
// back to the receiver via a FLOW frame.
int pn_process_flow_sender(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == SENDER && endpoint->state & PN_LOCAL_ACTIVE)
  {
    pn_link_t *snd = (pn_link_t *) endpoint;
    pn_session_t *ssn = snd->session;
    pn_link_state_t *state = &snd->state;
    if ((int16_t) ssn->state.local_channel >= 0 &&
        (int32_t) state->local_handle >= 0 &&
        snd->drain && snd->drained) {
      pn_delivery_t *tail = snd->unsettled_tail;
      if (!tail || !pn_delivery_buffered(tail)) {
        /* advance delivery-count by the unused credit to signal drain */
        state->delivery_count += state->link_credit;
        state->link_credit = 0;
        snd->drained = 0;
        return pn_post_flow(transport, ssn, snd);
      }
    }
  }

  return 0;
}

// Release the link's local handle (-2 marks "was mapped, now unmapped").
static void pni_unmap_local_handle(pn_link_t *link) {
  pn_link_state_t *state = &link->state;
  uintptr_t handle = state->local_handle;
  state->local_handle = -2;
  if (pn_hash_get(link->session->state.local_handles, handle)) {
    pn_ep_decref(&link->endpoint);
  }
  // may delete link
  pn_hash_del(link->session->state.local_handles, handle);
}

// Emit the DETACH frame for a locally closed or detached link.
int pn_process_link_teardown(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == SENDER || endpoint->type == RECEIVER)
  {
    pn_link_t *link = (pn_link_t *) endpoint;
    pn_session_t *session = link->session;
    pn_session_state_t *ssn_state = &session->state;
    pn_link_state_t *state = &link->state;
    if (((endpoint->state & PN_LOCAL_CLOSED) || link->detached) &&
        (int32_t) state->local_handle >= 0 &&
        (int16_t) ssn_state->local_channel >= 0 &&
        !transport->close_sent) {
      /* defer the detach while queued sends could still be flushed */
      if (pn_link_is_sender(link) && pn_link_queued(link) &&
          (int32_t) state->remote_handle != -2 &&
          (int16_t) ssn_state->remote_channel != -2 &&
          !transport->close_rcvd) return 0;

      const char *name = NULL;
      const char *description = NULL;
      pn_data_t *info = NULL;

      if (pn_condition_is_set(&endpoint->condition)) {
        name = pn_condition_get_name(&endpoint->condition);
        description = pn_condition_get_description(&endpoint->condition);
        info = pn_condition_info(&endpoint->condition);
      }

      int err = pn_post_frame(transport, AMQP_FRAME_TYPE,
                              ssn_state->local_channel,
                              "DL[Io?DL[sSC]]", DETACH,
                              state->local_handle,
                              !link->detached,
                              (bool)name, ERROR, name, description, info);
      if (err) return err;
      pni_unmap_local_handle(link);
    }

    pn_clear_modified(transport->connection, endpoint);
  }

  return 0;
}

// True if there is still queued outgoing data that is worth buffering,
// i.e. the remote endpoints it is destined for are still mapped.
bool pn_pointful_buffering(pn_transport_t *transport, pn_session_t *session)
{
  if (transport->close_rcvd) return false;
  if (!transport->open_rcvd) return true;

  pn_connection_t *conn = transport->connection;
  pn_link_t *link = pn_link_head(conn, 0);
  while (link) {
    if (pn_link_is_sender(link) && pn_link_queued(link) > 0) {
      pn_session_t *ssn = link->session;
      if (session && session == ssn) {
        if ((int32_t) link->state.remote_handle != -2 &&
            (int16_t) session->state.remote_channel != -2) {
          return true;
        }
      }
    }
    link = pn_link_next(link, 0);
  }

  return false;
}

// Release the session's local channel and its delivery/handle state.
static void pni_unmap_local_channel(pn_session_t *ssn) {
  // XXX: should really update link state also
  pn_delivery_map_clear(&ssn->state.outgoing);
  pni_transport_unbind_handles(ssn->state.local_handles, false);
  pn_transport_t *transport = ssn->connection->transport;
  pn_session_state_t *state = &ssn->state;
  uintptr_t channel = state->local_channel;
  state->local_channel = -2;
  if (pn_hash_get(transport->local_channels, channel)) {
    pn_ep_decref(&ssn->endpoint);
  }
  // may delete session
  pn_hash_del(transport->local_channels, channel);
}

// Emit the END frame for a locally closed session.
int pn_process_ssn_teardown(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == SESSION)
  {
    pn_session_t *session = (pn_session_t *) endpoint;
    pn_session_state_t *state = &session->state;
    if (endpoint->state & PN_LOCAL_CLOSED && (int16_t)
        state->local_channel >= 0 && !transport->close_sent)
    {
      /* defer the END while queued sends on this session could flush */
      if (pn_pointful_buffering(transport, session)) {
        return 0;
      }

      const char *name = NULL;
      const char *description = NULL;
      pn_data_t *info = NULL;

      if (pn_condition_is_set(&endpoint->condition)) {
        name = pn_condition_get_name(&endpoint->condition);
        description = pn_condition_get_description(&endpoint->condition);
        info = pn_condition_info(&endpoint->condition);
      }

      int err = pn_post_frame(transport, AMQP_FRAME_TYPE, state->local_channel,
                              "DL[?DL[sSC]]", END,
                              (bool) name, ERROR, name, description, info);
      if (err) return err;
      pni_unmap_local_channel(session);
    }

    pn_clear_modified(transport->connection, endpoint);
  }

  return 0;
}

// Emit the CLOSE frame once the local connection endpoint is closed and
// no useful buffered output remains.
int pn_process_conn_teardown(pn_transport_t *transport, pn_endpoint_t *endpoint)
{
  if (endpoint->type == CONNECTION)
  {
    if (endpoint->state & PN_LOCAL_CLOSED && !transport->close_sent) {
      if (pn_pointful_buffering(transport, NULL)) return 0;
      int err = pn_post_close(transport, NULL, NULL);
      if (err) return err;
      transport->close_sent = true;
    }

    pn_clear_modified(transport->connection, endpoint);
  }

  return 0;
}

// Apply *phase* to every endpoint on the connection's transport list.
int pn_phase(pn_transport_t *transport, int (*phase)(pn_transport_t *, pn_endpoint_t *))
{
  pn_connection_t *conn = transport->connection;
  pn_endpoint_t *endpoint = conn->transport_head;
  while (endpoint)
  {
    pn_endpoint_t *next = endpoint->transport_next;
    int err = phase(transport, endpoint);
    if (err) return err;
    endpoint = next;
  }
  return 0;
}

// Run all endpoint processing phases in protocol order: setup frames,
// flow, transport work, dispositions, then teardown frames.
int pn_process(pn_transport_t *transport)
{
  int err;
  if ((err = pn_phase(transport, pn_process_conn_setup))) return err;
  if ((err = pn_phase(transport, pn_process_ssn_setup))) return err;
  if ((err = pn_phase(transport, pn_process_link_setup))) return err;
  if ((err = pn_phase(transport, pn_process_flow_receiver))) return err;

  // XXX: this has to happen two times because we might settle stuff
  // on the first pass and create space for more work to be done on the
  // second pass
  if ((err = pn_phase(transport, pn_process_tpwork))) return err;
  if ((err =
       pn_phase(transport, pn_process_tpwork))) return err;

  if ((err = pn_phase(transport, pn_process_flush_disp))) return err;
  if ((err = pn_phase(transport, pn_process_flow_sender))) return err;
  if ((err = pn_phase(transport, pn_process_link_teardown))) return err;
  if ((err = pn_phase(transport, pn_process_ssn_teardown))) return err;
  if ((err = pn_phase(transport, pn_process_conn_teardown))) return err;

  if (transport->connection->tpwork_head) {
    pn_modified(transport->connection, &transport->connection->endpoint, false);
  }

  return 0;
}

#define AMQP_HEADER ("AMQP\x00\x01\x00\x00")

// Write the 8-byte AMQP protocol header, then swap this layer for the
// plain AMQP layer (or the read-header layer if it is still expected).
static ssize_t pn_output_write_amqp_header(pn_transport_t* transport,
                                           unsigned int layer,
                                           char* bytes, size_t available)
{
  if (transport->trace & PN_TRACE_FRM)
    pn_transport_logf(transport, "  -> %s", "AMQP");
  assert(available >= 8);
  memmove(bytes, AMQP_HEADER, 8);
  if (transport->io_layers[layer] == &amqp_write_header_layer) {
    transport->io_layers[layer] = &amqp_layer;
  } else {
    transport->io_layers[layer] = &amqp_read_header_layer;
  }
  return 8;
}

// Drive endpoint processing, then drain buffered frames to *bytes*.
static ssize_t pn_output_write_amqp(pn_transport_t* transport,
                                    unsigned int layer,
                                    char* bytes, size_t available)
{
  if (transport->connection && !transport->done_processing) {
    int err = pn_process(transport);
    if (err) {
      pn_transport_logf(transport, "process error %i", err);
      transport->done_processing = true;
    }
  }

  // write out any buffered data _before_ returning PN_EOS, else we
  // could truncate an outgoing Close frame containing a useful error
  // status
  if (!transport->available && transport->close_sent) {
    return PN_EOS;
  }

  return pn_dispatcher_output(transport, bytes, available);
}

// Mark the output side closed (idempotent) and post the corresponding
// transport events.
static void pni_close_head(pn_transport_t *transport)
{
  if (!transport->head_closed) {
    transport->head_closed = true;
    pn_collector_t *collector = pni_transport_collector(transport);
    pn_collector_put(collector, PN_OBJECT, transport, PN_TRANSPORT_HEAD_CLOSED);
    pni_maybe_post_closed(transport);
  }
}

// generate outbound data, return amount of pending output else error
static ssize_t
transport_produce(pn_transport_t *transport)
{
  if (transport->head_closed) return PN_EOS;

  ssize_t space = transport->output_size - transport->output_pending;

  if (space <= 0) {
    // can we expand the buffer?
    int more = 0;
    if (!transport->remote_max_frame)   // no limit, so double it
      more = transport->output_size;
    else if (transport->remote_max_frame > transport->output_size)
      more = pn_min(transport->output_size,
                    transport->remote_max_frame - transport->output_size);
    if (more) {
      char *newbuf = (char *)realloc( transport->output_buf,
                                      transport->output_size + more );
      if (newbuf) {
        transport->output_buf = newbuf;
        transport->output_size += more;
        space += more;
      }
    }
  }

  /* pull frames out of the top I/O layer until it stalls or signals EOS */
  while (space > 0) {
    ssize_t n;
    n = transport->io_layers[0]->
      process_output( transport, 0,
                      &transport->output_buf[transport->output_pending],
                      space );
    if (n > 0) {
      space -= n;
      transport->output_pending += n;
    } else if (n == 0) {
      break;
    } else {
      if (transport->output_pending)
        break;   // return what is available
      if (transport->trace & (PN_TRACE_RAW | PN_TRACE_FRM)) {
        if (n < 0) {
          pn_transport_log(transport, "  -> EOS");
        }
      }
      pni_close_head(transport);
      return n;
    }
  }

  return transport->output_pending;
}

// deprecated
// Copy up to *size* pending output bytes into *bytes* and pop them.
ssize_t pn_transport_output(pn_transport_t *transport, char *bytes, size_t size)
{
  if (!transport) return PN_ARG_ERR;
  ssize_t available = pn_transport_pending(transport);
  if (available > 0) {
    available = (ssize_t) pn_min( (size_t)available, size );
    memmove( bytes, pn_transport_head(transport), available );
    pn_transport_pop( transport, (size_t) available );
  }
  return available;
}

// Set the trace flags controlling raw/frame logging.
void pn_transport_trace(pn_transport_t *transport, pn_trace_t trace)
{
  transport->trace = trace;
}

void pn_transport_set_tracer(pn_transport_t *transport, pn_tracer_t tracer)
{
  assert(transport);
  assert(tracer);

  transport->tracer = tracer;
}

pn_tracer_t pn_transport_get_tracer(pn_transport_t *transport)
{
  assert(transport);
  return transport->tracer;
}

void pn_transport_set_context(pn_transport_t *transport, void *context)
{
assert(transport); pn_record_set(transport->context, PN_LEGCTX, context); } void *pn_transport_get_context(pn_transport_t *transport) { assert(transport); return pn_record_get(transport->context, PN_LEGCTX); } pn_record_t *pn_transport_attachments(pn_transport_t *transport) { assert(transport); return transport->context; } void pn_transport_log(pn_transport_t *transport, const char *message) { assert(transport); transport->tracer(transport, message); } void pn_transport_vlogf(pn_transport_t *transport, const char *fmt, va_list ap) { if (transport) { pn_string_vformat(transport->scratch, fmt, ap); pn_transport_log(transport, pn_string_get(transport->scratch)); } else { pn_vlogf(fmt, ap); } } void pn_transport_logf(pn_transport_t *transport, const char *fmt, ...) { va_list ap; va_start(ap, fmt); pn_transport_vlogf(transport, fmt, ap); va_end(ap); } uint16_t pn_transport_get_channel_max(pn_transport_t *transport) { return transport->channel_max; } void pn_transport_set_channel_max(pn_transport_t *transport, uint16_t channel_max) { transport->channel_max = channel_max; } uint16_t pn_transport_remote_channel_max(pn_transport_t *transport) { return transport->remote_channel_max; } uint32_t pn_transport_get_max_frame(pn_transport_t *transport) { return transport->local_max_frame; } void pn_transport_set_max_frame(pn_transport_t *transport, uint32_t size) { // if size == 0, no advertised limit to input frame size. 
if (size && size < AMQP_MIN_MAX_FRAME_SIZE) size = AMQP_MIN_MAX_FRAME_SIZE; transport->local_max_frame = size; } uint32_t pn_transport_get_remote_max_frame(pn_transport_t *transport) { return transport->remote_max_frame; } pn_millis_t pn_transport_get_idle_timeout(pn_transport_t *transport) { return transport->local_idle_timeout; } void pn_transport_set_idle_timeout(pn_transport_t *transport, pn_millis_t timeout) { transport->local_idle_timeout = timeout; } pn_millis_t pn_transport_get_remote_idle_timeout(pn_transport_t *transport) { return transport->remote_idle_timeout; } pn_timestamp_t pn_transport_tick(pn_transport_t *transport, pn_timestamp_t now) { pn_timestamp_t r = 0; for (int i = 0; i<PN_IO_LAYER_CT; ++i) { if (transport->io_layers[i] && transport->io_layers[i]->process_tick) r = pn_timestamp_min(r, transport->io_layers[i]->process_tick(transport, i, now)); } return r; } uint64_t pn_transport_get_frames_output(const pn_transport_t *transport) { if (transport) return transport->output_frames_ct; return 0; } uint64_t pn_transport_get_frames_input(const pn_transport_t *transport) { if (transport) return transport->input_frames_ct; return 0; } // input ssize_t pn_transport_capacity(pn_transport_t *transport) /* <0 == done */ { if (transport->tail_closed) return PN_EOS; //if (pn_error_code(transport->error)) return pn_error_code(transport->error); ssize_t capacity = transport->input_size - transport->input_pending; if ( capacity<=0 ) { // can we expand the size of the input buffer? int more = 0; if (!transport->local_max_frame) { // no limit (ha!) 
more = transport->input_size; } else if (transport->local_max_frame > transport->input_size) { more = pn_min(transport->input_size, transport->local_max_frame - transport->input_size); } if (more) { char *newbuf = (char *) realloc( transport->input_buf, transport->input_size + more ); if (newbuf) { transport->input_buf = newbuf; transport->input_size += more; capacity += more; } } } return capacity; } char *pn_transport_tail(pn_transport_t *transport) { if (transport && transport->input_pending < transport->input_size) { return &transport->input_buf[transport->input_pending]; } return NULL; } ssize_t pn_transport_push(pn_transport_t *transport, const char *src, size_t size) { assert(transport); ssize_t capacity = pn_transport_capacity(transport); if (capacity < 0) { return capacity; } else if (size > (size_t) capacity) { size = capacity; } char *dst = pn_transport_tail(transport); assert(dst); memmove(dst, src, size); int n = pn_transport_process(transport, size); if (n < 0) { return n; } else { return size; } } int pn_transport_process(pn_transport_t *transport, size_t size) { assert(transport); size = pn_min( size, (transport->input_size - transport->input_pending) ); transport->input_pending += size; transport->bytes_input += size; ssize_t n = transport_consume( transport ); if (n == PN_EOS) { pni_close_tail(transport); } if (n < 0 && n != PN_EOS) return n; return 0; } // input stream has closed int pn_transport_close_tail(pn_transport_t *transport) { pni_close_tail(transport); transport_consume( transport ); return 0; // XXX: what if not all input processed at this point? do we care??? 
} // output ssize_t pn_transport_pending(pn_transport_t *transport) /* <0 == done */ { assert(transport); return transport_produce( transport ); } const char *pn_transport_head(pn_transport_t *transport) { if (transport && transport->output_pending) { return transport->output_buf; } return NULL; } ssize_t pn_transport_peek(pn_transport_t *transport, char *dst, size_t size) { assert(transport); ssize_t pending = pn_transport_pending(transport); if (pending < 0) { return pending; } else if (size > (size_t) pending) { size = pending; } if (pending > 0) { const char *src = pn_transport_head(transport); assert(src); memmove(dst, src, size); } return size; } void pn_transport_pop(pn_transport_t *transport, size_t size) { if (transport) { assert( transport->output_pending >= size ); transport->output_pending -= size; transport->bytes_output += size; if (transport->output_pending) { memmove( transport->output_buf, &transport->output_buf[size], transport->output_pending ); } if (!transport->output_pending && pn_transport_pending(transport) < 0) { pni_close_head(transport); } } } int pn_transport_close_head(pn_transport_t *transport) { ssize_t pending = pn_transport_pending(transport); pni_close_head(transport); if (pending > 0) pn_transport_pop(transport, pending); return 0; } // true if the transport will not generate further output bool pn_transport_quiesced(pn_transport_t *transport) { if (!transport) return true; ssize_t pending = pn_transport_pending(transport); if (pending < 0) return true; // output done else if (pending > 0) return false; // no pending at transport, but check if data is buffered in I/O layers for (int layer = 0; layer<PN_IO_LAYER_CT; ++layer) { if (transport->io_layers[layer] && transport->io_layers[layer]->buffered_output && transport->io_layers[layer]->buffered_output( transport )) return false; } return true; } bool pn_transport_closed(pn_transport_t *transport) { assert(transport); ssize_t capacity = pn_transport_capacity(transport); ssize_t 
pending = pn_transport_pending(transport); return capacity < 0 && pending < 0; } pn_connection_t *pn_transport_connection(pn_transport_t *transport) { assert(transport); return transport->connection; }
jeckersb/Proton
proton-c/src/transport/transport.c
C
apache-2.0
90,312
<!-- This file is machine generated: DO NOT EDIT! --> # Losses (contrib) [TOC] Ops for building neural network losses. ## Other Functions and Classes - - - ### `tf.contrib.losses.absolute_difference(predictions, targets, weight=1.0, scope=None)` {#absolute_difference} Adds an Absolute Difference loss to the training procedure. `weight` acts as a coefficient for the loss. If a scalar is provided, then the loss is simply scaled by the given value. If `weight` is a tensor of size [batch_size], then the total loss for each sample of the batch is rescaled by the corresponding element in the `weight` vector. If the shape of `weight` matches the shape of `predictions`, then the loss of each measurable element of `predictions` is scaled by the corresponding value of `weight`. ##### Args: * <b>`predictions`</b>: The predicted outputs. * <b>`targets`</b>: The ground truth output tensor, same dimensions as 'predictions'. * <b>`weight`</b>: Coefficients for the loss a scalar, a tensor of shape [batch_size] or a tensor whose shape matches `predictions`. * <b>`scope`</b>: The scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. ##### Raises: * <b>`ValueError`</b>: If the shape of `predictions` doesn't match that of `targets` or if the shape of `weight` is invalid. - - - ### `tf.contrib.losses.add_loss(loss)` {#add_loss} Adds a externally defined loss to collection of losses. ##### Args: * <b>`loss`</b>: A loss `Tensor`. - - - ### `tf.contrib.losses.cosine_distance(predictions, targets, dim, weight=1.0, scope=None)` {#cosine_distance} Adds a cosine-distance loss to the training procedure. Note that the function assumes that the predictions and targets are already unit-normalized. ##### Args: * <b>`predictions`</b>: An arbitrary matrix. * <b>`targets`</b>: A `Tensor` whose shape matches 'predictions' * <b>`dim`</b>: The dimension along which the cosine distance is computed. 
* <b>`weight`</b>: Coefficients for the loss a scalar, a tensor of shape [batch_size] or a tensor whose shape matches `predictions`. * <b>`scope`</b>: The scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. ##### Raises: * <b>`ValueError`</b>: If predictions.shape doesn't match targets.shape, if the ignore mask is provided and its shape doesn't match targets.shape or if the ignore mask is not boolean valued. - - - ### `tf.contrib.losses.get_losses(scope=None)` {#get_losses} Gets the list of loss variables. ##### Args: * <b>`scope`</b>: an optional scope for filtering the losses to return. ##### Returns: a list of loss variables. - - - ### `tf.contrib.losses.get_regularization_losses(scope=None)` {#get_regularization_losses} Gets the regularization losses. ##### Args: * <b>`scope`</b>: an optional scope for filtering the losses to return. ##### Returns: A list of loss variables. - - - ### `tf.contrib.losses.get_total_loss(add_regularization_losses=True, name='total_loss')` {#get_total_loss} Returns a tensor whose value represents the total loss. Notice that the function adds the given losses to the regularization losses. ##### Args: * <b>`add_regularization_losses`</b>: A boolean indicating whether or not to use the regularization losses in the sum. * <b>`name`</b>: The name of the returned tensor. ##### Returns: A `Tensor` whose value represents the total loss. ##### Raises: * <b>`ValueError`</b>: if `losses` is not iterable. - - - ### `tf.contrib.losses.log_loss(predictions, targets, weight=1.0, epsilon=1e-07, scope=None)` {#log_loss} Adds a Log Loss term to the training procedure. `weight` acts as a coefficient for the loss. If a scalar is provided, then the loss is simply scaled by the given value. If `weight` is a tensor of size [batch_size], then the total loss for each sample of the batch is rescaled by the corresponding element in the `weight` vector. 
If the shape of `weight` matches the shape of `predictions`, then the loss of each measurable element of `predictions` is scaled by the corresponding value of `weight`. ##### Args: * <b>`predictions`</b>: The predicted outputs. * <b>`targets`</b>: The ground truth output tensor, same dimensions as 'predictions'. * <b>`weight`</b>: Coefficients for the loss a scalar, a tensor of shape [batch_size] or a tensor whose shape matches `predictions`. * <b>`epsilon`</b>: A small increment to add to avoid taking a log of zero. * <b>`scope`</b>: The scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. ##### Raises: * <b>`ValueError`</b>: If the shape of `predictions` doesn't match that of `targets` or if the shape of `weight` is invalid. - - - ### `tf.contrib.losses.sigmoid_cross_entropy(logits, multi_class_labels, weight=1.0, label_smoothing=0, scope=None)` {#sigmoid_cross_entropy} Creates a cross-entropy loss using tf.nn.sigmoid_cross_entropy_with_logits. `weight` acts as a coefficient for the loss. If a scalar is provided, then the loss is simply scaled by the given value. If `weight` is a tensor of size [`batch_size`], then the loss weights apply to each corresponding sample. If `label_smoothing` is nonzero, smooth the labels towards 1/2: new_multiclass_labels = multiclass_labels * (1 - label_smoothing) + 0.5 * label_smoothing ##### Args: * <b>`logits`</b>: [batch_size, num_classes] logits outputs of the network . * <b>`multi_class_labels`</b>: [batch_size, num_classes] target labels in (0, 1). * <b>`weight`</b>: Coefficients for the loss. The tensor must be a scalar, a tensor of shape [batch_size] or shape [batch_size, num_classes]. * <b>`label_smoothing`</b>: If greater than 0 then smooth the labels. * <b>`scope`</b>: The scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. 
##### Raises: * <b>`ValueError`</b>: If the shape of `predictions` doesn't match that of `targets` or if the shape of `weight` is invalid or if `weight` is None. - - - ### `tf.contrib.losses.softmax_cross_entropy(logits, onehot_labels, weight=1.0, label_smoothing=0, scope=None)` {#softmax_cross_entropy} Creates a cross-entropy loss using tf.nn.softmax_cross_entropy_with_logits. `weight` acts as a coefficient for the loss. If a scalar is provided, then the loss is simply scaled by the given value. If `weight` is a tensor of size [`batch_size`], then the loss weights apply to each corresponding sample. If `label_smoothing` is nonzero, smooth the labels towards 1/num_classes: new_onehot_labels = onehot_labels * (1 - label_smoothing) + label_smoothing / num_classes ##### Args: * <b>`logits`</b>: [batch_size, num_classes] logits outputs of the network . * <b>`onehot_labels`</b>: [batch_size, num_classes] target one_hot_encoded labels. * <b>`weight`</b>: Coefficients for the loss. The tensor must be a scalar or a tensor of shape [batch_size]. * <b>`label_smoothing`</b>: If greater than 0 then smooth the labels. * <b>`scope`</b>: the scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. ##### Raises: * <b>`ValueError`</b>: If the shape of `predictions` doesn't match that of `targets` or if the shape of `weight` is invalid or if `weight` is None. - - - ### `tf.contrib.losses.sum_of_pairwise_squares(predictions, targets, weight=1.0, scope=None)` {#sum_of_pairwise_squares} Adds a pairwise-errors-squared loss to the training procedure. Unlike the sum_of_squares loss, which is a measure of the differences between corresponding elements of `predictions` and `targets`, sum_of_pairwise_squares is a measure of the differences between pairs of corresponding elements of `predictions` and `targets`. 
For example, if `targets`=[a, b, c] and `predictions`=[x, y, z], there are three pairs of differences are summed to compute the loss: loss = [ ((a-b) - (x-y)).^2 + ((a-c) - (x-z)).^2 + ((b-c) - (y-z)).^2 ] / 3 Note that since the inputs are of size [batch_size, d0, ... dN], the corresponding pairs are computed within each batch sample but not across samples within a batch. For example, if `predictions` represents a batch of 16 grayscale images of dimenion [batch_size, 100, 200], then the set of pairs is drawn from each image, but not across images. `weight` acts as a coefficient for the loss. If a scalar is provided, then the loss is simply scaled by the given value. If `weight` is a tensor of size [batch_size], then the total loss for each sample of the batch is rescaled by the corresponding element in the `weight` vector. ##### Args: * <b>`predictions`</b>: The predicted outputs, a tensor of size [batch_size, d0, .. dN] where N+1 is the total number of dimensions in `predictions`. * <b>`targets`</b>: The ground truth output tensor, whose shape must match the shape of the `predictions` tensor. * <b>`weight`</b>: Coefficients for the loss a scalar, a tensor of shape [batch_size] or a tensor whose shape matches `predictions`. * <b>`scope`</b>: The scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. ##### Raises: * <b>`ValueError`</b>: If the shape of `predictions` doesn't match that of `targets` or if the shape of `weight` is invalid. - - - ### `tf.contrib.losses.sum_of_squares(predictions, targets, weight=1.0, scope=None)` {#sum_of_squares} Adds a Sum-of-Squares loss to the training procedure. `weight` acts as a coefficient for the loss. If a scalar is provided, then the loss is simply scaled by the given value. If `weight` is a tensor of size [batch_size], then the total loss for each sample of the batch is rescaled by the corresponding element in the `weight` vector. 
If the shape of `weight` matches the shape of `predictions`, then the loss of each measurable element of `predictions` is scaled by the corresponding value of `weight`. ##### Args: * <b>`predictions`</b>: The predicted outputs. * <b>`targets`</b>: The ground truth output tensor, same dimensions as 'predictions'. * <b>`weight`</b>: Coefficients for the loss a scalar, a tensor of shape [batch_size] or a tensor whose shape matches `predictions`. * <b>`scope`</b>: The scope for the operations performed in computing the loss. ##### Returns: A scalar `Tensor` representing the loss value. ##### Raises: * <b>`ValueError`</b>: If the shape of `predictions` doesn't match that of `targets` or if the shape of `weight` is invalid.
HaebinShin/tensorflow
tensorflow/g3doc/api_docs/python/contrib.losses.md
Markdown
apache-2.0
10,783
var searchData= [ ['joystick',['Joystick',['../class_tri_1_1_input_1_1_joystick.html#a2b9df8a7989454ead020526745d49fdb',1,'Tri::Input::Joystick']]] ];
TriantEntertainment/TritonEngine
docs/html/search/functions_8.js
JavaScript
apache-2.0
153
[![CI Status](http://img.shields.io/travis/NickAger/aerogear-diffmatchpatch-ios.svg?style=flat)](https://travis-ci.org/NickAger/aerogear-diffmatchpatch-ios) [![Version](https://img.shields.io/cocoapods/v/DiffMatchPatch.svg?style=flat)](http://cocoapods.org/pods/DiffMatchPatch) [![License](https://img.shields.io/cocoapods/l/DiffMatchPatch.svg?style=flat)](http://cocoapods.org/pods/DiffMatchPatch) [![Platform](https://img.shields.io/cocoapods/p/DiffMatchPatch.svg?style=flat)](http://cocoapods.org/pods/DiffMatchPatch) # DiffMatchPatch for iOS / MacOSX The project is a fork of [google-diff-match-patch](https://github.com/JanX2/google-diff-match-patch) with modifications to get it to compile for iOS / MacOSX and Xcode 6.0 The speed test target and schema were removed to save time figuring out some issues but might later on. ## Prerequisites This project requires Xcode6.0 to run. ## Building Building can be done by opening the project in Xcode: open DiffMatchPatch.xcodeproj xcodebuild -scheme DiffMatchPatch build xcodebuild -scheme DiffMatchPatch-OSX build ## Testing Tests can be run from with in Xcode using Product->Test menu option (CMD+U). You can also run test from the command: xcodebuild -scheme DiffMatchPatch -destination 'platform=iOS Simulator,name=iPhone 5s' test xcodebuild -scheme DiffMatchPatch-OSX test ## Cocoapods This project can be made into a [CocoaPods](http://www.cocoapods.org/): First install the Cocoapods gem by running: sudo gem install cocoapods --pre Then you can verify that the podspec is correct: pod spec lint DiffMatchPatch.podspec --verbose --allow-warnings If all goes well you are ready to release. First, create a tag and push: git tag 'version' git push --tags Once the tag is available you can send the library to the Specs repo. For this you'll have to follow the instructions in ["Getting Setup with Trunk"]. pod trunk push DiffMatchPatch.podspec
feinstruktur/aerogear-diffmatchpatch-ios
README.md
Markdown
apache-2.0
1,967
.editor-titles { width: 100%; } .editor-title { width: calc(100%/3); float: left; margin-bottom: 10px; font-weight: bold; } .code-editor { background: rgb(88, 80, 78); color: white; border: 0; border-radius: 5px; width: calc(100%/3 - 5px); max-width: calc(100%/3 - 5px); box-sizing: border-box; height: 400px; outline: none; padding: 10px; }
ProgrammerKid/code-bit
app/ext/css/editor.css
CSS
apache-2.0
404
/** * Licensed to Jasig under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Jasig licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.openregistry.test.domain; import org.openregistry.core.domain.*; import org.openregistry.core.domain.sor.SorDisclosureSettings; import org.openregistry.core.domain.sor.SorName; import org.openregistry.core.domain.sor.SorRole; import org.openregistry.core.service.DisclosureRecalculationStrategy; import java.util.*; /** * @version $Revision$ $Date$ * @since 0.1 */ public class MockPerson implements Person { private ActivationKey activationKey = new MockActivationKey(UUID.randomUUID().toString(), new Date(), new Date()); private Set<Role> roles = new HashSet<Role>(); private long id = 1L; private Set<MockName> names = new HashSet<MockName>(); private MockEmailAddress mockEmailAddress = new MockEmailAddress(); private MockPhoneNumber mockPhoneNumber = new MockPhoneNumber(); private MockDisclosureSettings mockDisclosureSettings; private final Set<Identifier> identifiers = new HashSet<Identifier>(); private String gender; private Date dob; /** * Generates a new active, non-expired person with Identifier of type NETID */ public MockPerson() { this("testId", false, false); } /** * Generates a person with Identifier of type NETID with supplied value */ public MockPerson(final String identifierValue, final boolean 
notActive, final boolean expired) { if (notActive && expired) { throw new IllegalArgumentException("You're crazy!"); } final Date startDate; final Date endDate; if (notActive) { startDate = new Date(System.currentTimeMillis() + 50000); endDate = new Date(System.currentTimeMillis() + 50000000); } else if (expired) { startDate = new Date(System.currentTimeMillis() - 500000); endDate = new Date(System.currentTimeMillis() - 50000); } else { startDate = new Date(); endDate = new Date(System.currentTimeMillis() + 50000000); } this.activationKey = new MockActivationKey("key", startDate, endDate); this.addIdentifier(new MockIdentifierType("NETID", false), identifierValue); } public MockPerson(long id) { this.id = id; } public Long getId() { return this.id; } public void setId(final Long id) { this.id = id; } @Override public ContactEmailAddress getPreferredContactEmailAddress() { return this.mockEmailAddress; } @Override public ContactPhone getPreferredContactPhoneNumber() { return this.mockPhoneNumber; } public void addRole(final Role role) { this.roles.add(role); } public Set<? extends Name> getNames() { return this.names; } public Name addName() { final MockName name = new MockName(); this.names.add(name); return name; } public Name addName(Type type) { final MockName name = new MockName(type); this.names.add(name); return name; } @Override public void addName(final SorName sorName) { final MockName name = new MockName(); name.setType(sorName.getType()); name.setGiven(sorName.getGiven()); name.setFamily(sorName.getFamily()); name.setMiddle(sorName.getMiddle()); name.setPrefix(sorName.getPrefix()); name.setSuffix(sorName.getSuffix()); name.setSourceNameId(sorName.getId()); this.names.add(name); //To change body of implemented methods use File | Settings | File Templates. 
} public Set<Role> getRoles() { return this.roles; } public Role addRole(final SorRole sorRole) { final MockRole mockRole = new MockRole(sorRole); this.roles.add(mockRole); return mockRole; } public Set<Identifier> getIdentifiers() { return identifiers; } public Name getPreferredName() { return null; } public Name getOfficialName() { Set<? extends Name> names = this.getNames(); for(Name name: names) { if (name.isOfficialName()) { return name; } } return null; } public String getGender() { return this.gender; } public Date getDateOfBirth() { return this.dob; } public void setDateOfBirth(Date dateOfBirth) { this.dob = dateOfBirth; } public void setGender(String gender) { this.gender = gender; } public void calculateDisclosureSettings(SorDisclosureSettings ds) { if (ds != null) { this.mockDisclosureSettings = new MockDisclosureSettings(ds.getDisclosureCode(), ds.getLastUpdateDate(), ds.isWithinGracePeriod()); } else { this.mockDisclosureSettings = null; } } public DisclosureSettings getDisclosureSettings() { return this.mockDisclosureSettings; } public void setDisclosureSettingInfo(String disclosureCode, boolean isWithinGracePeriod, Date lastUpdatedDate) { this.mockDisclosureSettings = new MockDisclosureSettings (disclosureCode, lastUpdatedDate, isWithinGracePeriod); } public Identifier addIdentifier(IdentifierType identifierType, String value) { MockIdentifier mid = new MockIdentifier(this, identifierType, value); this.identifiers.add(mid); return mid; } @Override public void setIdentifierNotified(IdentifierType identifierType, Date date) { if (!identifierType.isNotifiable()) { throw new IllegalArgumentException("Only notifiable identifiers can have a notification date set"); } if (identifierType == null) { throw new IllegalArgumentException("Identifier type must be supplied"); } Identifier identiferToUpdate = this.getPrimaryIdentifiersByType().get(identifierType.getName()); if (identiferToUpdate != null && identiferToUpdate.getNotificationDate() == null) { 
identiferToUpdate.setNotificationDate(date); } else { throw new IllegalStateException("Identifier to be updated was not found"); } } public Name addOfficialName() { MockName name = new MockName(); name.setOfficialName(true); return name; } public Name addPreferredName() { MockName name = new MockName(); name.setPreferredName(true); return name; } public void setPreferredName(Name name) { //To change body of implemented methods use File | Settings | File Templates. } public Role pickOutRole(Type affiliationType) { for(final Role r : this.roles) { if(r.getAffiliationType().getDescription().equals(affiliationType.getDescription())) { return r; } } return null; } public Role pickOutRole(String affiliation) { for(final Role r : this.roles) { if(r.getAffiliationType().getDescription().equals(affiliation)) { return r; } } return null; } @Override public Map<String, Identifier> getPrimaryIdentifiersByType() { final Map<String,Identifier> primaryIdentifiers = new HashMap<String,Identifier>(); for (final Identifier identifier : getIdentifiers()) { if (identifier.isPrimary()) { primaryIdentifiers.put(identifier.getType().getName(), identifier); } } return primaryIdentifiers; } @Override public Map<String, Deque<Identifier>> getIdentifiersByType() { final Map<String, Deque<Identifier>> identifiersByType = new HashMap<String, Deque<Identifier>>(); for (final Identifier identifier : this.identifiers) { final String identifierType = identifier.getType().getName(); Deque<Identifier> listIdentifiers = identifiersByType.get(identifierType); if (listIdentifiers == null) { listIdentifiers = new ArrayDeque<Identifier>(); identifiersByType.put(identifierType, listIdentifiers); } if (identifier.isPrimary()) { listIdentifiers.addFirst(identifier); } else { listIdentifiers.addLast(identifier); } } return identifiersByType; } public ActivationKey generateNewActivationKey(Date start, Date end) { this.activationKey = new MockActivationKey(UUID.randomUUID().toString(), start, end); return 
this.activationKey; } public ActivationKey generateNewActivationKey(Date end) { this.activationKey = new MockActivationKey(UUID.randomUUID().toString(), new Date(), end); return this.activationKey; } public ActivationKey getCurrentActivationKey() { return this.activationKey; } public void removeCurrentActivationKey() { this.activationKey = null; } public Role findRoleBySoRRoleId(final Long sorRoleId) { for (final Role role : this.roles) { if (sorRoleId.equals(role.getSorRoleId())) { return role; } } return null; } @Override public Identifier findIdentifierByValue(String identifierType, String identifierValue) { final Map<String, Deque<Identifier>> identifiersByType = getIdentifiersByType(); Deque<Identifier> ids = identifiersByType.get(identifierType); if(ids == null) { return null; } Iterator<Identifier> iter = ids.iterator(); Identifier id = null; while(iter.hasNext()) { id = iter.next(); if(id.getValue().equals(identifierValue)) { return id; } } return null; } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final MockPerson that = (MockPerson) o; return id == that.id; } @Override public int hashCode() { return 31 * (int) (id ^ (id >>> 32)); } @Override public String toString() { return "MockPerson{" + "activationKey=" + activationKey + ", roles=" + roles + ", id=" + id + ", names=" + names + ", mockEmailAddress=" + mockEmailAddress + ", mockPhoneNumber=" + mockPhoneNumber + ", mockDisclosureSettings=" + mockDisclosureSettings + ", identifiers=" + identifiers + ", gender='" + gender + '\'' + ", dob=" + dob + '}'; } }
Unicon/openregistry
openregistry-test-support/src/main/java/org/jasig/openregistry/test/domain/MockPerson.java
Java
apache-2.0
11,410
/* * Copyright (c) 2015-2016 Fraunhofer FOKUS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ .morris-hover{position:absolute;z-index:1000}.morris-hover.morris-default-style{border-radius:10px;padding:6px;color:#666;background:rgba(255,255,255,0.8);border:solid 2px rgba(230,230,230,0.8);font-family:sans-serif;font-size:12px;text-align:center}.morris-hover.morris-default-style .morris-hover-row-label{font-weight:bold;margin:0.25em 0} .morris-hover.morris-default-style .morris-hover-point{white-space:nowrap;margin:0.1em 0}
nubomedia/nubomedia-paas
src/main/resources/static/bower_components/morrisjs/morris.css
CSS
apache-2.0
1,061
export GOPATH:=$(shell pwd) GO ?= go PKG := ./src/diato/ # TODO: Do we also want to run with debug in production? # the github.com/rjeczalik/notify prints a lot of debug # stuff when this is set. BUILDTAGS := debug VERSION ?= $(shell git describe --dirty --tags | sed 's/^v//' ) .PHONY: default default: all # find src/ -name .git -type d | sed -s 's/.git$//' | while read line; do echo -n "${line} " | sed 's/.\/src\///'; git -C $line rev-parse HEAD; done | sort > GLOCKFILE .PHONY: deps deps: go get -tags '$(BUILDTAGS)' -d -v diato/... go get github.com/robfig/glock git diff /dev/null GLOCKFILE | ./bin/glock apply . .PHONY: diato diato: deps binary .PHONY: binary binary: LDFLAGS += -X "main.buildTag=v$(VERSION)" binary: LDFLAGS += -X "main.buildTime=$(shell date -u '+%Y-%m-%d %H:%M:%S UTC')" binary: go install -tags '$(BUILDTAGS)' -ldflags '$(LDFLAGS)' diato # go install -race -tags '$(BUILDTAGS)' -ldflags '$(LDFLAGS)' diato .PHONY: release release: BUILDTAGS=release release: diato .PHONY: fmt fmt: go fmt diato/... .PHONY: all all: fmt diato .PHONY: clean clean: rm -rf bin/ rm -rf pkg/ rm -rf src/diato/assets/ go clean -i -r diato .PHONY: test test: go test -tags '$(BUILDTAGS)' -ldflags '$(LDFLAGS)' diato/...
Freeaqingme/diato
Makefile
Makefile
apache-2.0
1,261
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta http-equiv="X-UA-COMPATIBLE" content="IE=edge,chrome=1"> <title>Greenshocks 101 - labels</title> <meta name="description" content="labels"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" href="./styles.css"> <script src="../js/vendor/modernizr-2.8.3-respond-1.4.2.min.js"></script> </head> <body> <!--[if lt IE 8]> <p class="browsehappy">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p> <![endif]--> <div id="demoWrapper"> <div id="bg"></div> <div id="content"> <h1>Freakishly Robust</h1> <h2>With features that makes other engines look like cheap toys</h2> <div id="info"><img src="http://www.greensock.com/wp-content/uploads/custom/codepen/feature_robust.png" width="240" height="151" id="feature"> <p id="description">Animate colors, beziers, css properties, arrays, scrolls and lots more. Round values, smoothly reverse() on the fly, use relative values, employ virtually any easing equation, and manage conflicting tweens like a pro. 
GSAP does all this and much more with ease.</p> </div> <div id="nav"> <img src="http://www.greensock.com/wp-content/uploads/custom/codepen/icon_robust.png" width="83" height="59"><img src="http://www.greensock.com/wp-content/uploads/custom/codepen/icon_overwrite.png" width="43" height="59"><img src="http://www.greensock.com/wp-content/uploads/custom/codepen/icon_compatible.png" width="73" height="59"><img src="http://www.greensock.com/wp-content/uploads/custom/codepen/icon_support.png" width="83" height="59"><img src="http://www.greensock.com/wp-content/uploads/custom/codepen/icon_plugin.png" width="76" height="59"> </div> </div> </div> <div> <button id="play">play</button> <button id="pause">pause</button> <button id="reverse">reverse</button> <button id="resume">resume</button> <button id="restart">restart</button> </div> <div id="sliderWrapper"> <div id="slider"></div> </div> <script src="../../jspm_packages/system.js"></script> <script src="../../config.js"></script> <script> System.config({ map: { TweenLite: './../../node_modules/gsap/src/uncompressed/TweenLite.js', CSSPlugin: './../../node_modules/gsap/src/uncompressed/plugins/CSSPlugin.js', TimelineLite: './../../node_modules/gsap/src/uncompressed/TimelineLite.js' } }); System.import('./main'); </script> </body> </html>
martin-fabbri/greensock-playground
greenshock-101/7-timelinelite/index.html
HTML
apache-2.0
2,801
<?php /*************************************************************************** * copyright : (C) 2008 - 2016 WeBid * site : http://www.webidsupport.com/ ***************************************************************************/ /*************************************************************************** * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. Although none of the code may be * sold. If you have been sold this script, get a refund. ***************************************************************************/ define('InAdmin', 1); $current_page = 'fees'; include '../common.php'; include INCLUDE_PATH . 'functions_admin.php'; include 'loggedin.inc.php'; $fees = array( //0 = single value, 1 = staged fees 'signup_fee' => 0, 'buyer_fee' => 1, 'setup_fee' => 1, 'featured_fee' => 0, 'bold_fee' => 0, 'highlighted_fee' => 0, 'subtitle_fee' => 0, 'extracat_fee' => 0, 'reserve_fee' => 0, 'picture_fee' => 0, 'relist_fee' => 0, 'buynow_fee' => 0, 'endauc_fee' => 1 ); $feenames = array( 'signup_fee' => $MSG['430'], 'buyer_fee' => $MSG['775'], 'setup_fee' => $MSG['432'], 'featured_fee' => $MSG['433'], 'bold_fee' => $MSG['439'], 'highlighted_fee' => $MSG['434'], 'subtitle_fee' => $MSG['803'], 'extracat_fee' => $MSG['804'], 'reserve_fee' => $MSG['440'], 'picture_fee' => $MSG['435'], 'relist_fee' => $MSG['437'], 'buynow_fee' => $MSG['436'], 'endauc_fee' => $MSG['791'] ); if(isset($_GET['type']) && isset($fees[$_GET['type']])) { if($fees[$_GET['type']] == 0) { if(isset($_POST['action']) && $_POST['action'] == 'update') { if(!$system->CheckMoney($_POST['value'])) { $errmsg = $ERR_058; } else { $query = "UPDATE " . $DBPrefix . 
"fees SET value = :value WHERE type = :type"; $params = array(); $params[] = array(':value', $system->input_money($_POST['value']), 'float'); $params[] = array(':type', $_GET['type'], 'str'); $db->query($query, $params); $errmsg = $feenames[$_GET['type']] . $MSG['359']; } } $query = "SELECT value FROM " . $DBPrefix . "fees WHERE type = :type"; $params = array(); $params[] = array(':type', $_GET['type'], 'str'); $db->query($query, $params); $value = $db->result('value'); $template->assign_vars(array( 'VALUE' => $system->print_money_nosymbol($value), 'CURRENCY' => $system->SETTINGS['currency'] )); } elseif($fees[$_GET['type']] == 1) { $level_added = false; if(isset($_POST['action']) && $_POST['action'] == 'update') { for($i = 0; $i < count($_POST['tier_id']); $i++) { $value = $_POST['value'][$i]; if ($_POST['type'][$i] == 'flat') { $value = $system->input_money($value); } $query = "UPDATE " . $DBPrefix . "fees SET fee_from = :fee_from, fee_to = :fee_to, value = :value, fee_type = :fee_type WHERE id = :fee_id"; $params = array(); $params[] = array(':fee_from', $system->input_money($_POST['fee_from'][$i]), 'float'); $params[] = array(':fee_to', $system->input_money($_POST['fee_to'][$i]), 'float'); $params[] = array(':value', $value, 'float'); $params[] = array(':fee_type', $_POST['type'][$i], 'str'); $params[] = array(':fee_id', $_POST['tier_id'][$i], 'int'); $db->query($query, $params); $errmsg = $feenames[$_GET['type']] . $MSG['359']; } if (isset($_POST['fee_delete'])) { for($i = 0; $i < count($_POST['fee_delete']); $i++) { $query = "DELETE FROM " . $DBPrefix . 
"fees WHERE id = :fee_id"; $params = array(); $params[] = array(':fee_id', $_POST['fee_delete'][$i], 'int'); $db->query($query, $params); } } if(!empty($_POST['new_fee_from']) && !empty($_POST['new_fee_to']) && !empty($_POST['new_value']) && !empty($_POST['new_type'])) { if ($_POST['new_fee_from'] <= $_POST['new_fee_to']) { $value = $_POST['new_value']; if ($_POST['new_type'] == 'flat') { $value = $system->input_money($value); } $query = "INSERT INTO " . $DBPrefix . "fees VALUES (NULL, :fee_from, :fee_to, :new_type, :value, :type)"; $params = array(); $params[] = array(':fee_from', $system->input_money($_POST['new_fee_from']), 'float'); $params[] = array(':fee_to', $system->input_money($_POST['new_fee_to']), 'float'); $params[] = array(':new_type', $_POST['new_type'], 'str'); $params[] = array(':value', $value, 'float'); $params[] = array(':type', $_GET['type'], 'str'); $db->query($query, $params); $level_added = true; } else { $errmsg = $ERR_713; } } } $query = "SELECT * FROM " . $DBPrefix . "fees WHERE type = :type ORDER BY fee_from ASC"; $params = array(); $params[] = array(':type', $_GET['type'], 'str'); $db->query($query, $params); while($row = $db->fetch()) { $template->assign_block_vars('fees', array( 'ID' => $row['id'], 'FROM' => $system->print_money_nosymbol($row['fee_from']), 'TO' => $system->print_money_nosymbol($row['fee_to']), 'FLATTYPE' => ($row['fee_type'] == 'flat') ? ' selected="selected"' : '', 'PERCTYPE' => ($row['fee_type'] == 'perc') ? ' selected="selected"' : '', 'VALUE' => ($row['fee_type'] == 'flat') ? $system->print_money_nosymbol($row['value']) : $row['value'] )); } $template->assign_vars(array( 'CURRENCY' => $system->SETTINGS['currency'], 'FEE_FROM' => (isset($_POST['new_fee_from']) && !$level_added) ? $_POST['new_fee_from'] : '', 'FEE_TO' => (isset($_POST['new_fee_to']) && !$level_added) ? $_POST['new_fee_to'] : '', 'FEE_VALUE' => (isset($_POST['new_value']) && !$level_added) ? 
$_POST['new_value'] : '', 'FEE_TYPE' => (isset($_POST['new_type']) && !$level_added) ? $_POST['new_type'] : '' )); } } $query = "SELECT COUNT(id) as count FROM " . $DBPrefix . "payment_options WHERE is_gateway = 1 AND gateway_admin_address != ''"; $db->direct_query($query); $gateway_check = $db->result('count'); $template->assign_vars(array( 'SITEURL' => $system->SETTINGS['siteurl'], 'B_NOT_SETUP_CORRECTLY' => ($gateway_check == 0), 'B_SINGLE' => (isset($_GET['type']) && isset($fees[$_GET['type']]) && $fees[$_GET['type']] == 0) ? true : false, 'FEETYPE' => (isset($_GET['type']) && isset($feenames[$_GET['type']])) ? $feenames[$_GET['type']] : '' )); include 'header.php'; $template->set_filenames(array( 'body' => 'fees.tpl' )); $template->display('body'); include 'footer.php'; ?>
nath-haran/jivass
WeBid-1.2.1/admin/fees.php
PHP
apache-2.0
6,662
# 1.0.0 (unfixed) - **Nouveautés - Autoloader retravaillé, tous les fichiers et ceux des sous répertoires du dossier "core" sont inclus au projet. - Création d'un fichier de configuration `settings.php` permettant de définir des variables superglobales. - Bootstrap d'initialisation des variables de l'application ## 0.2.0 (28 Août 2013) - **Fonctionnalité** - Lorsqu'un champ n'est pas bien renseigné dans le formulaire d'envoi, les informations correctement saisies sont enregistrées en session pour les conserver dans le formulaire ### 0.1.1 (26 Août 2013) - **Bug** - La fonction getNameOf retourne désormais le nom de la personne associé au numéro envoyé en paramètre ## 0.1.0 (25 Août 2013) Première version de l'application flashsms - **Fonctionnalités** - Envoyer un sms - Parser au format json et afficher les sms reçus - Envoyer un sms PDU class 0 - Gérer un annuaire de contacts, association nom avec numéro uniquement - Gérer des listes de diffusions de sms - **Contraintes** - Utilisation du logiciel gammu - Il faut brancher et configurer une clé 3G - Il faut une carte sim avec abonnement SMS - Développé sur un raspberry pi modèle B ## TODO : 1. Avoir un système de conversation avec une personne en particulier pour ne pas perdre le fil de la discussion 2. Lots of things ...
r0mdau/flashsms
CHANGELOG.md
Markdown
apache-2.0
1,359
package org.openengsb.loom.java; import org.openengsb.core.api.AliveState; import org.openengsb.core.api.Connector; import org.openengsb.domain.example.ExampleDomain; import org.openengsb.domain.example.event.LogEvent; import org.openengsb.domain.example.model.ExampleRequestModel; import org.openengsb.domain.example.model.ExampleResponseModel; public class ExampleConnector implements ExampleDomain, Connector { @Override public String getInstanceId() { // TODO Auto-generated method stub return null; } @Override public AliveState getAliveState() { return AliveState.ONLINE; } @Override public ExampleResponseModel doSomethingWithModel(ExampleRequestModel arg0) { ExampleResponseModel result = new ExampleResponseModel(); result.setResult(arg0.getName()); return result; } @Override public String doSomethingWithMessage(String arg0) { return "42"; } @Override public String doSomethingWithLogEvent(LogEvent arg0) { // TODO Auto-generated method stub return null; } @Override public String getConnectorId() { // TODO Auto-generated method stub return null; } @Override public String getDomainId() { // TODO Auto-generated method stub return null; } @Override public void setConnectorId(String arg0) { // TODO Auto-generated method stub } @Override public void setDomainId(String arg0) { // TODO Auto-generated method stub } }
openengsb/loom-java
bridge/src/test/java/org/openengsb/loom/java/ExampleConnector.java
Java
apache-2.0
1,568
package org.pedrohos.business.notas; import java.math.BigInteger; import org.pedrohos.model.dto.NotaDTO; public abstract class NotaDefault { public NotaDTO calcula(BigInteger valor) { NotaDTO nota = new NotaDTO(); nota.setNota(getValorNota()); BigInteger quantidade = valor.divide(getValorNota()); if( quantidade.compareTo(BigInteger.ONE) >= 0 ) { nota.setQuantidade(quantidade); nota.setValorRestante(valor.subtract(getValorNota().multiply(quantidade))); } else { nota.setQuantidade(BigInteger.ZERO); nota.setValorRestante(valor); } return nota; } protected abstract BigInteger getValorNota(); }
pedro-hos/caixa-eletronico
ce-core/src/main/java/org/pedrohos/business/notas/NotaDefault.java
Java
apache-2.0
646
/******************************************************************************* * Copyright 2016 Jalian Systems Pvt. Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package net.sourceforge.marathon.resource.navigator; import static java.nio.file.FileVisitResult.CONTINUE; import static java.nio.file.FileVisitResult.SKIP_SUBTREE; import static java.nio.file.StandardCopyOption.COPY_ATTRIBUTES; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; import java.io.IOException; import java.nio.file.CopyOption; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystemLoopException; import java.nio.file.FileVisitOption; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileTime; import java.util.EnumSet; import java.util.Optional; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import javafx.scene.control.ButtonType; import net.sourceforge.marathon.resource.ResourceView.Operation; /** * Sample code that copies files in a similar manner to the cp(1) program. */ public class Copy { public static final Logger LOGGER = Logger.getLogger(Copy.class.getName()); /** * Copy source file to target location. 
* * @return */ static boolean copyFile(Path source, Path target) { CopyOption[] options = new CopyOption[] { COPY_ATTRIBUTES, REPLACE_EXISTING }; target = getUnique(target); try { Files.copy(source, target, options); return true; } catch (Exception x) { System.err.format("Unable to copy: %s: %s%n", source, x); return false; } } private static Pattern pattern1 = Pattern.compile("(.*)_copy"); private static Pattern pattern2 = Pattern.compile("(.*)_copy_(\\d+)"); private static Path getUnique(Path target) { String fileName = target.getFileName().toString(); String name = com.google.common.io.Files.getNameWithoutExtension(fileName); String ext = com.google.common.io.Files.getFileExtension(fileName); if (!ext.equals("")) { ext = "." + ext; } while (!Files.notExists(target)) { Matcher matcher = pattern2.matcher(name); if (matcher.matches()) { name = matcher.group(1) + "_copy_" + (Integer.parseInt(matcher.group(2)) + 1); } else { matcher = pattern1.matcher(name); if (matcher.matches()) { name = matcher.group(1) + "_copy_2"; } else { name = name + "_copy"; } } target = target.resolveSibling(name + ext); } return target; } /** * A {@code FileVisitor} that copies a file-tree ("cp -r") */ static class TreeCopier implements FileVisitor<Path> { private final Path source; private final Path target; private Operation operation; TreeCopier(Path source, Path target, Operation operation) { this.source = source; this.target = target; this.operation = operation; } @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { // before visiting entries in a directory we copy the directory // (okay if directory already exists). 
CopyOption[] options = new CopyOption[] { COPY_ATTRIBUTES }; Path newdir = target.resolve(source.relativize(dir)); try { Files.copy(dir, newdir, options); } catch (FileAlreadyExistsException x) { // ignore } catch (IOException x) { System.err.format("Unable to create: %s: %s%n", newdir, x); return SKIP_SUBTREE; } return CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { if (copyFile(file, target.resolve(source.relativize(file)))) { if (operation == Operation.CUT) { try { Files.delete(file); } catch (IOException e) { System.err.format("Unable to create: %s: %s%n", file, e); } } } return CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) { // fix up modification time of directory when done if (exc == null) { Path newdir = target.resolve(source.relativize(dir)); try { FileTime time = Files.getLastModifiedTime(dir); Files.setLastModifiedTime(newdir, time); } catch (IOException x) { System.err.format("Unable to copy all attributes to: %s: %s%n", newdir, x); } try { if (operation == Operation.CUT) { Files.delete(dir); } } catch (IOException e) { System.err.format("Unable to delete directory: %s: %s%n", newdir, e); } } return CONTINUE; } @Override public FileVisitResult visitFileFailed(Path file, IOException exc) { if (exc instanceof FileSystemLoopException) { System.err.println("cycle detected: " + file); } else { System.err.format("Unable to copy: %s: %s%n", file, exc); } return CONTINUE; } } static class TreeDeleter implements FileVisitor<Path> { private Optional<ButtonType> option; public TreeDeleter(Optional<ButtonType> option) { this.option = option; } @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } @Override public FileVisitResult 
visitFileFailed(Path file, IOException exc) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { Files.delete(dir); return FileVisitResult.CONTINUE; } public Optional<ButtonType> getOption() { return option; } } public static Path copy(Path source, Path target, Operation operation) throws IOException { if (source.equals(target)) { return null; } Path dest = target.resolve(source.getFileName()); if (operation == Operation.CUT && dest.equals(source)) { return null; } dest = getUnique(dest); // follow links when copying files EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS); TreeCopier tc = new TreeCopier(source, dest, operation); Files.walkFileTree(source, opts, Integer.MAX_VALUE, tc); return dest; } public static Optional<ButtonType> delete(Path path, Optional<ButtonType> option) throws IOException { TreeDeleter td = new TreeDeleter(option); Files.walkFileTree(path, td); return td.getOption(); } }
jalian-systems/marathonv5
marathon-core/src/main/java/net/sourceforge/marathon/resource/navigator/Copy.java
Java
apache-2.0
8,386
# Taraxacum staticifolium Soest SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Taraxacum staticifolium/README.md
Markdown
apache-2.0
179
/*! \file main.c \brief CAN networking communication in normal mode */ /* Copyright (C) 2016 GigaDevice 2014-12-26, V1.0.0, firmware for GD32F1x0(x=3,5) 2016-01-15, V2.0.0, firmware for GD32F1x0(x=3,5,7,9) 2016-04-30, V3.0.0, firmware update for GD32F1x0(x=3,5,7,9) */ #include "gd32f1x0.h" #include <stdio.h> #include "gd32f1x0_eval.h" /* select can */ //#define CAN0_USED #define CAN1_USED #ifdef CAN0_USED #define CANX CAN0 #else #define CANX CAN1 #endif extern FlagStatus receive_flag; uint8_t transmit_number = 0x0; extern can_receive_message_struct receive_message; can_trasnmit_message_struct transmit_message; void nvic_config(void); void led_config(void); void gpio_config(void); ErrStatus can_networking(void); void can_networking_init(can_parameter_struct can_parameter, can_filter_parameter_struct can_filter); void delay(void); /*! \brief main function \param[in] none \param[out] none \retval none */ int main(void) { can_parameter_struct can_init_parameter; can_filter_parameter_struct can_filter_parameter; receive_flag = RESET; /* configure Tamper key */ gd_eval_keyinit(KEY_TAMPER, KEY_MODE_GPIO); /* configure GPIO */ gpio_config(); /* configure USART */ gd_eval_COMinit(EVAL_COM2); /* configure NVIC */ nvic_config(); /* configure leds */ led_config(); /* set all leds off */ gd_eval_ledoff(LED1); gd_eval_ledoff(LED2); gd_eval_ledoff(LED3); gd_eval_ledoff(LED4); /* initialize CAN */ can_networking_init(can_init_parameter, can_filter_parameter); /* enable phy */ #ifdef CAN0_USED can_phy_enable(CANX); #endif /* enable CAN receive FIFO0 not empty interrupt */ can_interrupt_enable(CANX, CAN_INTEN_RFNEIE0); /* initialize transmit message */ transmit_message.can_tx_sfid = 0x321; transmit_message.can_tx_efid = 0x01; transmit_message.can_tx_ft = CAN_FT_DATA; transmit_message.can_tx_ff = CAN_FF_STANDARD; transmit_message.can_tx_dlen = 1; printf("please press the Tamper key to transmit data!\r\n"); while(1){ /* waiting for the Tamper key pressed */ while(0 == 
gd_eval_keygetstate(KEY_TAMPER)){ /* if transmit_number is 0x10, set it to 0x00 */ if(transmit_number == 0x10){ transmit_number = 0x00; }else{ transmit_message.can_tx_data[0] = transmit_number++; printf("transmit data: %x\r\n", transmit_message.can_tx_data[0]); /* transmit message */ can_transmit_message(CANX, &transmit_message); delay(); /* waiting for Tamper key up */ while(0 == gd_eval_keygetstate(KEY_TAMPER)); } } if(SET == receive_flag){ gd_eval_ledtoggle(LED1); receive_flag = RESET; printf("recive data: %x\r\n", receive_message.can_rx_data[0]); } } } /*! \brief initialize CAN and filter \param[in] can_parameter \arg can_parameter_struct \param[in] can_filter \arg can_filter_parameter_struct \param[out] none \retval none */ void can_networking_init(can_parameter_struct can_parameter, can_filter_parameter_struct can_filter) { /* initialize CAN register */ can_deinit(CANX); /* initialize CAN */ can_parameter.can_ttc=DISABLE; can_parameter.can_abor=DISABLE; can_parameter.can_awu=DISABLE; can_parameter.can_ard=DISABLE; can_parameter.can_rfod=DISABLE; can_parameter.can_tfo=DISABLE; can_parameter.can_mode=CAN_NORMAL_MODE; can_parameter.can_sjw=CAN_BT_SJW_1TQ; can_parameter.can_bs1=CAN_BT_BS1_3TQ; can_parameter.can_bs2=CAN_BT_BS2_2TQ; /* baudrate 1Mbps */ can_parameter.can_psc=12; can_init(CANX, &can_parameter); /* initialize filter */ #ifdef CAN0_USED /* CAN0 filter number */ can_filter.can_filter_number=0; #else /* CAN1 filter number */ can_filter.can_filter_number=15; #endif /* initialize filter */ can_filter.can_filter_mode = CAN_FILTERMODE_MASK; can_filter.can_filter_bits = CAN_FILTERBITS_32BIT; can_filter.can_filter_list_high = 0x0000; can_filter.can_filter_list_low = 0x0000; can_filter.can_filter_mask_high = 0x0000; can_filter.can_filter_mask_low = 0x0000; can_filter.can_filter_fifo_number = CAN_FIFO0; can_filter.can_filter_enable=ENABLE; can_filter_init(&can_filter); } /*! 
\brief configure the nested vectored interrupt controller \param[in] none \param[out] none \retval none */ void nvic_config(void) { #ifdef CAN0_USED /* configure CAN0 NVIC */ nvic_irq_enable(CAN0_RX0_IRQn,0,0); #else /* configure CAN1 NVIC */ nvic_irq_enable(CAN1_RX0_IRQn,0,0); #endif } /*! \brief delay \param[in] none \param[out] none \retval none */ void delay(void) { uint16_t nTime = 0x0000; for(nTime = 0; nTime < 0xFFFF; nTime++){ } } /*! \brief configure the leds \param[in] none \param[out] none \retval none */ void led_config(void) { gd_eval_ledinit (LED1); gd_eval_ledinit (LED2); gd_eval_ledinit (LED3); gd_eval_ledinit (LED4); } /*! \brief configure GPIO \param[in] none \param[out] none \retval none */ void gpio_config(void) { /* enable can clock */ rcu_periph_clock_enable(RCU_CAN0); rcu_periph_clock_enable(RCU_CAN1); rcu_periph_clock_enable(RCU_GPIOA); rcu_periph_clock_enable(RCU_GPIOB); /* configure CAN0 GPIO */ gpio_output_options_set(GPIOA, GPIO_OTYPE_PP, GPIO_OSPEED_50MHZ,GPIO_PIN_5); gpio_mode_set(GPIOA, GPIO_MODE_ANALOG, GPIO_PUPD_NONE, GPIO_PIN_5); gpio_output_options_set(GPIOA, GPIO_OTYPE_PP, GPIO_OSPEED_50MHZ,GPIO_PIN_6); gpio_mode_set(GPIOA, GPIO_MODE_ANALOG, GPIO_PUPD_NONE, GPIO_PIN_6); /* configure CAN1 GPIO */ gpio_output_options_set(GPIOB, GPIO_OTYPE_PP, GPIO_OSPEED_50MHZ,GPIO_PIN_12); gpio_mode_set(GPIOB, GPIO_MODE_AF, GPIO_PUPD_NONE, GPIO_PIN_12); gpio_af_set(GPIOB, GPIO_AF_9, GPIO_PIN_12); gpio_output_options_set(GPIOB, GPIO_OTYPE_PP, GPIO_OSPEED_50MHZ,GPIO_PIN_13); gpio_mode_set(GPIOB, GPIO_MODE_AF, GPIO_PUPD_NONE, GPIO_PIN_13); gpio_af_set(GPIOB, GPIO_AF_9, GPIO_PIN_13); } /* retarget the C library printf function to the usart */ int fputc(int ch, FILE *f) { usart_data_transmit(EVAL_COM2, (uint8_t) ch); while (RESET == usart_flag_get(EVAL_COM2,USART_STAT_TC)); return ch; }
liuxuming/trochili
firmware/GD32F1x0_Firmware_Library_V3.0.0/Examples/CAN/Network_communication/main.c
C
apache-2.0
6,662
package de.peeeq.wurstscript.validation;

import com.google.common.collect.*;
import de.peeeq.wurstio.utils.FileUtils;
import de.peeeq.wurstscript.WLogger;
import de.peeeq.wurstscript.ast.*;
import de.peeeq.wurstscript.attributes.CofigOverridePackages;
import de.peeeq.wurstscript.attributes.CompileError;
import de.peeeq.wurstscript.attributes.ImplicitFuncs;
import de.peeeq.wurstscript.attributes.names.DefLink;
import de.peeeq.wurstscript.attributes.names.FuncLink;
import de.peeeq.wurstscript.attributes.names.NameLink;
import de.peeeq.wurstscript.attributes.names.VarLink;
import de.peeeq.wurstscript.gui.ProgressHelper;
import de.peeeq.wurstscript.types.*;
import de.peeeq.wurstscript.utils.Utils;
import de.peeeq.wurstscript.validation.controlflow.DataflowAnomalyAnalysis;
import de.peeeq.wurstscript.validation.controlflow.ReturnsAnalysis;
import io.vavr.Tuple2;
import org.eclipse.jdt.annotation.Nullable;

import java.util.*;
import java.util.Map.Entry;
import java.util.stream.Collectors;

import static de.peeeq.wurstscript.attributes.SmallHelpers.superArgs;

/**
 * this class validates a wurstscript program
 * <p>
 * it has visit methods for different elements in the AST and checks whether
 * these are correct
 * <p>
 * the validation phase might not find all errors, code transformation and
 * optimization phases might detect other errors because they do a more
 * sophisticated analysis of the program
 * <p>
 * also note that many cases are already caught by the calculation of the
 * attributes
 */
public class WurstValidator {
    /** the model being validated */
    private WurstModel prog;
    /** total number of FuncDefs in the model; used only for progress reporting */
    private int functionCount;
    /** number of FuncDefs visited so far; used only for progress reporting */
    private int visitedFunctions;
    /** caller-scope -> callee-scope edges (population not visible in this chunk -- TODO confirm use) */
    private Multimap<WScope, WScope> calledFunctions = HashMultimap.create();
    /** the element currently being checked; attached to the error message when the validator itself crashes */
    private @Nullable Element lastElement = null;
    /** names of wrapper functions around TriggerRegisterVariableEvent, checked in postChecks */
    private HashSet<String> trveWrapperFuncs = new HashSet<>();
    /** calls to each of those wrapper functions, keyed by wrapper name */
    private HashMap<String, HashSet<FunctionCall>> wrapperCalls = new HashMap<>();

    public WurstValidator(WurstModel root) {
        this.prog = root;
    }

    /**
     * Validates the given compilation units: walks each tree running per-element
     * checks, then runs the post checks. An unexpected RuntimeException is
     * reported as a compiler bug on the element being checked at the time.
     */
    public void validate(Collection<CompilationUnit> toCheck) {
try { functionCount = countFunctions(); visitedFunctions = 0; prog.getErrorHandler().setProgress("Checking wurst types", ProgressHelper.getValidatorPercent(visitedFunctions, functionCount)); for (CompilationUnit cu : toCheck) { walkTree(cu); } prog.getErrorHandler().setProgress("Post checks", 0.55); postChecks(toCheck); } catch (RuntimeException e) { WLogger.severe(e); Element le = lastElement; if (le != null) { le.addError("Encountered compiler bug near element " + Utils.printElement(le) + ":\n" + Utils.printException(e)); } else { // rethrow throw e; } } } /** * checks done after walking the tree */ private void postChecks(Collection<CompilationUnit> toCheck) { checkUnusedImports(toCheck); ValidateGlobalsUsage.checkGlobalsUsage(toCheck); ValidateClassMemberUsage.checkClassMembers(toCheck); ValidateLocalUsage.checkLocalsUsage(toCheck); trveWrapperFuncs.forEach(wrapper -> { if (wrapperCalls.containsKey(wrapper)) { wrapperCalls.get(wrapper).forEach(call -> { if (call.getArgs().size() > 1 && call.getArgs().get(1) instanceof ExprStringVal) { ExprStringVal varName = (ExprStringVal) call.getArgs().get(1); TRVEHelper.protectedVariables.add(varName.getValS()); WLogger.info("keep: " + varName.getValS()); } else { call.addError("Map contains TriggerRegisterVariableEvent with non-constant arguments. 
Can't be optimized."); } }); } }); } private void checkUnusedImports(Collection<CompilationUnit> toCheck) { for (CompilationUnit cu : toCheck) { for (WPackage p : cu.getPackages()) { checkUnusedImports(p); } } } private void checkUnusedImports(WPackage p) { Set<PackageOrGlobal> used = Sets.newLinkedHashSet(); collectUsedPackages(used, p.getElements()); // String usedToStr = // used.stream().map(Utils::printElement).sorted().collect(Collectors.joining(", // ")); // System.out.println("used = " + usedToStr); // contributed packages for each import Map<WImport, Set<WPackage>> contributions = new HashMap<>(); for (WImport imp : p.getImports()) { Set<WPackage> contributedPackages = contributedPackages(imp.attrImportedPackage(), used, new HashSet<>()); contributions.put(imp, contributedPackages); // System.out.println( imp.getPackagename() + " contributes = " + // contributedPackages.stream().map(Utils::printElement).sorted().collect(Collectors.joining(", // "))); } // check for imports, which only contribute a subset of some other // import for (WImport imp : p.getImports()) { if (imp.attrImportedPackage() == null || imp.getIsPublic() || imp.getPackagename().equals("Wurst")) { continue; } Set<WPackage> impContributions = contributions.get(imp); if (impContributions.isEmpty()) { imp.addWarning("The import " + imp.getPackagename() + " is never used"); } else { for (WImport imp2 : p.getImports()) { if (imp == imp2) { continue; } if (contributions.get(imp2).containsAll(impContributions)) { imp.addWarning("The import " + imp.getPackagename() + " can be removed, because it is already included in " + imp2.getPackagename() + "."); break; } } } } } private Set<WPackage> contributedPackages(WPackage p, Set<PackageOrGlobal> used, Set<WPackage> visited) { if (p == null) { return Collections.emptySet(); } visited.add(p); Set<WPackage> result = new HashSet<>(); if (used.contains(p)) { result.add(p); } for (WImport imp : p.getImports()) { WPackage imported = imp.attrImportedPackage(); 
if (imp.getPackagename().equals("Wurst") || visited.contains(imported)) { continue; } if (imp.getIsPublic()) { result.addAll(contributedPackages(imported, used, visited)); } } return result; } private WPackage getConfiguredPackage(Element e) { PackageOrGlobal p = e.attrNearestPackage(); if(p instanceof WPackage) { if (p.getModel().attrConfigOverridePackages().containsValue(p)) { for(WPackage k : p.getModel().attrConfigOverridePackages().keySet()) { if(p.getModel().attrConfigOverridePackages().get(k).equals(p)) { return k; } } } } return null; } private void collectUsedPackages(Set<PackageOrGlobal> used, Element e) { for (int i = 0; i < e.size(); i++) { collectUsedPackages(used, e.get(i)); } if (e instanceof FuncRef) { FuncRef fr = (FuncRef) e; FuncLink link = fr.attrFuncLink(); if (link != null) { used.add(link.getDef().attrNearestPackage()); if(link.getDef().attrHasAnnotation("@config")) { WPackage configPackage = getConfiguredPackage(link.getDef()); if(configPackage != null) { used.add(configPackage); } } } } if (e instanceof NameRef) { NameRef nr = (NameRef) e; NameLink def = nr.attrNameLink(); if (def != null) { used.add(def.getDef().attrNearestPackage()); if(def.getDef().attrHasAnnotation("@config")) { WPackage configPackage = getConfiguredPackage(def.getDef()); if(configPackage != null) { used.add(configPackage); } } } } if (e instanceof TypeRef) { TypeRef t = (TypeRef) e; TypeDef def = t.attrTypeDef(); if (def != null) { used.add(def.attrNearestPackage()); } } if (e instanceof ExprBinary) { ExprBinary binop = (ExprBinary) e; FuncLink def = binop.attrFuncLink(); if (def != null) { used.add(def.getDef().attrNearestPackage()); } } if (e instanceof Expr) { WurstType typ = ((Expr) e).attrTyp(); if (typ instanceof WurstTypeNamedScope) { WurstTypeNamedScope ns = (WurstTypeNamedScope) typ; NamedScope def = ns.getDef(); if (def != null) { used.add(def.attrNearestPackage()); } } else if (typ instanceof WurstTypeTuple) { TupleDef def = ((WurstTypeTuple) 
typ).getTupleDef(); used.add(def.attrNearestPackage()); } } if (e instanceof ModuleUse) { ModuleUse mu = (ModuleUse) e; @Nullable ModuleDef def = mu.attrModuleDef(); if (def != null) { used.add(def.attrNearestPackage()); } } } private void walkTree(Element e) { lastElement = e; check(e); lastElement = null; for (int i = 0; i < e.size(); i++) { walkTree(e.get(i)); } } private void check(Element e) { try { if (e instanceof Annotation) checkAnnotation((Annotation) e); if (e instanceof AstElementWithTypeParameters) checkTypeParameters((AstElementWithTypeParameters) e); if (e instanceof AstElementWithNameId) checkName((AstElementWithNameId) e); if (e instanceof ClassDef) { checkAbstractMethods((ClassDef) e); visit((ClassDef) e); } if (e instanceof ClassOrModule) checkConstructorsUnique((ClassOrModule) e); if (e instanceof CompilationUnit) checkPackageName((CompilationUnit) e); if (e instanceof ConstructorDef) checkConstructor((ConstructorDef) e); if (e instanceof ConstructorDef) checkConstructorSuperCall((ConstructorDef) e); if (e instanceof ExprBinary) visit((ExprBinary) e); if (e instanceof ExprClosure) checkClosure((ExprClosure) e); if (e instanceof ExprEmpty) checkExprEmpty((ExprEmpty) e); if (e instanceof ExprIntVal) checkIntVal((ExprIntVal) e); if (e instanceof ExprFuncRef) checkFuncRef((ExprFuncRef) e); if (e instanceof ExprFunctionCall) checkBannedFunctions((ExprFunctionCall) e); if (e instanceof ExprFunctionCall) visit((ExprFunctionCall) e); if (e instanceof ExprMemberMethod) visit((ExprMemberMethod) e); if (e instanceof ExprMemberVar) checkMemberVar((ExprMemberVar) e); if (e instanceof ExprMemberArrayVar) checkMemberArrayVar((ExprMemberArrayVar) e); if (e instanceof ExprNewObject) checkNewObj((ExprNewObject) e); if (e instanceof ExprNewObject) visit((ExprNewObject) e); if (e instanceof ExprNull) checkExprNull((ExprNull) e); if (e instanceof ExprVarAccess) visit((ExprVarAccess) e); if (e instanceof ExprVarArrayAccess) checkArrayAccess((ExprVarArrayAccess) e); 
if (e instanceof ExtensionFuncDef) visit((ExtensionFuncDef) e); if (e instanceof FuncDef) visit((FuncDef) e); if (e instanceof FuncRef) checkFuncRef((FuncRef) e); if (e instanceof FunctionLike) checkUninitializedVars((FunctionLike) e); if (e instanceof GlobalVarDef) visit((GlobalVarDef) e); if (e instanceof HasModifier) checkModifiers((HasModifier) e); if (e instanceof HasTypeArgs) checkTypeBinding((HasTypeArgs) e); if (e instanceof InterfaceDef) checkInterfaceDef((InterfaceDef) e); if (e instanceof LocalVarDef) checkLocalShadowing((LocalVarDef) e); if (e instanceof LocalVarDef) visit((LocalVarDef) e); if (e instanceof Modifiers) visit((Modifiers) e); if (e instanceof ModuleDef) visit((ModuleDef) e); if (e instanceof NameDef) nameDefsMustNotBeNamedAfterJassNativeTypes((NameDef) e); if (e instanceof NameDef) checkConfigOverride((NameDef) e); if (e instanceof NameRef) checkImplicitParameter((NameRef) e); if (e instanceof NameRef) checkNameRef((NameRef) e); if (e instanceof StmtCall) checkCall((StmtCall) e); if (e instanceof ExprDestroy) visit((ExprDestroy) e); if (e instanceof StmtForRange) checkForRange((StmtForRange) e); if (e instanceof StmtIf) visit((StmtIf) e); if (e instanceof StmtReturn) visit((StmtReturn) e); if (e instanceof StmtSet) checkStmtSet((StmtSet) e); if (e instanceof StmtWhile) visit((StmtWhile) e); if (e instanceof SwitchStmt) checkSwitch((SwitchStmt) e); if (e instanceof TypeExpr) checkTypeExpr((TypeExpr) e); if (e instanceof TypeExprArray) checkCodeArrays((TypeExprArray) e); if (e instanceof TupleDef) checkTupleDef((TupleDef) e); if (e instanceof VarDef) checkVarDef((VarDef) e); if (e instanceof WImport) visit((WImport) e); if (e instanceof WPackage) checkPackage((WPackage) e); if (e instanceof WParameter) checkParameter((WParameter) e); if (e instanceof WParameter) visit((WParameter) e); if (e instanceof WScope) checkForDuplicateNames((WScope) e); if (e instanceof WStatement) checkReachability((WStatement) e); if (e instanceof WurstModel) 
checkForDuplicatePackages((WurstModel) e); if (e instanceof WStatements) { checkForInvalidStmts((WStatements) e); checkForEmptyBlocks((WStatements) e); } if (e instanceof StmtExitwhen) visit((StmtExitwhen) e); } catch (CyclicDependencyError cde) { cde.printStackTrace(); Element element = cde.getElement(); String attr = cde.getAttributeName().replaceFirst("^attr", ""); WLogger.info(Utils.printElementWithSource(Optional.of(element)) + " depends on itself when evaluating attribute " + attr); WLogger.info(cde); throw new CompileError(element.attrSource(), Utils.printElement(element) + " depends on itself when evaluating attribute " + attr); } } private void checkAbstractMethods(ClassDef c) { ImmutableMultimap<String, DefLink> nameLinks = c.attrNameLinks(); if (!c.attrIsAbstract()) { StringBuilder toImplement = new StringBuilder(); // should have no abstract methods for (DefLink link : nameLinks.values()) { NameDef f = link.getDef(); if (f.attrIsAbstract()) { if (f.attrNearestStructureDef() == c) { Element loc = f.getModifiers().stream() .filter(m -> m instanceof ModAbstract) .<Element>map(x -> x) .findFirst() .orElse(f); loc.addError("Non-abstract class " + c.getName() + " cannot have abstract functions like " + f.getName()); } else if (link instanceof FuncLink) { toImplement.append("\n "); toImplement.append(((FuncLink) link).printFunctionTemplate()); } } } if (toImplement.length() > 0) { c.addError("Non-abstract class " + c.getName() + " must implement the following functions:" + toImplement); } } } private void visit(StmtExitwhen exitwhen) { Element parent = exitwhen.getParent(); while (!(parent instanceof FunctionDefinition)) { if (parent instanceof StmtForEach) { StmtForEach forEach = (StmtForEach) parent; if (forEach.getIn().tryGetNameDef().attrIsVararg()) { exitwhen.addError("Cannot use break in vararg for each loops."); } return; } else if (parent instanceof LoopStatement) { return; } parent = parent.getParent(); } exitwhen.addError("Break is not allowed 
outside of loop statements."); } private void checkTupleDef(TupleDef e) { checkTupleDefCycle(e, new ArrayList<>()); } private boolean checkTupleDefCycle(TupleDef e, ArrayList<TupleDef> tuples) { if (tuples.contains(e)) { return true; } tuples.add(e); try { for (WParameter param : e.getParameters()) { WurstType t = param.getTyp().attrTyp(); if (t instanceof WurstTypeTuple) { WurstTypeTuple tt = (WurstTypeTuple) t; TupleDef tDef = tt.getTupleDef(); if (checkTupleDefCycle(tDef, tuples)) { param.addError("Parameter " + param.getName() + " is recursive. This is not allowed for tuples."); return true; } } } return false; } finally { tuples.remove(e); } } private void checkForInvalidStmts(WStatements stmts) { for (WStatement s : stmts) { if (s instanceof ExprVarAccess) { ExprVarAccess ev = (ExprVarAccess) s; s.addError("Use of variable " + ev.getVarName() + " is an incomplete statement."); } } } private void checkForEmptyBlocks(WStatements e) { Element parent = e.getParent(); // some parent cases to ignore: if (parent instanceof OnDestroyDef || parent instanceof ConstructorDef || parent instanceof FunctionDefinition || parent instanceof SwitchDefaultCaseStatements || parent instanceof SwitchCase) { return; } if (parent instanceof ExprStatementsBlock) { // for blocks in closures, we have StartFunction and EndFunction statements, so must be > 2 to be nonempty if (e.size() > 2) { return; } parent.getParent().addWarning("This function has an empty body. Write 'skip' if you intend to leave it empty."); return; } if (!e.isEmpty()) { return; } if (Utils.isJassCode(parent)) { // no warnings in Jass code return; } if (parent instanceof StmtIf) { StmtIf stmtIf = (StmtIf) parent; if (e == stmtIf.getElseBlock() && stmtIf.getHasElse()) { parent.addWarning("This if-statement has an empty else-block."); } else if (e == stmtIf.getThenBlock()) { parent.addWarning("This if-statement has an empty then-block. 
Write 'skip' if you intend to leave it empty."); } return; } parent.addWarning("This statement (" + Utils.printElement(parent) + ") contains an empty block. Write 'skip' if you intend to leave it empty."); } private void checkName(AstElementWithNameId e) { String name = e.getNameId().getName(); TypeDef def = e.lookupType(name, false); if (def != e && def instanceof NativeType) { e.addError( "The name '" + name + "' is already used as a native type in " + Utils.printPos(def.getSource())); } else if (!e.attrSource().getFile().endsWith(".j")) { switch (name) { case "int": case "integer": case "real": case "code": case "boolean": case "string": case "handle": e.addError("The name '" + name + "' is a built-in type and cannot be used here."); } } } private void checkConfigOverride(NameDef e) { if (!e.hasAnnotation("@config")) { return; } PackageOrGlobal nearestPackage = e.attrNearestPackage(); if (!(nearestPackage instanceof WPackage)) { e.addError("Annotation @config can only be used in packages."); return; } WPackage configPackage = (WPackage) nearestPackage; if (!configPackage.getName().endsWith(CofigOverridePackages.CONFIG_POSTFIX)) { e.addError( "Annotation @config can only be used in config packages (package name has to end with '_config')."); return; } WPackage origPackage = CofigOverridePackages.getOriginalPackage(configPackage); if (origPackage == null) { return; } if (e instanceof GlobalVarDef) { GlobalVarDef v = (GlobalVarDef) e; NameLink origVar = origPackage.getElements().lookupVarNoConfig(v.getName(), false); if (origVar == null) { e.addError("Could not find var " + v.getName() + " in configured package."); return; } if (!v.attrTyp().equalsType(origVar.getTyp(), v)) { e.addError("Configured variable must have type " + origVar.getTyp() + " but the found type is " + v.attrTyp() + "."); return; } if (!origVar.getDef().hasAnnotation("@configurable")) { e.addWarning("The configured variable " + v.getName() + " is not marked with @configurable.\n" + "It is still 
possible to configure this var but it is not recommended."); } } else if (e instanceof FuncDef) { FuncDef funcDef = (FuncDef) e; Collection<FuncLink> funcs = origPackage.getElements().lookupFuncsNoConfig(funcDef.getName(), false); FuncDef configuredFunc = null; for (NameLink nameLink : funcs) { if (nameLink.getDef() instanceof FuncDef) { FuncDef f = (FuncDef) nameLink.getDef(); if (equalSignatures(funcDef, f)) { configuredFunc = f; break; } } } if (configuredFunc == null) { funcDef.addError("Could not find a function " + funcDef.getName() + " with the same signature in the configured package."); } else { if (!configuredFunc.hasAnnotation("@configurable")) { e.addWarning("The configured function " + funcDef.getName() + " is not marked with @configurable.\n" + "It is still possible to configure this function but it is not recommended."); } } } else { e.addError("Configuring " + Utils.printElement(e) + " is not supported by Wurst."); } } private boolean equalSignatures(FuncDef f, FuncDef g) { if (f.getParameters().size() != g.getParameters().size()) { return false; } if (!f.attrReturnTyp().equalsType(g.attrReturnTyp(), f)) { return false; } for (int i = 0; i < f.getParameters().size(); i++) { if (!f.getParameters().get(i).attrTyp().equalsType(g.getParameters().get(i).attrTyp(), f)) { return false; } } return true; } private void checkExprEmpty(ExprEmpty e) { e.addError("Incomplete expression..."); } private void checkMemberArrayVar(ExprMemberArrayVar e) { // TODO Auto-generated method stub } private void checkNameRef(NameRef e) { if (e.getVarName().isEmpty()) { e.addError("Missing variable name."); } } private void checkPackage(WPackage p) { checkForDuplicateImports(p); p.attrInitDependencies(); } private void checkTypeExpr(TypeExpr e) { if (e instanceof TypeExprResolved) { return; } if (e.isModuleUseTypeArg()) { return; } TypeDef typeDef = e.attrTypeDef(); // check that modules are not used as normal types if (e.attrTypeDef() instanceof ModuleDef) { ModuleDef md = 
(ModuleDef) e.attrTypeDef(); checkModuleTypeUsedCorrectly(e, md); } if (typeDef instanceof TypeParamDef) { // references a type parameter TypeParamDef tp = (TypeParamDef) typeDef; checkTypeparamsUsedCorrectly(e, tp); } } /** * Checks that module types are only used in valid places */ private void checkModuleTypeUsedCorrectly(TypeExpr e, ModuleDef md) { if (e instanceof TypeExprThis) { // thistype is allowed, because it is translated to a real type when used return; } if (e.getParent() instanceof TypeExprThis) { TypeExprThis parent = (TypeExprThis) e.getParent(); if (parent.getScopeType() == e) { // ModuleName.thistype is allowed // TODO (maybe check here that it is a parent) return; } } if (e instanceof TypeExprSimple) { TypeExprSimple tes = (TypeExprSimple) e; if (tes.getScopeType() instanceof TypeExpr) { TypeExpr scopeType = (TypeExpr) tes.getScopeType(); if (scopeType instanceof TypeExprThis || scopeType.attrTypeDef() instanceof ModuleDef) { // thistype.A etc. is allowed return; } } } e.addError("Cannot use module type " + md.getName() + " in this context."); } /** * check that type parameters are used in correct contexts: */ private void checkTypeparamsUsedCorrectly(TypeExpr e, TypeParamDef tp) { if (tp.isStructureDefTypeParam()) { // typeParamDef is for // structureDef if (tp.attrNearestStructureDef() instanceof ModuleDef) { // in modules we can also type-params in static contexts return; } if (!e.attrIsDynamicContext()) { e.addError("Type variables must not be used in static contexts."); } } } private void checkClosure(ExprClosure e) { WurstType expectedTyp = e.attrExpectedTypAfterOverloading(); if (expectedTyp instanceof WurstTypeCode) { // TODO check if no vars are captured if (!e.attrCapturedVariables().isEmpty()) { for (Entry<Element, VarDef> elem : e.attrCapturedVariables().entries()) { elem.getKey().addError("Cannot capture local variable '" + elem.getValue().getName() + "' in anonymous function. 
This is only possible with closures."); } } } else if (expectedTyp instanceof WurstTypeUnknown || expectedTyp instanceof WurstTypeClosure) { e.addError("Closures can only be used when a interface or class type is given."); } else if (!(expectedTyp instanceof WurstTypeClass || expectedTyp instanceof WurstTypeInterface)) { e.addError("Closures can only be used when a interface or class type is given, " + "but at this position a " + expectedTyp + " is expected."); } e.attrCapturedVariables(); if (e.getImplementation() instanceof ExprStatementsBlock) { ExprStatementsBlock block = (ExprStatementsBlock) e.getImplementation(); new DataflowAnomalyAnalysis(false).execute(block); } if (expectedTyp instanceof WurstTypeClass) { WurstTypeClass ct = (WurstTypeClass) expectedTyp; ClassDef cd = ct.getClassDef(); if (cd.getConstructors().stream().noneMatch(constr -> constr.getParameters().isEmpty())) { e.addError("No default constructor for class " + ct + " found, so it cannot be instantiated using an anonymous function."); } } } private void checkConstructorsUnique(ClassOrModule c) { List<ConstructorDef> constrs = c.getConstructors(); for (int i = 0; i < constrs.size() - 1; i++) { ConstructorDef c1 = constrs.get(i); for (int j = i + 1; i < constrs.size(); i++) { ConstructorDef c2 = constrs.get(j); if (c1.getParameters().size() != c2.getParameters().size()) { continue; } if (!parametersTypeDisjunct(c1.getParameters(), c2.getParameters())) { c2.addError( "Duplicate constructor, an other constructor with similar types is already defined in line " + c1.attrSource().getLine()); } } } } private boolean parametersTypeDisjunct(WParameters params1, WParameters params2) { for (int i = 0; i < params1.size(); i++) { WurstType t1 = params1.get(i).attrTyp(); WurstType t2 = params2.get(i).attrTyp(); if (!t1.isSubtypeOf(t2, params1) && !t2.isSubtypeOf(t1, params2)) { return true; } } return false; } private void checkImplicitParameter(NameRef e) { e.attrImplicitParameter(); } private void 
checkTypeParameters(AstElementWithTypeParameters e) { for (TypeParamDef ta : e.getTypeParameters()) { if (ta.getName().contains("<") || ta.getName().startsWith("#")) { ta.addError("Type parameter must be a simple name "); } else { checkTypeName(ta, ta.getName()); } ta.attrTyp(); } } private void checkExprNull(ExprNull e) { if (!Utils.isJassCode(e) && e.attrExpectedTyp() instanceof WurstTypeUnknown) { e.addError( "Cannot use 'null' constant here because " + "the compiler cannot infer which kind of null it is."); } } private void checkForRange(StmtForRange e) { if (!(e.getLoopVar().attrTyp().isSubtypeOf(WurstTypeInt.instance(), e))) { e.getLoopVar().addError("For-loop variable must be int."); } if (!(e.getTo().attrTyp().isSubtypeOf(WurstTypeInt.instance(), e))) { e.getLoopVar().addError("For-loop target must be int."); } if (!(e.getStep().attrTyp().isSubtypeOf(WurstTypeInt.instance(), e))) { e.getLoopVar().addError("For-loop step must be int."); } } private void checkIntVal(ExprIntVal e) { // check range? ... } private int countFunctions() { final int functionCount[] = new int[1]; prog.accept(new WurstModel.DefaultVisitor() { @Override public void visit(FuncDef f) { super.visit(f); functionCount[0]++; } }); return functionCount[0]; } private void checkStmtSet(StmtSet s) { NameLink nameLink = s.getUpdatedExpr().attrNameLink(); if (nameLink == null) { s.getUpdatedExpr().addError("Could not find variable " + s.getUpdatedExpr().getVarName() + "."); return; } if (!(nameLink instanceof VarLink)) { s.getUpdatedExpr() .addError("Invalid assignment. 
This is not a variable, this is a " + nameLink); return; } WurstType leftType = s.getUpdatedExpr().attrTyp(); WurstType rightType = s.getRight().attrTyp(); checkAssignment(Utils.isJassCode(s), s, leftType, rightType); checkIfAssigningToConstant(s.getUpdatedExpr()); checkIfNoEffectAssignment(s); } private void checkIfNoEffectAssignment(StmtSet s) { if (refersToSameVar(s.getUpdatedExpr(), s.getRight())) { s.addWarning("The assignment to " + Utils.printElement(s.getUpdatedExpr().attrNameDef()) + " probably has no effect."); } } private boolean refersToSameVar(OptExpr a, OptExpr b) { if (a instanceof NoExpr && b instanceof NoExpr) { return true; } if (a instanceof ExprThis && b instanceof ExprThis) { return true; } if (a instanceof NameRef && b instanceof NameRef) { NameRef va = (NameRef) a; NameRef vb = (NameRef) b; NameLink nla = va.attrNameLink(); NameLink nlb = vb.attrNameLink(); if (nla != null && nlb != null && nla.getDef() == nlb.getDef() && refersToSameVar(va.attrImplicitParameter(), vb.attrImplicitParameter())) { if (va instanceof AstElementWithIndexes && vb instanceof AstElementWithIndexes) { AstElementWithIndexes vai = (AstElementWithIndexes) va; AstElementWithIndexes vbi = (AstElementWithIndexes) vb; for (int i = 0; i < vai.getIndexes().size() && i < vbi.getIndexes().size(); i++) { if (!refersToSameVar(vai.getIndexes().get(i), vbi.getIndexes().get(i))) { return false; } } } return true; } } return false; } private void checkIfAssigningToConstant(final LExpr left) { left.match(new LExpr.MatcherVoid() { @Override public void case_ExprVarArrayAccess(ExprVarArrayAccess e) { } @Override public void case_ExprVarAccess(ExprVarAccess e) { checkVarNotConstant(e, e.attrNameLink()); } @Override public void case_ExprMemberVarDot(ExprMemberVarDot e) { if (e.attrNameDef() instanceof WParameter) { // we have an assignment to a tuple variable // check whether left side is 'this' or a constant variable if (e.getLeft() instanceof ExprThis) { e.addError("Cannot change 'this'. 
Tuples are not classes."); } else if (e.getLeft() instanceof NameRef) { checkIfAssigningToConstant((NameRef) e.getLeft()); } else { e.addError( "Ok, so you are trying to assign something to the return value of a function. This wont do nothing. Tuples are not classes."); } } checkVarNotConstant(e, e.attrNameLink()); } @Override public void case_ExprMemberArrayVarDot(ExprMemberArrayVarDot e) { } @Override public void case_ExprMemberArrayVarDotDot(ExprMemberArrayVarDotDot e) { e.addError("Cannot assign to dot-dot-expression."); } @Override public void case_ExprMemberVarDotDot(ExprMemberVarDotDot e) { e.addError("Cannot assign to dot-dot-expression."); } }); } private void checkVarNotConstant(NameRef left, @Nullable NameLink link) { if (link == null) { return; } NameDef var = link.getDef(); if (var != null && var.attrIsConstant()) { if (var instanceof GlobalVarDef) { GlobalVarDef glob = (GlobalVarDef) var; if (glob.attrIsDynamicClassMember() && isInConstructor(left)) { // allow to assign constant members in constructor return; } } left.addError("Cannot assign a new value to constant " + Utils.printElement(var)); } } private boolean isInConstructor(Element e) { while (e != null) { if (e instanceof ConstructorDef) { return true; } e = e.getParent(); } return false; } private void checkAssignment(boolean isJassCode, Element pos, WurstType leftType, WurstType rightType) { if (!rightType.isSubtypeOf(leftType, pos)) { if (isJassCode) { if (leftType.isSubtypeOf(WurstTypeReal.instance(), pos) && rightType.isSubtypeOf(WurstTypeInt.instance(), pos)) { // special case: jass allows to assign an integer to a real // variable return; } } pos.addError("Cannot assign " + rightType + " to " + leftType); } if (leftType instanceof WurstTypeNamedScope) { WurstTypeNamedScope ns = (WurstTypeNamedScope) leftType; if (ns.isStaticRef()) { pos.addError("Missing variable name in variable declaration.\n" + "Cannot assign to " + leftType); } } if (leftType instanceof WurstTypeArray) { 
pos.addError("Missing array index for assignment to array variable.s"); } if (rightType instanceof WurstTypeVoid) { if (pos.attrNearestPackage() instanceof WPackage) { WPackage pack = (WPackage) pos.attrNearestPackage(); if (pack != null && !pack.getName().equals("WurstREPL")) { // allow // assigning // nothing // to // a // variable // in // the // Repl pos.addError("Function or expression returns nothing. Cannot assign nothing to a variable."); } } } } private void visit(LocalVarDef s) { checkVarName(s, false); if (s.getInitialExpr() instanceof Expr) { Expr initial = (Expr) s.getInitialExpr(); if ((s.getOptTyp() instanceof NoTypeExpr)) { // TODO } else { if (initial instanceof ExprNewObject) { s.addWarning("Duplicated type information. Use 'var' or 'let' instead."); } } WurstType leftType = s.attrTyp(); WurstType rightType = initial.attrTyp(); checkAssignment(Utils.isJassCode(s), s, leftType, rightType); } else if (s.getInitialExpr() instanceof ArrayInitializer) { ArrayInitializer arInit = (ArrayInitializer) s.getInitialExpr(); checkArrayInit(s, arInit); } checkIfRead(s); } private void checkArrayInit(VarDef def, ArrayInitializer arInit) { WurstType leftType = def.attrTyp(); if (leftType instanceof WurstTypeArray) { WurstTypeArray arT = (WurstTypeArray) leftType; if (arT.getDimensions() > 1) { def.addError("Array initializer can only be used with one-dimensional arrays."); } if (arT.getDimensions() == 1) { int initialValues = arInit.getValues().size(); int size = arT.getSize(0); if (size >= 0 && size != initialValues) { def.addError("Array variable " + def.getName() + " is an array of size " + size + ", but is initialized with " + initialValues + " values here."); } } WurstType baseType = arT.getBaseType(); for (Expr expr : arInit.getValues()) { if (!expr.attrTyp().isSubtypeOf(baseType, expr)) { expr.addError("Expected expression of type " + baseType + " in array initialization, but found " + expr.attrTyp()); } } } else { def.addError("Array initializer can only 
be used with array-variables, but " + Utils.printElement(def) + " has type " + leftType); } } private void checkIfRead(VarDef s) { if (s.getName().startsWith("_")) { // variables starting with an underscore are not read // (same convention as in Erlang) return; } if (Utils.isJassCode(s)) { return; } if (s.getParent() instanceof StmtForRange) { // it is ok, when the variable of a for-statement is not used return; } WScope f = s.attrNearestScope(); if (f != null && !f.attrReadVariables().contains(s)) { s.addWarning("The " + Utils.printElement(s) + " is never read. If intentional, prefix with \"_\" to suppress this warning."); } } private void checkVarName(VarDef s, boolean isConstant) { String varName = s.getName(); if (!isValidVarnameStart(varName) // first letter not lower case && !Utils.isJassCode(s) // not in jass code && !varName.matches("[A-Z0-9_]+") // not a constant ) { s.addWarning("Variable names should start with a lower case character. (" + varName + ")"); } if (varName.equals("handle")) { s.addError("\"handle\" is not a valid variable name"); } else if (varName.equals("code")) { s.addError("\"code\" is not a valid variable name"); } } private boolean isValidVarnameStart(String varName) { return varName.length() > 0 && Character.isLowerCase(varName.charAt(0)) || varName.startsWith("_"); } private void visit(WParameter p) { checkVarName(p, false); if (p.attrIsVararg()) { if (p.attrNearestFuncDef().getParameters().size() != 1) { p.addError("Vararg functions may only have one parameter"); } } checkIfParameterIsRead(p); } private void checkIfParameterIsRead(WParameter p) { FunctionImplementation f = p.attrNearestFuncDef(); if (f != null) { if (p.getParent().getParent() instanceof ExprClosure) { // closures can ignore parameters return; } if (f.attrIsOverride()) { // if a function is overridden it is ok to ignore parameters return; } if (f.attrIsAbstract()) { // if a function is abstract, then parameter vars are not used return; } if 
(f.attrHasAnnotation("compiletimenative")) { return; } } else { if (p.getParent().getParent() instanceof TupleDef) { // ignore tuples return; } if (p.getParent().getParent() instanceof NativeFunc) { // ignore native functions return; } } checkIfRead(p); } private void visit(GlobalVarDef s) { checkVarName(s, s.attrIsConstant()); if (s.getInitialExpr() instanceof Expr) { Expr initial = (Expr) s.getInitialExpr(); WurstType leftType = s.attrTyp(); WurstType rightType = initial.attrTyp(); checkAssignment(Utils.isJassCode(s), s, leftType, rightType); } else if (s.getInitialExpr() instanceof ArrayInitializer) { checkArrayInit(s, (ArrayInitializer) s.getInitialExpr()); } if (s.attrTyp() instanceof WurstTypeArray && !s.attrIsStatic() && s.attrIsDynamicClassMember()) { // s.addError("Array variables must be static.\n" + // "Hint: use Lists for dynamic stuff."); } } private void visit(StmtIf stmtIf) { WurstType condType = stmtIf.getCond().attrTyp(); if (!(condType instanceof WurstTypeBool)) { stmtIf.getCond().addError("If condition must be a boolean but found " + condType); } } private void visit(StmtWhile stmtWhile) { WurstType condType = stmtWhile.getCond().attrTyp(); if (!(condType instanceof WurstTypeBool)) { stmtWhile.getCond().addError("While condition must be a boolean but found " + condType); } } private void visit(ExtensionFuncDef func) { checkFunctionName(func); func.getExtendedType().attrTyp(); } private void checkFunctionName(FunctionDefinition f) { if (!Utils.isJassCode(f)) { if (!isValidVarnameStart(f.getName())) { f.addWarning("Function names should start with an lower case character."); } } } private void checkReturn(FunctionLike func) { if (!func.attrHasEmptyBody()) { new ReturnsAnalysis().execute(func); } else { // no body, check if in interface: if (func instanceof FunctionImplementation) { FunctionImplementation funcDef = (FunctionImplementation) func; if (funcDef.getReturnTyp() instanceof TypeExpr && !(func.attrNearestStructureDef() instanceof 
InterfaceDef)) { func.addError("Function " + funcDef.getName() + " is missing a body. Use the 'skip' statement to define an empty body."); } } } } private void checkReachability(WStatement s) { if (s.getParent() instanceof WStatements) { WStatements stmts = (WStatements) s.getParent(); if (s.attrPreviousStatements().isEmpty()) { if (s.attrListIndex() > 0 || !(stmts.getParent() instanceof TranslatedToImFunction || stmts.getParent() instanceof ExprStatementsBlock)) { if (Utils.isJassCode(s)) { // in jass this is just a warning, because // the shitty code emitted by jasshelper sometimes // contains unreachable code s.addWarning("Unreachable code"); } else { if (mightBeAffectedBySwitchThatCoversAllCases(s)) { // fow backwards compatibility just use a warning when // switch statements that handle all cases are involved: s.addWarning("Unreachable code"); } else { s.addError("Unreachable code"); } } } } } } private boolean mightBeAffectedBySwitchThatCoversAllCases(WStatement s) { boolean[] containsSwitchAr = { false }; s.attrNearestNamedScope().accept(new Element.DefaultVisitor() { @Override public void visit(SwitchStmt switchStmt) { if (switchStmt.calculateHandlesAllCases()) { containsSwitchAr[0] = true; } } }); return containsSwitchAr[0]; } private void visit(FuncDef func) { visitedFunctions++; func.getErrorHandler().setProgress(null, ProgressHelper.getValidatorPercent(visitedFunctions, functionCount)); checkFunctionName(func); if (func.attrIsAbstract()) { if (!func.attrHasEmptyBody()) { func.addError("Abstract function " + func.getName() + " must not have a body."); } if (func.attrIsPrivate()) { func.addError("Abstract functions must not be private."); } } } private void checkUninitializedVars(FunctionLike f) { boolean isAbstract = false; if (f instanceof FuncDef) { FuncDef func = (FuncDef) f; if (func.attrIsAbstract()) { isAbstract = true; if (!func.attrHasEmptyBody()) { func.getBody().get(0) .addError("The abstract function " + func.getName() + " must not have any 
statements."); } } } if (!isAbstract) { // not abstract checkReturn(f); if (!f.getSource().getFile().endsWith("common.j") && !f.getSource().getFile().endsWith("blizzard.j") && !f.getSource().getFile().endsWith("war3map.j")) { new DataflowAnomalyAnalysis(Utils.isJassCode(f)).execute(f); } } } private void checkCall(StmtCall call) { String funcName; if (call instanceof FunctionCall) { FunctionCall fcall = (FunctionCall) call; funcName = fcall.getFuncName(); HashSet<FunctionCall> fcalls = wrapperCalls.computeIfAbsent(funcName, (String s) -> new HashSet<>()); fcalls.add(fcall); } else if (call instanceof ExprNewObject) { funcName = "constructor"; } else { throw new Error("unhandled case: " + Utils.printElement(call)); } call.attrCallSignature().checkSignatureCompatibility(call.attrFunctionSignature(), funcName, call); } private void checkAnnotation(Annotation a) { FuncLink fl = a.attrFuncLink(); if (fl != null) { if (a.getArgs().size() < fl.getParameterTypes().size()) { a.addWarning("not enough arguments"); } else if (a.getArgs().size() > fl.getParameterTypes().size()) { a.addWarning("too many enough arguments"); } else { for (int i = 0; i < a.getArgs().size(); i++) { WurstType actual = a.getArgs().get(i).attrTyp(); WurstType expected = fl.getParameterType(i); if (!actual.isSubtypeOf(expected, a)) { a.getArgs().get(i).addWarning("Expected " + expected + " but found " + actual + "."); } } } } } private void visit(ExprFunctionCall stmtCall) { String funcName = stmtCall.getFuncName(); // calculating the exprType should reveal most errors: stmtCall.attrTyp(); checkFuncDefDeprecated(stmtCall); if (stmtCall.attrFuncLink() != null) { FuncLink calledFunc = stmtCall.attrFuncLink(); if (calledFunc.getDef().attrIsDynamicClassMember()) { if (!stmtCall.attrIsDynamicContext()) { stmtCall.addError("Cannot call dynamic function " + funcName + " from static context."); } } if (calledFunc.getDef() instanceof ExtensionFuncDef) { stmtCall.addError("Extension function " + funcName + " must 
be called with an explicit receiver.\n" + "Try to write this." + funcName + "(...) ."); } } // special check for filter & condition: if (Utils.oneOf(funcName, "Condition", "Filter") && !stmtCall.getArgs().isEmpty()) { Expr firstArg = stmtCall.getArgs().get(0); if (firstArg instanceof ExprFuncRef) { ExprFuncRef exprFuncRef = (ExprFuncRef) firstArg; FuncLink f = exprFuncRef.attrFuncLink(); if (f != null) { if (!(f.getReturnType() instanceof WurstTypeBool) && !(f.getReturnType() instanceof WurstTypeVoid)) { firstArg.addError("Functions passed to Filter or Condition must return boolean or nothing."); } } } } } // private void checkParams(Element where, List<Expr> args, // FunctionDefinition calledFunc) { // if (calledFunc == null) { // return; // } // List<PscriptType> parameterTypes = calledFunc.attrParameterTypes(); // checkParams(where, args, parameterTypes); // } @Deprecated private void checkParams(Element where, String preMsg, List<Expr> args, FunctionSignature sig) { checkParams(where, preMsg, args, sig.getParamTypes()); } @Deprecated private void checkParams(Element where, String preMsg, List<Expr> args, List<WurstType> parameterTypes) { if (args.size() > parameterTypes.size()) { where.addError(preMsg + "Too many parameters."); } else if (args.size() < parameterTypes.size()) { where.addError(preMsg + "Missing parameters."); } else { for (int i = 0; i < args.size(); i++) { WurstType actual = args.get(i).attrTyp(); WurstType expected = parameterTypes.get(i); // if (expected instanceof AstElementWithTypeArgs) if (!actual.isSubtypeOf(expected, where)) { args.get(i).addError( preMsg + "Expected " + expected + " as parameter " + (i + 1) + " but found " + actual); } } } } private void visit(ExprBinary expr) { FuncLink def = expr.attrFuncLink(); if (def != null) { FunctionSignature sig = FunctionSignature.fromNameLink(def); CallSignature callSig = new CallSignature(expr.getLeft(), Collections.singletonList(expr.getRight())); callSig.checkSignatureCompatibility(sig, "" 
+ expr.getOp(), expr);
    }
}

// Member method call: computing the type reveals all errors.
private void visit(ExprMemberMethod stmtCall) {
    // calculating the exprType should reveal all errors:
    stmtCall.attrTyp();
}

// Object construction: force type and constructor resolution to surface errors.
private void visit(ExprNewObject stmtCall) {
    stmtCall.attrTyp();
    stmtCall.attrConstructorDef();
}

// Checks a modifier list: at most one visibility modifier and at most one 'static'.
private void visit(Modifiers modifiers) {
    boolean hasVis = false;
    boolean isStatic = false;
    for (Modifier m : modifiers) {
        if (m instanceof VisibilityModifier) {
            if (hasVis) {
                m.addError("Each element can only have one visibility modifier (public, private, ...)");
            }
            hasVis = true;
        } else if (m instanceof ModStatic) {
            if (isStatic) {
                m.addError("double static? - what r u trying to do?");
            }
            isStatic = true;
        }
    }
}

// Validates return statements, both inside expression-statement blocks and inside functions.
private void visit(StmtReturn s) {
    if (s.attrNearestExprStatementsBlock() != null) {
        // return inside an expression-statements block: must be last and non-void
        ExprStatementsBlock e = s.attrNearestExprStatementsBlock();
        if (e.getReturnStmt() != s) {
            s.addError("Return in a statements block can only be at the end.");
            return;
        }
        if (s.getReturnedObj() instanceof Expr) {
            Expr expr = (Expr) s.getReturnedObj();
            if (expr.attrTyp().isVoid()) {
                s.addError("Cannot return void from statements block.");
            }
        } else {
            s.addError("Cannot have empty return statement in statements block.");
        }
    } else {
        FunctionImplementation func = s.attrNearestFuncDef();
        if (func == null) {
            s.addError("return statements can only be used inside functions");
            return;
        }
        checkReturnInFunc(s, func);
    }
}

// Checks a return statement against the enclosing function's declared return type.
private void checkReturnInFunc(StmtReturn s, FunctionImplementation func) {
    WurstType returnType = func.attrReturnTyp();
    if (s.getReturnedObj() instanceof Expr) {
        Expr returned = (Expr) s.getReturnedObj();
        if (returnType.isSubtypeOf(WurstTypeVoid.instance(), s)) {
            s.addError("Cannot return a value from a function which returns nothing");
        } else {
            WurstType returnedType = returned.attrTyp();
            if (!returnedType.isSubtypeOf(returnType, s)) {
                s.addError("Cannot return " + returnedType + ", expected expression of type " + returnType);
            }
        }
    } else { // empty return
        if (!returnType.isSubtypeOf(WurstTypeVoid.instance(), s)) {
            s.addError("Missing return value");
        }
    }
}

// Validates a class: naming, extends-clause must be a class, inner classes must be static.
private void visit(ClassDef classDef) {
    checkTypeName(classDef, classDef.getName());
    if (!(classDef.getExtendedClass() instanceof NoTypeExpr)
            && !(classDef.getExtendedClass().attrTyp() instanceof WurstTypeClass)) {
        classDef.getExtendedClass().addError("Classes may only extend other classes.");
    }
    if (classDef.isInnerClass() && !classDef.attrIsStatic()) {
        classDef.addError("At the moment only static inner classes are supported.");
    }
}

// Warns when a type name does not start with an upper-case character.
private void checkTypeName(Element source, String name) {
    if (!Character.isUpperCase(name.charAt(0))) {
        source.addWarning("Type names should start with upper case characters.");
    }
}

// Validates a module: naming; resolving name links reveals errors.
private void visit(ModuleDef moduleDef) {
    checkTypeName(moduleDef, moduleDef.getName());
    // calculate all functions to find possible errors
    moduleDef.attrNameLinks();
}

// Validates 'destroy': only class/interface/module instances may be destroyed.
private void visit(ExprDestroy stmtDestroy) {
    WurstType typ = stmtDestroy.getDestroyedObj().attrTyp();
    if (typ instanceof WurstTypeModule) {
        // destroying a module instance is allowed; nothing to check
    } else if (typ instanceof WurstTypeClass) {
        WurstTypeClass c = (WurstTypeClass) typ;
        checkDestroyClass(stmtDestroy, c);
    } else if (typ instanceof WurstTypeInterface) {
        WurstTypeInterface i = (WurstTypeInterface) typ;
        checkDestroyInterface(stmtDestroy, i);
    } else {
        stmtDestroy.addError("Cannot destroy objects of type " + typ);
    }
}

// A static interface reference (the type itself) cannot be destroyed.
private void checkDestroyInterface(ExprDestroy stmtDestroy, WurstTypeInterface i) {
    if (i.isStaticRef()) {
        stmtDestroy.addError("Cannot destroy interface " + i);
    }
}

// A static class reference cannot be destroyed; records the onDestroy call for call-graph purposes.
private void checkDestroyClass(ExprDestroy stmtDestroy, WurstTypeClass c) {
    if (c.isStaticRef()) {
        stmtDestroy.addError("Cannot destroy class " + c);
    }
    calledFunctions.put(stmtDestroy.attrNearestScope(), c.getClassDef().getOnDestroy());
}

// Variable access: delegate to the shared name-reference check.
private void visit(ExprVarAccess e) {
    checkVarRef(e, e.attrIsDynamicContext());
}

// Validates an import: package must exist ("NoWurst" is a special marker) and must not be a self-import.
private void visit(WImport wImport) {
    if (wImport.attrImportedPackage() == null) {
        if (!wImport.getPackagename().equals("NoWurst")) {
            wImport.addError("Could not find imported package " +
wImport.getPackagename()); } return; } if (!wImport.attrImportedPackage().getName().equals("Wurst") && wImport.attrImportedPackage().getName().equals(wImport.attrNearestNamedScope().getName())) { wImport.addError("Packages cannot import themselves"); } } /** * check if the nameRef e is accessed correctly i.e. not using a dynamic * variable from a static context * * @param e * @param dynamicContext */ private void checkVarRef(NameRef e, boolean dynamicContext) { NameLink link = e.attrNameLink(); if (link == null) { return; } NameDef def = link.getDef(); if (def instanceof GlobalVarDef) { GlobalVarDef g = (GlobalVarDef) def; if (g.attrIsDynamicClassMember() && !dynamicContext) { e.addError("Cannot reference dynamic variable " + e.getVarName() + " from static context."); } } checkNameRefDeprecated(e, def); if (e.attrTyp() instanceof WurstTypeNamedScope) { WurstTypeNamedScope wtns = (WurstTypeNamedScope) e.attrTyp(); if (wtns.isStaticRef()) { if (!isUsedAsReceiverInExprMember(e)) { e.addError("Reference to " + e.getVarName() + " cannot be used as an expression."); } else if (e.getParent() instanceof ExprMemberMethodDotDot) { e.addError("Reference to " + e.getVarName() + " cannot be used with the cascade operator. 
Only dynamic objects are allowed."); } else if (e.getParent() instanceof ExprMemberMethod) { ExprMemberMethod em = (ExprMemberMethod) e.getParent(); if (em.attrFuncDef() instanceof ExtensionFuncDef) { e.addError("Reference to " + e.getVarName() + " can only be used for calling static methods, but not for calling extension method method '" + em.getFuncName() + "'."); } } } } } private boolean isUsedAsReceiverInExprMember(Expr e) { if (e.getParent() instanceof ExprMember) { ExprMember em = (ExprMember) e.getParent(); return em.getLeft() == e; } else if (e.getParent() instanceof StmtForIn) { // if we write for x in E, then it actually calls E.iterator(), so it is used in an ExprMember StmtForIn parent = (StmtForIn) e.getParent(); return parent.getIn() == e; } else if (e.getParent() instanceof StmtForFrom) { StmtForFrom parent = (StmtForFrom) e.getParent(); return parent.getIn() == e; } return false; } private void checkTypeBinding(HasTypeArgs e) { VariableBinding mapping = e.match(new HasTypeArgs.Matcher<VariableBinding>() { @Override public VariableBinding case_ExprNewObject(ExprNewObject e) { return e.attrTyp().getTypeArgBinding(); } @Override public VariableBinding case_ModuleUse(ModuleUse moduleUse) { return null; } @Override public VariableBinding case_TypeExprSimple(TypeExprSimple e) { return e.attrTyp().getTypeArgBinding(); } @Override public VariableBinding case_ExprFunctionCall(ExprFunctionCall e) { return e.attrTyp().getTypeArgBinding(); } @Override public VariableBinding case_ExprMemberMethodDot(ExprMemberMethodDot e) { return e.attrTyp().getTypeArgBinding(); } @Override public VariableBinding case_ExprMemberMethodDotDot(ExprMemberMethodDotDot e) { return e.attrTyp().getTypeArgBinding(); } }); if (mapping == null) { return; } for (Tuple2<TypeParamDef, WurstTypeBoundTypeParam> t : mapping) { WurstTypeBoundTypeParam boundTyp = t._2(); WurstType typ = boundTyp.getBaseType(); TypeParamDef tp = t._1(); if (tp.getTypeParamConstraints() instanceof TypeExprList) { 
// new style generics } else { // old style generics if (!typ.isTranslatedToInt() && !(e instanceof ModuleUse)) { String toIndexFuncName = ImplicitFuncs.toIndexFuncName(typ); String fromIndexFuncName = ImplicitFuncs.fromIndexFuncName(typ); Collection<FuncLink> toIndexFuncs = ImplicitFuncs.findToIndexFuncs(typ, e); Collection<FuncLink> fromIndexFuncs = ImplicitFuncs.findFromIndexFuncs(typ, e); if (toIndexFuncs.isEmpty()) { e.addError("Type parameters can only be bound to ints and class types, but " + "not to " + typ + ".\n" + "You can provide functions " + toIndexFuncName + " and " + fromIndexFuncName + " to use this type " + "with generics."); } else if (fromIndexFuncs.isEmpty()) { e.addError("Could not find function " + fromIndexFuncName + " which is required to use " + typ + " with generics."); } else { if (toIndexFuncs.size() > 1) { e.addError("There is more than one function named " + toIndexFuncName); } if (fromIndexFuncs.size() > 1) { e.addError("There is more than one function named " + fromIndexFuncName); } NameDef toIndex = Utils.getFirst(toIndexFuncs).getDef(); if (toIndex instanceof FuncDef) { FuncDef toIndexF = (FuncDef) toIndex; if (toIndexF.getParameters().size() != 1) { toIndexF.addError("Must have exactly one parameter"); } else if (!toIndexF.getParameters().get(0).attrTyp().equalsType(typ, e)) { toIndexF.addError("Parameter must be of type " + typ); } WurstType returnType = toIndexF.attrReturnTyp(); if (!returnType.equalsType(WurstTypeInt.instance(), e)) { toIndexF.addError("Return type must be of type int " + " but was " + returnType); } } else { toIndex.addError("This should be a function."); } NameDef fromIndex = Utils.getFirst(fromIndexFuncs).getDef(); if (fromIndex instanceof FuncDef) { FuncDef fromIndexF = (FuncDef) fromIndex; if (fromIndexF.getParameters().size() != 1) { fromIndexF.addError("Must have exactly one parameter"); } else if (!fromIndexF.getParameters().get(0).attrTyp() .equalsType(WurstTypeInt.instance(), e)) { 
fromIndexF.addError("Parameter must be of type int");
                        }
                        WurstType returnType = fromIndexF.attrReturnTyp();
                        if (!returnType.equalsType(typ, e)) {
                            fromIndexF.addError("Return type must be of type " + typ + " but was " + returnType);
                        }
                    } else {
                        fromIndex.addError("This should be a function.");
                    }
                }
            }
        }
    }
}

// Validates a function reference: name present, deprecation, and call-graph recording.
private void checkFuncRef(FuncRef ref) {
    if (ref.getFuncName().isEmpty()) {
        ref.addError("Missing function name.");
    }
    checkFuncDefDeprecated(ref);
    FuncLink called = ref.attrFuncLink();
    if (called == null) {
        return;
    }
    WScope scope = ref.attrNearestFuncDef();
    if (scope == null) {
        scope = ref.attrNearestScope();
    }
    if (!(ref instanceof ExprFuncRef)) {
        // ExprFuncRef is not a direct call
        calledFunctions.put(scope, called.getDef());
    }
}

// Deprecation check via a name link (delegates to the NameDef overload).
private void checkNameRefDeprecated(Element trace, NameLink link) {
    if (link != null) {
        checkNameRefDeprecated(trace, link.getDef());
    }
}

// Warns when a referenced definition carries the @deprecated annotation; uses its message if present.
private void checkNameRefDeprecated(Element trace, NameDef def) {
    if (def != null && def.hasAnnotation("@deprecated")) {
        Annotation annotation = def.getAnnotation("@deprecated");
        String msg = annotation.getAnnotationMessage();
        msg = (msg == null || msg.isEmpty()) ? "It shouldn't be used and will be removed in the future." : msg;
        trace.addWarning("<" + def.getName() + "> is deprecated. " + msg);
    }
}

// Deprecation check for function references.
private void checkFuncDefDeprecated(FuncRef ref) {
    checkNameRefDeprecated(ref, ref.attrFuncLink());
}

// A function referenced as 'code' must not take parameters (incl. the implicit 'this').
private void checkFuncRef(ExprFuncRef ref) {
    FuncLink called = ref.attrFuncLink();
    if (called == null) {
        return;
    }
    if (ref.attrTyp() instanceof WurstTypeCode) {
        if (called.getDef().attrParameterTypesIncludingReceiver().size() > 0) {
            String msg = "Can only use functions without parameters in 'code' function references.";
            if (called.getDef().attrIsDynamicClassMember()) {
                msg += "\nNote that " + called.getName()
                        + " is a dynamic function and thus has an implicit parameter 'this'.";
            }
            ref.addError(msg);
        }
    }
}

// Checks, per element kind, which modifiers are allowed on it; collects the error text per modifier.
private void checkModifiers(final HasModifier e) {
    for (final Modifier m : e.getModifiers()) {
        final StringBuilder error = new StringBuilder();
        e.match(new HasModifier.MatcherVoid() {
            @Override
            public void case_WParameter(WParameter wParameter) {
                check(ModConstant.class);
            }

            @Override
            public void case_WShortParameter(WShortParameter wShortParameter) {
                check(ModConstant.class);
            }

            @Override
            public void case_TypeParamDef(TypeParamDef typeParamDef) {
                error.append("Type Parameters must not have modifiers");
            }

            @Override
            public void case_NativeType(NativeType nativeType) {
                check(VisibilityPublic.class);
            }

            // Accepts the modifier m if its class name starts with one of the allowed class names;
            // WurstDoc is always allowed, ModVararg is allowed on parameters.
            @SafeVarargs
            private final void check(Class<? extends Modifier>... allowed) {
                if (m instanceof WurstDoc) {
                    // wurstdoc always allowed
                    return;
                }
                if (m instanceof ModVararg && e.getParent() instanceof WParameters) {
                    return;
                }
                boolean isAllowed = false;
                for (Class<? extends Modifier> a : allowed) {
                    String modName = m.getClass().getName();
                    String allowedName = a.getName();
                    // startsWith: matches generated *Impl subclasses of the modifier interfaces
                    if (modName.startsWith(allowedName)) {
                        isAllowed = true;
                        break;
                    }
                }
                if (!isAllowed) {
                    error.append("Modifier ").append(printMod(m)).append(" not allowed for ")
                            .append(Utils.printElement(e)).append(".\n Allowed are the "
                                    + "following modifiers: ");
                    boolean first = true;
                    for (Class<?
extends Modifier> c : allowed) {
                        if (!first) {
                            error.append(", ");
                        }
                        error.append(printMod(c));
                        first = false;
                    }
                }
            }

            @Override
            public void case_NativeFunc(NativeFunc nativeFunc) {
                check(VisibilityPublic.class, Annotation.class);
            }

            @Override
            public void case_ModuleInstanciation(ModuleInstanciation moduleInstanciation) {
                check(VisibilityPrivate.class, VisibilityProtected.class);
            }

            @Override
            public void case_ModuleDef(ModuleDef moduleDef) {
                check(VisibilityPublic.class);
            }

            @Override
            public void case_LocalVarDef(LocalVarDef localVarDef) {
                check(ModConstant.class);
                if (localVarDef.hasAnnotation("@compiletime")) {
                    localVarDef.getAnnotation("@compiletime")
                            .addWarning("The annotation '@compiletime' has no effect on variables.");
                }
            }

            @Override
            public void case_GlobalVarDef(GlobalVarDef g) {
                // class/module members may carry visibility and static; free globals only public/constant
                if (g.attrNearestClassOrModule() != null) {
                    check(VisibilityPrivate.class, VisibilityProtected.class, ModStatic.class,
                            ModConstant.class, Annotation.class);
                } else {
                    check(VisibilityPublic.class, ModConstant.class, Annotation.class);
                }
                if (g.hasAnnotation("@compiletime")) {
                    g.getAnnotation("@compiletime")
                            .addWarning("The annotation '@compiletime' has no effect on variables.");
                }
            }

            @Override
            public void case_FuncDef(FuncDef f) {
                if (f.attrNearestStructureDef() != null) {
                    if (f.attrNearestStructureDef() instanceof InterfaceDef) {
                        check(VisibilityPrivate.class, VisibilityProtected.class, ModAbstract.class,
                                ModOverride.class, Annotation.class);
                    } else {
                        check(VisibilityPrivate.class, VisibilityProtected.class, ModAbstract.class,
                                ModOverride.class, ModStatic.class, Annotation.class);
                        if (f.attrNearestStructureDef() instanceof ClassDef) {
                            if (f.attrIsStatic() && f.attrIsAbstract()) {
                                f.addError("Static functions cannot be abstract.");
                            }
                        }
                    }
                } else {
                    check(VisibilityPublic.class, Annotation.class);
                }
                // @compiletime functions are executed by the compiler: no parameters, must be static
                if (f.attrIsCompiletime()) {
                    if (f.getParameters().size() > 0) {
                        f.addError("Functions annotated '@compiletime' may not take parameters."
                                + "\nNote: The annotation marks functions to be executed by wurst at compiletime.");
                    } else if (f.attrIsDynamicClassMember()) {
                        f.addError("Functions annotated '@compiletime' must be static."
                                + "\nNote: The annotation marks functions to be executed by wurst at compiletime.");
                    }
                }
            }

            @Override
            public void case_ExtensionFuncDef(ExtensionFuncDef extensionFuncDef) {
                check(VisibilityPublic.class, Annotation.class);
            }

            @Override
            public void case_ConstructorDef(ConstructorDef constructorDef) {
                check(VisibilityPrivate.class);
            }

            @Override
            public void case_ClassDef(ClassDef classDef) {
                check(VisibilityPublic.class, ModAbstract.class, ModStatic.class);
                if (!classDef.isInnerClass() && classDef.attrIsStatic()) {
                    classDef.addError("Top-level class " + classDef.getName() + " cannot be static. "
                            + "Only inner classes can be declared static.");
                }
            }

            @Override
            public void case_InterfaceDef(InterfaceDef interfaceDef) {
                check(VisibilityPublic.class);
            }

            @Override
            public void case_TupleDef(TupleDef tupleDef) {
                check(VisibilityPublic.class);
            }

            @Override
            public void case_WPackage(WPackage wPackage) {
                check();
            }

            @Override
            public void case_EnumDef(EnumDef enumDef) {
                check(VisibilityPublic.class);
            }

            @Override
            public void case_EnumMember(EnumMember enumMember) {
                check();
            }
        });
        if (error.length() > 0) {
            if (m.attrSource().getFile().endsWith(".jurst")) {
                // for jurst only add a warning:
                m.addWarning(error.toString());
            } else {
                m.addError(error.toString());
            }
        }
    }
}

// Renders a modifier class as a short lower-case keyword for error messages.
private static String printMod(Class<?
extends Modifier> c) { String name = c.getName().toLowerCase(); name = name.replaceFirst("^.*\\.", ""); name = name.replaceAll("^(mod|visibility)", ""); name = name.replaceAll("impl$", ""); return name; } private static String printMod(Modifier m) { if (m instanceof Annotation) { return ((Annotation) m).getAnnotationType(); } return printMod(m.getClass()); } private void checkConstructor(ConstructorDef d) { if (d.attrNearestClassOrModule() instanceof ModuleDef) { if (d.getParameters().size() > 0) { d.getParameters().addError("Module constructors must not have parameters."); } } StructureDef s = d.attrNearestStructureDef(); if (s instanceof ClassDef) { ClassDef c = (ClassDef) s; WurstTypeClass ct = c.attrTypC(); WurstTypeClass extendedClass = ct.extendedClass(); if (extendedClass != null) { // check if super constructor is called correctly... // TODO check constr: get it from ct so that it has the correct type binding ConstructorDef sc = d.attrSuperConstructor(); if (sc == null) { d.addError("No super constructor found."); } else { List<WurstType> paramTypes = Lists.newArrayList(); for (WParameter p : sc.getParameters()) { paramTypes.add(p.attrTyp()); } if (d.getSuperConstructorCall() instanceof NoSuperConstructorCall && paramTypes.size() > 0) { c.addError("The extended class <" + extendedClass.getName() + "> does not expose a no-arg constructor. " + "You must define a constructor that calls super(..) 
appropriately, in this class."); } else { checkParams(d, "Incorrect call to super constructor: ", superArgs(d), paramTypes); } } } } else { if (d.getSuperConstructorCall() instanceof SomeSuperConstructorCall) { d.addError("Module constructors cannot have super calls."); } } } private void checkArrayAccess(ExprVarArrayAccess ea) { checkNameRefDeprecated(ea, ea.tryGetNameDef()); for (Expr index : ea.getIndexes()) { if (!(index.attrTyp().isSubtypeOf(WurstTypeInt.instance(), ea))) { index.addError("Arrayindices have to be of type int"); } } } private void checkInterfaceDef(InterfaceDef i) { checkTypeName(i, i.getName()); // TODO check if functions are refinements } private void checkNewObj(ExprNewObject e) { ConstructorDef constr = e.attrConstructorDef(); if (constr != null) { calledFunctions.put(e.attrNearestScope(), constr); if (constr.attrNearestClassDef().attrIsAbstract()) { e.addError("Cannot create an instance of the abstract class " + constr.attrNearestClassDef().getName()); return; } checkParams(e, "Wrong object creation: ", e.getArgs(), e.attrFunctionSignature()); } } private void nameDefsMustNotBeNamedAfterJassNativeTypes(NameDef n) { PackageOrGlobal p = n.attrNearestPackage(); if (p == null) { n.addError("Not in package or global: " + n.getName()); } // checkIfTypeDefExists(n, p); // if (p instanceof WPackage) { // // check global scope // p = p.getParent().attrNearestPackage(); // checkIfTypeDefExists(n, p); // } } private void checkMemberVar(ExprMemberVar e) { if (e.getVarName().length() == 0) { e.addError("Incomplete member access."); } if (e.getParent() instanceof WStatements) { e.addError("Incomplete statement."); } } private void checkPackageName(CompilationUnit cu) { if (cu.getPackages().size() == 1 && Utils.isWurstFile(cu.getCuInfo().getFile())) { // only one package in a wurst file WPackage p = cu.getPackages().get(0); if (!Utils.fileName(cu.getCuInfo().getFile()).equals(p.getName() + ".wurst") && 
!Utils.fileName(cu.getCuInfo().getFile()).equals(p.getName() + ".jurst")) {
            p.addError("The file must have the same name as the package " + p.getName());
        }
    }
}

// Forces computation of the package map, which reports duplicate-package errors as a side effect.
private void checkForDuplicatePackages(WurstModel model) {
    model.attrPackages();
}

// Special-cases natives the optimizer cannot handle generically:
// TriggerRegisterVariableEvent (tracks protected variables / wrapper functions) and ExecuteFunc.
private void checkBannedFunctions(ExprFunctionCall e) {
    if (e.getFuncName().equals("TriggerRegisterVariableEvent")) {
        if (e.getArgs().size() > 1) {
            if (e.getArgs().get(1) instanceof ExprStringVal) {
                // constant variable name: remember it so the optimizer keeps it
                ExprStringVal varName = (ExprStringVal) e.getArgs().get(1);
                TRVEHelper.protectedVariables.add(varName.getValS());
                WLogger.info("keep: " + varName.getValS());
                return;
            } else if (e.getArgs().get(1) instanceof ExprVarAccess) {
                // Check if this is a two line hook... thanks Bribe
                // NOTE(review): varAccess is assigned but never used below
                ExprVarAccess varAccess = (ExprVarAccess) e.getArgs().get(1);
                // NOTE(review): nearestFunc is @Nullable but dereferenced without a null check — verify
                @Nullable FunctionImplementation nearestFunc = e.attrNearestFuncDef();
                WStatements fbody = nearestFunc.getBody();
                if (e.getParent() instanceof StmtReturn && fbody.size() <= 4
                        && fbody.get(fbody.size() - 2).structuralEquals(e.getParent())) {
                    // recognize a wrapper function with the exact native signature
                    WParameters params = nearestFunc.getParameters();
                    if (params.size() == 4
                            && ((TypeExprSimple) params.get(0).getTyp()).getTypeName().equals("trigger")
                            && ((TypeExprSimple) params.get(1).getTyp()).getTypeName().equals("string")
                            && ((TypeExprSimple) params.get(2).getTyp()).getTypeName().equals("limitop")
                            && ((TypeExprSimple) params.get(3).getTyp()).getTypeName().equals("real")) {
                        trveWrapperFuncs.add(nearestFunc.getName());
                        WLogger.info("found wrapper: " + nearestFunc.getName());
                        return;
                    }
                }
            }
        } else {
            e.addError("Map contains TriggerRegisterVariableEvent with non-constant arguments. Can't be optimized.");
        }
    }
    if (e.getFuncName().equals("ExecuteFunc")) {
        // executeFunc can only use constant string arguments
        if (e.getArgs().size() != 1) {
            e.addError("Wrong number of args");
            return;
        }
        if (e.getArgs().get(0) instanceof ExprStringVal) {
            ExprStringVal s = (ExprStringVal) e.getArgs().get(0);
            String exFunc = s.getValS();
            Collection<FuncLink> funcs = e.lookupFuncs(exFunc);
            if (funcs.isEmpty()) {
                e.addError("Could not find function " + exFunc + ".");
                return;
            }
            if (funcs.size() > 1) {
                StringBuilder alternatives = new StringBuilder();
                for (NameLink nameLink : funcs) {
                    alternatives.append("\n - ")
                            .append(Utils.printElementWithSource(Optional.of(nameLink.getDef())));
                }
                e.addError("Ambiguous function name: " + exFunc + ". Alternatives are: " + alternatives);
                return;
            }
            FuncLink func = Utils.getFirst(funcs);
            if (func.getParameterTypes().size() != 0) {
                e.addError("Function " + exFunc + " must not have any parameters.");
            }
        } else {
            e.addError("Wurst does only support ExecuteFunc with a single string as argument.");
        }
    }
}

// A switch expression must be an int, a string, or a (non-static) enum value.
private boolean isViableSwitchtype(Expr expr) {
    WurstType typ = expr.attrTyp();
    if (typ.equalsType(WurstTypeInt.instance(), null)
            || typ.equalsType(WurstTypeString.instance(), null)) {
        return true;
    } else if (typ instanceof WurstTypeEnum) {
        WurstTypeEnum wte = (WurstTypeEnum) typ;
        return !wte.isStaticRef();
    } else {
        return false;
    }
}

// Validates a switch: viable type, case types, duplicate cases, exhaustiveness, non-empty.
private void checkSwitch(SwitchStmt s) {
    if (!isViableSwitchtype(s.getExpr())) {
        s.addError("The type " + s.getExpr().attrTyp()
                + " is not viable as switchtype.\nViable switchtypes: int, string, enum");
    } else {
        List<Expr> switchExprs = s.getCases().stream()
                .flatMap(e -> e.getExpressions().stream())
                .collect(Collectors.toList());
        for (Expr cExpr : switchExprs) {
            if (!cExpr.attrTyp().isSubtypeOf(s.getExpr().attrTyp(), cExpr)) {
                cExpr.addError("The type " + cExpr.attrTyp() + " does not match the switchtype "
                        + s.getExpr().attrTyp() + ".");
            }
        }
        // pairwise structural comparison to detect duplicated case expressions
        for (int i = 0; i < switchExprs.size(); i++) {
            Expr ei =
switchExprs.get(i);
            for (int j = 0; j < i; j++) {
                Expr ej = switchExprs.get(j);
                if (ei.structuralEquals(ej)) {
                    ei.addError("The case " + Utils.prettyPrint(ei) + " is already handled in line "
                            + ej.attrSource().getLine());
                    return;
                }
            }
        }
    }
    for (String unhandledCase : s.calculateUnhandledCases()) {
        s.addError(unhandledCase + " not covered in switchstatement and no default found.");
    }
    if (s.getCases().isEmpty()) {
        s.addError("Switch statement without any cases.");
    }
}

// Recursively forces computation of next-statement flow attributes over the whole subtree.
public static void computeFlowAttributes(Element node) {
    if (node instanceof WStatement) {
        WStatement s = (WStatement) node;
        s.attrNextStatements();
    }
    // traverse childs
    for (int i = 0; i < node.size(); i++) {
        computeFlowAttributes(node.get(i));
    }
}

// Arrays of 'code' are not supported by the target; suggest alternatives.
private void checkCodeArrays(TypeExprArray e) {
    if (e.getBase() instanceof TypeExprSimple) {
        TypeExprSimple base = (TypeExprSimple) e.getBase();
        if (base.getTypeName().equals("code")) {
            e.addError("Code arrays are not supported. Try using an array of triggers or conditionfuncs.");
        }
    }
}

/**
 * checks if func1 can override func2
 */
public static boolean canOverride(FuncLink func1, FuncLink func2, boolean allowStaticOverride) {
    return checkOverride(func1, func2, allowStaticOverride) == null;
}

/**
 * checks if func1 can override func2
 * <p>
 * Returns null if yes and an error message if not.
 */
public static String checkOverride(FuncLink func1, FuncLink func2, boolean allowStaticOverride) {
    // static/dynamic compatibility
    if (!allowStaticOverride) {
        if (func1.isStatic()) {
            return "Static method " + func1.getName() + " cannot override other methods.";
        }
        if (func2.isStatic()) {
            return "Static " + Utils.printElementWithSource(Optional.of(func2.getDef()))
                    + " cannot be overridden.";
        }
    } else {
        if (func1.isStatic() && !func2.isStatic()) {
            return "Static method " + func1.getName() + " cannot override dynamic "
                    + Utils.printElementWithSource(Optional.of(func2.getDef())) + ".";
        } else if (!func1.isStatic() && func2.isStatic()) {
            return "Method " + func1.getName() + " cannot override static "
                    + Utils.printElementWithSource(Optional.of(func2.getDef())) + ".";
        }
    }
    // vararg-ness must match
    if (func1.isVarargMethod() && !func2.isVarargMethod()) {
        return "Vararg method " + func1.getName() + " cannot override non-vararg method "
                + Utils.printElementWithSource(Optional.of(func2.getDef())) + ".";
    }
    if (!func1.isVarargMethod() && func2.isVarargMethod()) {
        return "Non-vararg method " + func1.getName() + " cannot override vararg method "
                + Utils.printElementWithSource(Optional.of(func2.getDef())) + ".";
    }
    // same arity
    int paramCount2 = func2.getParameterTypes().size();
    int paramCount1 = func1.getParameterTypes().size();
    if (paramCount1 != paramCount2) {
        return Utils.printElement(func2.getDef()) + " takes " + paramCount2
                + " parameters, but there are only " + paramCount1 + " parameters here.";
    }
    // contravariant parametertypes
    for (int i = 0; i < paramCount1; i++) {
        WurstType type1 = func1.getParameterType(i);
        WurstType type2 = func2.getParameterType(i);
        if (!type1.isSupertypeOf(type2, func1.getDef())) {
            return "Parameter " + type1 + " " + func1.getParameterName(i) + " should have type " + type2
                    + " to override " + Utils.printElementWithSource(Optional.of(func2.getDef())) + ".";
        }
    }
    // covariant return types
    if (!func1.getReturnType().isSubtypeOf(func2.getReturnType(), func1.getDef())) {
        return "Return type should be " +
func2.getReturnType() + " to override " + Utils.printElementWithSource(Optional.of(func2.getDef())) + "."; } // no error return null; } private void checkForDuplicateNames(WScope scope) { ImmutableMultimap<String, DefLink> links = scope.attrNameLinks(); for (String name : links.keySet()) { ImmutableCollection<DefLink> nameLinks = links.get(name); if (nameLinks.size() <= 1) { continue; } @Nullable List<FuncLink> funcs = null; @Nullable List<NameLink> other = null; for (NameLink nl : nameLinks) { if (nl.getDefinedIn() == scope) { if (nl instanceof FuncLink) { if (funcs == null) { funcs = Lists.newArrayList(); } FuncLink funcLink = (FuncLink) nl; for (FuncLink link : funcs) { if (!distinctFunctions(funcLink, link)) { funcLink.getDef().addError( "Function already defined : " + Utils.printElementWithSource(Optional.of(link.getDef()))); link.getDef().addError( "Function already defined : " + Utils.printElementWithSource(Optional.of(funcLink.getDef()))); } } funcs.add(funcLink); } else { if (other == null) { other = Lists.newArrayList(); } other.add(nl); } } } if (other != null && other.size() > 1) { other.sort(Comparator.comparingInt(o -> o.getDef().attrSource().getLeftPos())); NameLink l1 = other.get(0); for (int j = 1; j < other.size(); j++) { other.get(j).getDef().addError("An element with name " + name + " already exists: " + Utils.printElementWithSource(Optional.of(l1.getDef()))); } } } } private boolean distinctFunctions(FuncLink nl1, FuncLink nl2) { if (receiverTypesDifferent(nl1, nl2)) { return true; } FunctionDefinition f1 = nl1.getDef(); FunctionDefinition f2 = nl2.getDef(); WParameters ps1 = f1.getParameters(); WParameters ps2 = f2.getParameters(); if (ps1.size() != ps2.size()) { return true; } return parametersTypeDisjunct(ps1, ps2); } private boolean receiverTypesDifferent(FuncLink nl1, FuncLink nl2) { if (nl1.getReceiverType() == null) { return nl2.getReceiverType() != null; } else { return nl2.getReceiverType() == null || 
!nl1.getReceiverType().equalsType(nl2.getReceiverType(), nl1.getDef()); } } private void checkForDuplicateImports(WPackage p) { Set<String> imports = Sets.newLinkedHashSet(); for (WImport imp : p.getImports()) { if (!imports.add(imp.getPackagename())) { imp.addError("The package " + imp.getPackagename() + " is already imported."); } } } private void checkVarDef(VarDef v) { WurstType vtype = v.attrTyp(); if (vtype instanceof WurstTypeCode && v.attrIsDynamicClassMember()) { v.addError("Code members not allowed as dynamic class members (variable " + v.getName() + ")\n" + "Try using a trigger or conditionfunc instead."); } if (v instanceof GlobalOrLocalVarDef) { GlobalOrLocalVarDef g = (GlobalOrLocalVarDef) v; if (g.attrIsConstant() && g.getInitialExpr() instanceof NoExpr && !g.attrIsDynamicClassMember()) { g.addError("Constant variable " + g.getName() + " needs an initial value."); } } if (vtype instanceof WurstTypeArray) { WurstTypeArray wta = (WurstTypeArray) vtype; switch (wta.getDimensions()) { case 0: v.addError("0-dimensional arrays are not allowed"); break; case 1: if (v.attrIsDynamicClassMember() && wta.getSize(0) <= 0) { v.addError("Array members require a fixed size greater 0."); } break; default: v.addError("Multidimensional Arrays are not yet supported."); break; } } if (vtype instanceof WurstTypeNull) { v.addError("Initial value of variable " + v.getName() + " is 'null'. 
Specify a concrete type."); } } private void checkLocalShadowing(LocalVarDef v) { NameLink shadowed = v.getParent().getParent().lookupVar(v.getName(), false); if (shadowed != null) { if (shadowed.getDef() instanceof LocalVarDef) { v.addError("Variable " + v.getName() + " hides an other local variable with the same name."); } else if (shadowed.getDef() instanceof WParameter) { v.addError("Variable " + v.getName() + " hides a parameter with the same name."); } } } private void checkConstructorSuperCall(ConstructorDef c) { if (c.getSuperConstructorCall() instanceof SomeSuperConstructorCall) { if (c.attrNearestClassDef() != null) { ClassDef classDef = c.attrNearestClassDef(); if (classDef.getExtendedClass() instanceof NoTypeExpr) { c.addError("Super call in a class which extends nothing."); } } } } private void checkParameter(WParameter param) { if (param.attrTyp() instanceof WurstTypeArray) { param.addError("Cannot use arrays as parameters."); } } }
wurstscript/WurstScript
de.peeeq.wurstscript/src/main/java/de/peeeq/wurstscript/validation/WurstValidator.java
Java
apache-2.0
98,606
package fr.iut.csid.empower.elearning.core.service.impl; import javax.inject.Inject; import javax.inject.Named; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import fr.iut.csid.empower.elearning.core.domain.user.Teacher; import fr.iut.csid.empower.elearning.core.dto.impl.UserDTO; import fr.iut.csid.empower.elearning.core.exception.UserNotExistsException; import fr.iut.csid.empower.elearning.core.service.TeacherService; import fr.iut.csid.empower.elearning.core.service.dao.user.TeacherDAO; /** * */ @Named public class TeacherServiceImpl extends AbstractCrudService<Teacher, Long> implements TeacherService { private Logger logger = LoggerFactory.getLogger(TeacherServiceImpl.class); @Inject private TeacherDAO teacherDAO; @Override protected JpaRepository<Teacher, Long> getDAO() { return teacherDAO; } @Override public Teacher findByLogin(String login) { return teacherDAO.findByLogin(login); } @Transactional(propagation = Propagation.REQUIRED) public Teacher createFromDTO(UserDTO entityDTO) { logger.info("Try saving entityDTO [" + entityDTO.toString() + "] : [" + entityDTO.getFirstName() + "][" + entityDTO.getLastName() + "][" + entityDTO.getLogin() + "][" + entityDTO.getPassword() + "][" + entityDTO.getEmail() + "]"); Teacher teacher = new Teacher(entityDTO.getFirstName(), entityDTO.getLastName(), entityDTO.getLogin(), entityDTO.getPassword(), entityDTO.getEmail()); return teacherDAO.save(teacher); } @Transactional(propagation = Propagation.REQUIRED) public Teacher saveFromDTO(UserDTO entityDTO, Long id) { Teacher teacher = teacherDAO.findOne(id); if (teacher != null) { // Pas de questions, on reporte tous les changements teacher.setFirstName(entityDTO.getFirstName()); teacher.setLastName(entityDTO.getLastName()); teacher.setLogin(entityDTO.getLogin()); 
teacher.setEmail(entityDTO.getEmail()); // TODO ??? sécurité ??? teacher.setPassword(entityDTO.getPassword()); return teacherDAO.save(teacher); } else throw new UserNotExistsException(); } }
piibl/elearning-parent
elearning-core/src/main/java/fr/iut/csid/empower/elearning/core/service/impl/TeacherServiceImpl.java
Java
apache-2.0
2,254
package com.damocles.sample;

import com.damocles.R;
import com.damocles.sample.util.Utils;

import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;

/**
 * Demo of an alphabetically sectioned ListView: each item whose first letter
 * differs from its predecessor's shows an in-row section header, and a pinned
 * TextView above the list mirrors the section of the topmost visible item.
 */
public class ListViewSectionActivity extends AppCompatActivity {

    // Pinned header above the list showing the current section letter.
    private TextView mTopHeader;
    private ListView mListView;
    // Last first-visible position seen, so the pinned header is only updated on change.
    private int mTopVisiblePosition = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_listview_section);
        Utils.initToolbar(this, R.id.listview_section_toolbar);
        initViews();
    }

    /** Binds views, installs the adapter and the scroll listener that drives the pinned header. */
    private void initViews() {
        mTopHeader = (TextView) findViewById(R.id.listview_section_header);
        mListView = (ListView) findViewById(R.id.listview_section_list);
        mListView.setAdapter(new SectionAdapter(COUNTRIES));
        mListView.setOnScrollListener(new AbsListView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(AbsListView view, int scrollState) {
            }

            @Override
            public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount,
                    int totalItemCount) {
                // Only refresh the pinned header when the topmost row actually changes.
                if (firstVisibleItem != mTopVisiblePosition) {
                    mTopVisiblePosition = firstVisibleItem;
                    setTopHeader(firstVisibleItem);
                }
            }
        });
        setTopHeader(0);
    }

    /** Shows the first letter of the item at {@code position} in the pinned header. */
    private void setTopHeader(int position) {
        mTopHeader.setText(COUNTRIES[position].substring(0, 1));
    }

    /** ArrayAdapter that toggles a per-row section header at each first-letter boundary. */
    private class SectionAdapter extends ArrayAdapter<String> {

        public SectionAdapter(String[] objs) {
            super(ListViewSectionActivity.this, R.layout.activity_listview_section_item, R.id
                    .listview_section_list_item_label, objs);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = ListViewSectionActivity.this.getLayoutInflater().inflate(R.layout
                        .activity_listview_section_item, parent, false);
            }
            TextView header = (TextView) convertView.findViewById(R.id.listview_section_header);
            String label = getItem(position);
            // Show the header when this item's first letter differs from the previous item's.
            if (position == 0 || getItem(position - 1).charAt(0) != label.charAt(0)) {
                header.setText(label.substring(0, 1));
                header.setVisibility(View.VISIBLE);
            } else {
                header.setVisibility(View.GONE);
            }
            // super.getView reuses the (possibly just inflated) convertView and binds the label.
            return super.getView(position, convertView, parent);
        }
    }

    // Alphabetically sorted demo data; section boundaries are derived from first letters.
    private static final String[] COUNTRIES = new String[] {
            "Afghanistan", "Albania", "Algeria", "American Samoa", "Andorra", "Angola", "Anguilla",
            "Antarctica", "Antigua and Barbuda", "Argentina", "Armenia", "Aruba", "Australia",
            "Austria", "Azerbaijan", "Bahrain", "Bangladesh", "Barbados", "Belarus", "Belgium",
            "Belize", "Benin", "Bermuda", "Bhutan", "Bolivia", "Bosnia and Herzegovina",
            "Botswana", "Bouvet Island", "Brazil", "British Indian Ocean Territory",
            "British Virgin Islands", "Brunei", "Bulgaria", "Burkina Faso", "Burundi",
            "Cote d'Ivoire", "Cambodia", "Cameroon", "Canada", "Cape Verde", "Cayman Islands",
            "Central African Republic", "Chad", "Chile", "China", "Christmas Island",
            "Cocos (Keeling) Islands", "Colombia", "Comoros", "Congo", "Cook Islands",
            "Costa Rica", "Croatia", "Cuba", "Cyprus", "Czech Republic",
            "Democratic Republic of the Congo", "Denmark", "Djibouti", "Dominica",
            "Dominican Republic", "East Timor", "Ecuador", "Egypt", "El Salvador",
            "Equatorial Guinea", "Eritrea", "Estonia", "Ethiopia", "Faeroe Islands",
            "Falkland Islands", "Fiji", "Finland", "Former Yugoslav Republic of Macedonia",
            "France", "French Guiana", "French Polynesia", "French Southern Territories",
            "Gabon", "Georgia", "Germany", "Ghana", "Gibraltar", "Greece", "Greenland",
            "Grenada", "Guadeloupe", "Guam", "Guatemala", "Guinea", "Guinea-Bissau", "Guyana",
            "Haiti", "Heard Island and McDonald Islands", "Honduras", "Hong Kong", "Hungary",
            "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland", "Israel", "Italy",
            "Jamaica", "Japan", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Kuwait",
            "Kyrgyzstan", "Laos", "Latvia", "Lebanon", "Lesotho", "Liberia", "Libya",
            "Liechtenstein", "Lithuania", "Luxembourg", "Macau", "Madagascar", "Malawi",
            "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Martinique",
            "Mauritania", "Mauritius", "Mayotte", "Mexico", "Micronesia", "Moldova", "Monaco",
            "Mongolia", "Montserrat", "Morocco", "Mozambique", "Myanmar", "Namibia", "Nauru",
            "Nepal", "Netherlands", "Netherlands Antilles", "New Caledonia", "New Zealand",
            "Nicaragua", "Niger", "Nigeria", "Niue", "Norfolk Island", "North Korea",
            "Northern Marianas", "Norway", "Oman", "Pakistan", "Palau", "Panama",
            "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Pitcairn Islands", "Poland",
            "Portugal", "Puerto Rico", "Qatar", "Reunion", "Romania", "Russia", "Rwanda",
            "Sqo Tome and Principe", "Saint Helena", "Saint Kitts and Nevis", "Saint Lucia",
            "Saint Pierre and Miquelon", "Saint Vincent and the Grenadines", "Samoa",
            "San Marino", "Saudi Arabia", "Senegal", "Seychelles", "Sierra Leone", "Singapore",
            "Slovakia", "Slovenia", "Solomon Islands", "Somalia", "South Africa",
            "South Georgia and the South Sandwich Islands", "South Korea", "Spain", "Sri Lanka",
            "Sudan", "Suriname", "Svalbard and Jan Mayen", "Swaziland", "Sweden", "Switzerland",
            "Syria", "Taiwan", "Tajikistan", "Tanzania", "Thailand", "The Bahamas", "The Gambia",
            "Togo", "Tokelau", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey",
            "Turkmenistan", "Turks and Caicos Islands", "Tuvalu", "Uganda", "Ukraine",
            "United Arab Emirates", "United Kingdom", "United States",
            "United States Minor Outlying Islands", "Uruguay", "Uzbekistan", "Vanuatu",
            "Vatican City", "Venezuela", "Vietnam", "Virgin Islands", "Wallis and Futuna",
            "Western Sahara", "Yemen", "Yugoslavia", "Zambia", "Zimbabwe"};
}
zhanglong-daniel/Damocles
app/src/main/java/com/damocles/sample/ListViewSectionActivity.java
Java
apache-2.0
6,973
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: ga4gh/sequence_annotation_service.proto package ga4gh; public final class SequenceAnnotationServiceOuterClass { private SequenceAnnotationServiceOuterClass() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface SearchFeatureSetsRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:ga4gh.SearchFeatureSetsRequest) com.google.protobuf.MessageOrBuilder { /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ java.lang.String getDatasetId(); /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ com.google.protobuf.ByteString getDatasetIdBytes(); /** * <pre> * Specifies the maximum number of results to return in a single page. * If unspecified, a system default will be used. * </pre> * * <code>optional int32 page_size = 2;</code> */ int getPageSize(); /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ java.lang.String getPageToken(); /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ com.google.protobuf.ByteString getPageTokenBytes(); } /** * <pre> ** This request maps to the body of `POST /featuresets/search` as JSON. 
* </pre> * * Protobuf type {@code ga4gh.SearchFeatureSetsRequest} */ public static final class SearchFeatureSetsRequest extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:ga4gh.SearchFeatureSetsRequest) SearchFeatureSetsRequestOrBuilder { // Use SearchFeatureSetsRequest.newBuilder() to construct. private SearchFeatureSetsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); } private SearchFeatureSetsRequest() { datasetId_ = ""; pageSize_ = 0; pageToken_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private SearchFeatureSetsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { java.lang.String s = input.readStringRequireUtf8(); datasetId_ = s; break; } case 16: { pageSize_ = input.readInt32(); break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.Builder.class); } public static final int DATASET_ID_FIELD_NUMBER = 1; private volatile java.lang.Object datasetId_; /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public java.lang.String getDatasetId() { java.lang.Object ref = datasetId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); datasetId_ = s; return s; } } /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public com.google.protobuf.ByteString getDatasetIdBytes() { java.lang.Object ref = datasetId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); datasetId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_; /** * <pre> * Specifies the maximum number of results to return in a single page. * If unspecified, a system default will be used. * </pre> * * <code>optional int32 page_size = 2;</code> */ public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; private volatile java.lang.Object pageToken_; /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. 
* </pre> * * <code>optional string page_token = 3;</code> */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getDatasetIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 1, datasetId_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!getPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 3, pageToken_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getDatasetIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(1, datasetId_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, pageSize_); } if (!getPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(3, 
pageToken_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** This request maps to the body of `POST /featuresets/search` as JSON. 
* </pre> * * Protobuf type {@code ga4gh.SearchFeatureSetsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:ga4gh.SearchFeatureSetsRequest) ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.Builder.class); } // Construct using ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); datasetId_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsRequest_descriptor; } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest getDefaultInstanceForType() { return ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.getDefaultInstance(); } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest build() { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest buildPartial() { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest result = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest(this); result.datasetId_ = datasetId_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest) { return mergeFrom((ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest other) { if (other == ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest.getDefaultInstance()) return this; if (!other.getDatasetId().isEmpty()) { datasetId_ = other.datasetId_; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object datasetId_ = ""; /** * <pre> * The `Dataset` to 
search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public java.lang.String getDatasetId() { java.lang.Object ref = datasetId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); datasetId_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public com.google.protobuf.ByteString getDatasetIdBytes() { java.lang.Object ref = datasetId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); datasetId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public Builder setDatasetId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } datasetId_ = value; onChanged(); return this; } /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public Builder clearDatasetId() { datasetId_ = getDefaultInstance().getDatasetId(); onChanged(); return this; } /** * <pre> * The `Dataset` to search. * </pre> * * <code>optional string dataset_id = 1;</code> */ public Builder setDatasetIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); datasetId_ = value; onChanged(); return this; } private int pageSize_ ; /** * <pre> * Specifies the maximum number of results to return in a single page. * If unspecified, a system default will be used. * </pre> * * <code>optional int32 page_size = 2;</code> */ public int getPageSize() { return pageSize_; } /** * <pre> * Specifies the maximum number of results to return in a single page. * If unspecified, a system default will be used. 
* </pre> * * <code>optional int32 page_size = 2;</code> */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * <pre> * Specifies the maximum number of results to return in a single page. * If unspecified, a system default will be used. * </pre> * * <code>optional int32 page_size = 2;</code> */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. 
* </pre> * * <code>optional string page_token = 3;</code> */ public Builder setPageToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 3;</code> */ public Builder setPageTokenBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:ga4gh.SearchFeatureSetsRequest) } // @@protoc_insertion_point(class_scope:ga4gh.SearchFeatureSetsRequest) private static final ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest(); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchFeatureSetsRequest> PARSER = new com.google.protobuf.AbstractParser<SearchFeatureSetsRequest>() { public SearchFeatureSetsRequest parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SearchFeatureSetsRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<SearchFeatureSetsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchFeatureSetsRequest> getParserForType() { return PARSER; } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SearchFeatureSetsResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:ga4gh.SearchFeatureSetsResponse) com.google.protobuf.MessageOrBuilder { /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ java.util.List<ga4gh.SequenceAnnotations.FeatureSet> getFeatureSetsList(); /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ ga4gh.SequenceAnnotations.FeatureSet getFeatureSets(int index); /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ int getFeatureSetsCount(); /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ java.util.List<? extends ga4gh.SequenceAnnotations.FeatureSetOrBuilder> getFeatureSetsOrBuilderList(); /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ ga4gh.SequenceAnnotations.FeatureSetOrBuilder getFeatureSetsOrBuilder( int index); /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. 
* </pre> * * <code>optional string next_page_token = 2;</code> */ java.lang.String getNextPageToken(); /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ com.google.protobuf.ByteString getNextPageTokenBytes(); } /** * <pre> * This is the response from `POST /featuresets/search` expressed as JSON. * </pre> * * Protobuf type {@code ga4gh.SearchFeatureSetsResponse} */ public static final class SearchFeatureSetsResponse extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:ga4gh.SearchFeatureSetsResponse) SearchFeatureSetsResponseOrBuilder { // Use SearchFeatureSetsResponse.newBuilder() to construct. private SearchFeatureSetsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); } private SearchFeatureSetsResponse() { featureSets_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private SearchFeatureSetsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { featureSets_ = new java.util.ArrayList<ga4gh.SequenceAnnotations.FeatureSet>(); mutable_bitField0_ |= 0x00000001; } featureSets_.add(input.readMessage(ga4gh.SequenceAnnotations.FeatureSet.parser(), extensionRegistry)); break; } case 
18: { java.lang.String s = input.readStringRequireUtf8(); nextPageToken_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { featureSets_ = java.util.Collections.unmodifiableList(featureSets_); } makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.Builder.class); } private int bitField0_; public static final int FEATURE_SETS_FIELD_NUMBER = 1; private java.util.List<ga4gh.SequenceAnnotations.FeatureSet> featureSets_; /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public java.util.List<ga4gh.SequenceAnnotations.FeatureSet> getFeatureSetsList() { return featureSets_; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public java.util.List<? extends ga4gh.SequenceAnnotations.FeatureSetOrBuilder> getFeatureSetsOrBuilderList() { return featureSets_; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public int getFeatureSetsCount() { return featureSets_.size(); } /** * <pre> * The list of matching feature sets. 
* </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSet getFeatureSets(int index) { return featureSets_.get(index); } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSetOrBuilder getFeatureSetsOrBuilder( int index) { return featureSets_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object nextPageToken_; /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. 
* </pre> * * <code>optional string next_page_token = 2;</code> */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < featureSets_.size(); i++) { output.writeMessage(1, featureSets_.get(i)); } if (!getNextPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 2, nextPageToken_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < featureSets_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, featureSets_.get(i)); } if (!getNextPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(2, nextPageToken_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * This is the response from `POST /featuresets/search` expressed as JSON. * </pre> * * Protobuf type {@code ga4gh.SearchFeatureSetsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:ga4gh.SearchFeatureSetsResponse) ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.Builder.class); } // Construct using ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getFeatureSetsFieldBuilder(); } } public Builder clear() { super.clear(); if (featureSetsBuilder_ == null) { featureSets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { featureSetsBuilder_.clear(); } nextPageToken_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeatureSetsResponse_descriptor; } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse getDefaultInstanceForType() { return ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.getDefaultInstance(); } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse build() { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse buildPartial() { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse result = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (featureSetsBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { featureSets_ = java.util.Collections.unmodifiableList(featureSets_); bitField0_ = (bitField0_ & ~0x00000001); } result.featureSets_ = featureSets_; } else { result.featureSets_ = featureSetsBuilder_.build(); } result.nextPageToken_ = nextPageToken_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse) { return mergeFrom((ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse other) { if (other == ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse.getDefaultInstance()) return this; if (featureSetsBuilder_ == null) { if (!other.featureSets_.isEmpty()) { if (featureSets_.isEmpty()) { featureSets_ = other.featureSets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFeatureSetsIsMutable(); featureSets_.addAll(other.featureSets_); } onChanged(); } } else { if (!other.featureSets_.isEmpty()) { if (featureSetsBuilder_.isEmpty()) { featureSetsBuilder_.dispose(); featureSetsBuilder_ = null; featureSets_ = other.featureSets_; bitField0_ = (bitField0_ & ~0x00000001); featureSetsBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getFeatureSetsFieldBuilder() : null; } else { featureSetsBuilder_.addAllMessages(other.featureSets_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; onChanged(); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<ga4gh.SequenceAnnotations.FeatureSet> featureSets_ = java.util.Collections.emptyList(); private void ensureFeatureSetsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { featureSets_ = new java.util.ArrayList<ga4gh.SequenceAnnotations.FeatureSet>(featureSets_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< ga4gh.SequenceAnnotations.FeatureSet, ga4gh.SequenceAnnotations.FeatureSet.Builder, ga4gh.SequenceAnnotations.FeatureSetOrBuilder> featureSetsBuilder_; /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public java.util.List<ga4gh.SequenceAnnotations.FeatureSet> getFeatureSetsList() { if (featureSetsBuilder_ == null) { return java.util.Collections.unmodifiableList(featureSets_); } else { return featureSetsBuilder_.getMessageList(); } } /** * <pre> * The list of matching feature sets. 
* </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public int getFeatureSetsCount() { if (featureSetsBuilder_ == null) { return featureSets_.size(); } else { return featureSetsBuilder_.getCount(); } } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSet getFeatureSets(int index) { if (featureSetsBuilder_ == null) { return featureSets_.get(index); } else { return featureSetsBuilder_.getMessage(index); } } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder setFeatureSets( int index, ga4gh.SequenceAnnotations.FeatureSet value) { if (featureSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeatureSetsIsMutable(); featureSets_.set(index, value); onChanged(); } else { featureSetsBuilder_.setMessage(index, value); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder setFeatureSets( int index, ga4gh.SequenceAnnotations.FeatureSet.Builder builderForValue) { if (featureSetsBuilder_ == null) { ensureFeatureSetsIsMutable(); featureSets_.set(index, builderForValue.build()); onChanged(); } else { featureSetsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder addFeatureSets(ga4gh.SequenceAnnotations.FeatureSet value) { if (featureSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeatureSetsIsMutable(); featureSets_.add(value); onChanged(); } else { featureSetsBuilder_.addMessage(value); } return this; } /** * <pre> * The list of matching feature sets. 
* </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder addFeatureSets( int index, ga4gh.SequenceAnnotations.FeatureSet value) { if (featureSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeatureSetsIsMutable(); featureSets_.add(index, value); onChanged(); } else { featureSetsBuilder_.addMessage(index, value); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder addFeatureSets( ga4gh.SequenceAnnotations.FeatureSet.Builder builderForValue) { if (featureSetsBuilder_ == null) { ensureFeatureSetsIsMutable(); featureSets_.add(builderForValue.build()); onChanged(); } else { featureSetsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder addFeatureSets( int index, ga4gh.SequenceAnnotations.FeatureSet.Builder builderForValue) { if (featureSetsBuilder_ == null) { ensureFeatureSetsIsMutable(); featureSets_.add(index, builderForValue.build()); onChanged(); } else { featureSetsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder addAllFeatureSets( java.lang.Iterable<? extends ga4gh.SequenceAnnotations.FeatureSet> values) { if (featureSetsBuilder_ == null) { ensureFeatureSetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, featureSets_); onChanged(); } else { featureSetsBuilder_.addAllMessages(values); } return this; } /** * <pre> * The list of matching feature sets. 
* </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder clearFeatureSets() { if (featureSetsBuilder_ == null) { featureSets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { featureSetsBuilder_.clear(); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public Builder removeFeatureSets(int index) { if (featureSetsBuilder_ == null) { ensureFeatureSetsIsMutable(); featureSets_.remove(index); onChanged(); } else { featureSetsBuilder_.remove(index); } return this; } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSet.Builder getFeatureSetsBuilder( int index) { return getFeatureSetsFieldBuilder().getBuilder(index); } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSetOrBuilder getFeatureSetsOrBuilder( int index) { if (featureSetsBuilder_ == null) { return featureSets_.get(index); } else { return featureSetsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public java.util.List<? extends ga4gh.SequenceAnnotations.FeatureSetOrBuilder> getFeatureSetsOrBuilderList() { if (featureSetsBuilder_ != null) { return featureSetsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(featureSets_); } } /** * <pre> * The list of matching feature sets. 
* </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSet.Builder addFeatureSetsBuilder() { return getFeatureSetsFieldBuilder().addBuilder( ga4gh.SequenceAnnotations.FeatureSet.getDefaultInstance()); } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureSet.Builder addFeatureSetsBuilder( int index) { return getFeatureSetsFieldBuilder().addBuilder( index, ga4gh.SequenceAnnotations.FeatureSet.getDefaultInstance()); } /** * <pre> * The list of matching feature sets. * </pre> * * <code>repeated .ga4gh.FeatureSet feature_sets = 1;</code> */ public java.util.List<ga4gh.SequenceAnnotations.FeatureSet.Builder> getFeatureSetsBuilderList() { return getFeatureSetsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< ga4gh.SequenceAnnotations.FeatureSet, ga4gh.SequenceAnnotations.FeatureSet.Builder, ga4gh.SequenceAnnotations.FeatureSetOrBuilder> getFeatureSetsFieldBuilder() { if (featureSetsBuilder_ == null) { featureSetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< ga4gh.SequenceAnnotations.FeatureSet, ga4gh.SequenceAnnotations.FeatureSet.Builder, ga4gh.SequenceAnnotations.FeatureSetOrBuilder>( featureSets_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); featureSets_ = null; } return featureSetsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. 
* </pre> * * <code>optional string next_page_token = 2;</code> */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public Builder setNextPageToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; onChanged(); return this; } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); onChanged(); return this; } /** * <pre> * The continuation token, which is used to page through large result sets. 
* Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public Builder setNextPageTokenBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:ga4gh.SearchFeatureSetsResponse) } // @@protoc_insertion_point(class_scope:ga4gh.SearchFeatureSetsResponse) private static final ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse(); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchFeatureSetsResponse> PARSER = new com.google.protobuf.AbstractParser<SearchFeatureSetsResponse>() { public SearchFeatureSetsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SearchFeatureSetsResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<SearchFeatureSetsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchFeatureSetsResponse> getParserForType() { return PARSER; } public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeatureSetsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetFeatureSetRequestOrBuilder extends // 
// NOTE(review): protoc-generated code (see the @@protoc_insertion_point
// markers). Do not hand-edit; regenerate from the .proto definition.
// @@protoc_insertion_point(interface_extends:ga4gh.GetFeatureSetRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The ID of the `FeatureSet` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_set_id = 1;</code>
   */
  java.lang.String getFeatureSetId();
  /**
   * <pre>
   * The ID of the `FeatureSet` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_set_id = 1;</code>
   */
  com.google.protobuf.ByteString
      getFeatureSetIdBytes();
}
/**
 * <pre>
 * This request maps to the URL `GET /featuresets/{id}`.
 * </pre>
 *
 * Protobuf type {@code ga4gh.GetFeatureSetRequest}
 */
public static final class GetFeatureSetRequest extends
    com.google.protobuf.GeneratedMessage implements
    // @@protoc_insertion_point(message_implements:ga4gh.GetFeatureSetRequest)
    GetFeatureSetRequestOrBuilder {
  // Use GetFeatureSetRequest.newBuilder() to construct.
  private GetFeatureSetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
  }
  // Default instance state: empty feature-set id (proto3 string default).
  private GetFeatureSetRequest() {
    featureSetId_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    // NOTE(review): unknown fields are not retained by this message; the
    // empty default set is always returned.
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor: consumes tag/value pairs until
  // end-of-stream (tag 0); unrecognized fields are skipped.
  private GetFeatureSetRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // tag 10 = field 1 (feature_set_id), wire type 2
            // (length-delimited); UTF-8 validity is enforced.
            java.lang.String s = input.readStringRequireUtf8();

            featureSetId_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureSetRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureSetRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.Builder.class);
  }

  public static final int FEATURE_SET_ID_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; converted lazily and
  // cached in whichever form was requested last (see accessors below).
  private volatile java.lang.Object featureSetId_;
  /**
   * <pre>
   * The ID of the `FeatureSet` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_set_id = 1;</code>
   */
  public java.lang.String getFeatureSetId() {
    java.lang.Object ref = featureSetId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      featureSetId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The ID of the `FeatureSet` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_set_id = 1;</code>
   */
  public com.google.protobuf.ByteString
      getFeatureSetIdBytes() {
    java.lang.Object ref = featureSetId_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String form and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      featureSetId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized tri-state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // proto3 semantics: the field is emitted only when non-empty.
    if (!getFeatureSetIdBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessage.writeString(output, 1, featureSetId_);
    }
  }

  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getFeatureSetIdBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessage.computeStringSize(1, featureSetId_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  // ---- Static parse entry points; all delegate to PARSER. ----
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // The default instance yields a fresh Builder; anything else seeds the
    // Builder with this message's field values.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * This request maps to the URL `GET /featuresets/{id}`.
   * </pre>
   *
   * Protobuf type {@code ga4gh.GetFeatureSetRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:ga4gh.GetFeatureSetRequest)
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureSetRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureSetRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.Builder.class);
    }

    // Construct using ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No sub-message fields, so nothing to force-initialize here.
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      featureSetId_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureSetRequest_descriptor;
    }

    public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest getDefaultInstanceForType() {
      return ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.getDefaultInstance();
    }

    public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest build() {
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest buildPartial() {
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest result = new ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest(this);
      result.featureSetId_ = featureSetId_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest) {
        return mergeFrom((ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest other) {
      // Merging the default instance is a no-op.
      if (other == ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest.getDefaultInstance()) return this;
      if (!other.getFeatureSetId().isEmpty()) {
        featureSetId_ = other.featureSetId_;
        onChanged();
      }
      // NOTE(review): onChanged() also fires unconditionally below, even when
      // nothing was copied — preserved exactly as generated.
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was successfully parsed before the failure, then
        // rethrow (unwrapped to the underlying IOException when applicable).
        parsedMessage = (ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Same String/ByteString dual representation as the message class.
    private java.lang.Object featureSetId_ = "";
    /**
     * <pre>
     * The ID of the `FeatureSet` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_set_id = 1;</code>
     */
    public java.lang.String getFeatureSetId() {
      java.lang.Object ref = featureSetId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        featureSetId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The ID of the `FeatureSet` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_set_id = 1;</code>
     */
    public com.google.protobuf.ByteString
        getFeatureSetIdBytes() {
      java.lang.Object ref = featureSetId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        featureSetId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The ID of the `FeatureSet` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_set_id = 1;</code>
     */
    public Builder setFeatureSetId(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      featureSetId_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ID of the `FeatureSet` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_set_id = 1;</code>
     */
    public Builder clearFeatureSetId() {

      featureSetId_ = getDefaultInstance().getFeatureSetId();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ID of the `FeatureSet` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_set_id = 1;</code>
     */
    public Builder setFeatureSetIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      featureSetId_ = value;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Unknown fields are intentionally dropped (no-op), matching
      // getUnknownFields() above.
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:ga4gh.GetFeatureSetRequest)
  }

  // @@protoc_insertion_point(class_scope:ga4gh.GetFeatureSetRequest)
  private static final ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest();
  }

  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetFeatureSetRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetFeatureSetRequest>() {
    public GetFeatureSetRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the wire-format parsing constructor above.
        return new GetFeatureSetRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<GetFeatureSetRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetFeatureSetRequest> getParserForType() {
    return PARSER;
  }

  public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureSetRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

public interface SearchFeaturesRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:ga4gh.SearchFeaturesRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The annotation set to search within.
* Either `feature_set_id` or
   * `parent_id` must be non-empty.
   * </pre>
   *
   * <code>optional string feature_set_id = 1;</code>
   */
  java.lang.String getFeatureSetId();
  /**
   * <pre>
   * The annotation set to search within. Either `feature_set_id` or
   * `parent_id` must be non-empty.
   * </pre>
   *
   * <code>optional string feature_set_id = 1;</code>
   */
  com.google.protobuf.ByteString
      getFeatureSetIdBytes();

  /**
   * <pre>
   * Restricts the search to direct children of the given parent `feature`
   * ID. Either `feature_set_id` or `parent_id` must be non-empty.
   * </pre>
   *
   * <code>optional string parent_id = 2;</code>
   */
  java.lang.String getParentId();
  /**
   * <pre>
   * Restricts the search to direct children of the given parent `feature`
   * ID. Either `feature_set_id` or `parent_id` must be non-empty.
   * </pre>
   *
   * <code>optional string parent_id = 2;</code>
   */
  com.google.protobuf.ByteString
      getParentIdBytes();

  /**
   * <pre>
   * Only return features on the reference with this name
   * (matched to literal reference name as imported from the GFF3).
   * </pre>
   *
   * <code>optional string reference_name = 3;</code>
   */
  java.lang.String getReferenceName();
  /**
   * <pre>
   * Only return features on the reference with this name
   * (matched to literal reference name as imported from the GFF3).
   * </pre>
   *
   * <code>optional string reference_name = 3;</code>
   */
  com.google.protobuf.ByteString
      getReferenceNameBytes();

  /**
   * <pre>
   * Required. The beginning of the window (0-based, inclusive) for which
   * overlapping features should be returned. Genomic positions are
   * non-negative integers less than reference length. Requests spanning the
   * join of circular genomes are represented as two requests one on each side
   * of the join (position 0).
   * </pre>
   *
   * <code>optional int64 start = 4;</code>
   */
  long getStart();

  /**
   * <pre>
   * Required. The end of the window (0-based, exclusive) for which overlapping
   * features should be returned.
   * </pre>
   *
   * <code>optional int64 end = 5;</code>
   */
  long getEnd();

  /**
   * <pre>
   * If specified, this query matches only annotations whose `feature_type`
   * matches one of the provided ontology terms.
   * </pre>
   *
   * <code>repeated string feature_types = 6;</code>
   */
  com.google.protobuf.ProtocolStringList
      getFeatureTypesList();
  /**
   * <pre>
   * If specified, this query matches only annotations whose `feature_type`
   * matches one of the provided ontology terms.
   * </pre>
   *
   * <code>repeated string feature_types = 6;</code>
   */
  int getFeatureTypesCount();
  /**
   * <pre>
   * If specified, this query matches only annotations whose `feature_type`
   * matches one of the provided ontology terms.
   * </pre>
   *
   * <code>repeated string feature_types = 6;</code>
   */
  java.lang.String getFeatureTypes(int index);
  /**
   * <pre>
   * If specified, this query matches only annotations whose `feature_type`
   * matches one of the provided ontology terms.
   * </pre>
   *
   * <code>repeated string feature_types = 6;</code>
   */
  com.google.protobuf.ByteString
      getFeatureTypesBytes(int index);

  /**
   * <pre>
   * Specifies the maximum number of results to return in a single page.
   * If unspecified, a system default will be used.
   * </pre>
   *
   * <code>optional int32 page_size = 7;</code>
   */
  int getPageSize();

  /**
   * <pre>
   * The continuation token, which is used to page through large result sets.
   * To get the next page of results, set this parameter to the value of
   * `next_page_token` from the previous response.
   * </pre>
   *
   * <code>optional string page_token = 8;</code>
   */
  java.lang.String getPageToken();
  /**
   * <pre>
   * The continuation token, which is used to page through large result sets.
   * To get the next page of results, set this parameter to the value of
   * `next_page_token` from the previous response.
   * </pre>
   *
   * <code>optional string page_token = 8;</code>
   */
  com.google.protobuf.ByteString
      getPageTokenBytes();
}
/**
 * <pre>
 * This request maps to the body of `POST /features/search` as JSON.
* </pre> * * Protobuf type {@code ga4gh.SearchFeaturesRequest} */ public static final class SearchFeaturesRequest extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:ga4gh.SearchFeaturesRequest) SearchFeaturesRequestOrBuilder { // Use SearchFeaturesRequest.newBuilder() to construct. private SearchFeaturesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); } private SearchFeaturesRequest() { featureSetId_ = ""; parentId_ = ""; referenceName_ = ""; start_ = 0L; end_ = 0L; featureTypes_ = com.google.protobuf.LazyStringArrayList.EMPTY; pageSize_ = 0; pageToken_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private SearchFeaturesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { java.lang.String s = input.readStringRequireUtf8(); featureSetId_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); parentId_ = s; break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); referenceName_ = s; break; } case 32: { start_ = input.readInt64(); break; } case 40: { end_ = input.readInt64(); break; } case 50: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { featureTypes_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000020; } featureTypes_.add(s); break; } case 56: { pageSize_ = input.readInt32(); break; } case 66: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; break; } } } } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { featureTypes_ = featureTypes_.getUnmodifiableView(); } makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.Builder.class); } private int bitField0_; public static final int FEATURE_SET_ID_FIELD_NUMBER = 1; private volatile java.lang.Object featureSetId_; /** * <pre> * The annotation set to search within. Either `feature_set_id` or * `parent_id` must be non-empty. * </pre> * * <code>optional string feature_set_id = 1;</code> */ public java.lang.String getFeatureSetId() { java.lang.Object ref = featureSetId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); featureSetId_ = s; return s; } } /** * <pre> * The annotation set to search within. Either `feature_set_id` or * `parent_id` must be non-empty. 
* </pre> * * <code>optional string feature_set_id = 1;</code> */ public com.google.protobuf.ByteString getFeatureSetIdBytes() { java.lang.Object ref = featureSetId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); featureSetId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PARENT_ID_FIELD_NUMBER = 2; private volatile java.lang.Object parentId_; /** * <pre> * Restricts the search to direct children of the given parent `feature` * ID. Either `feature_set_id` or `parent_id` must be non-empty. * </pre> * * <code>optional string parent_id = 2;</code> */ public java.lang.String getParentId() { java.lang.Object ref = parentId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parentId_ = s; return s; } } /** * <pre> * Restricts the search to direct children of the given parent `feature` * ID. Either `feature_set_id` or `parent_id` must be non-empty. * </pre> * * <code>optional string parent_id = 2;</code> */ public com.google.protobuf.ByteString getParentIdBytes() { java.lang.Object ref = parentId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); parentId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REFERENCE_NAME_FIELD_NUMBER = 3; private volatile java.lang.Object referenceName_; /** * <pre> * Only return features on the reference with this name * (matched to literal reference name as imported from the GFF3). 
* </pre> * * <code>optional string reference_name = 3;</code> */ public java.lang.String getReferenceName() { java.lang.Object ref = referenceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); referenceName_ = s; return s; } } /** * <pre> * Only return features on the reference with this name * (matched to literal reference name as imported from the GFF3). * </pre> * * <code>optional string reference_name = 3;</code> */ public com.google.protobuf.ByteString getReferenceNameBytes() { java.lang.Object ref = referenceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); referenceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int START_FIELD_NUMBER = 4; private long start_; /** * <pre> * Required. The beginning of the window (0-based, inclusive) for which * overlapping features should be returned. Genomic positions are * non-negative integers less than reference length. Requests spanning the * join of circular genomes are represented as two requests one on each side * of the join (position 0). * </pre> * * <code>optional int64 start = 4;</code> */ public long getStart() { return start_; } public static final int END_FIELD_NUMBER = 5; private long end_; /** * <pre> * Required. The end of the window (0-based, exclusive) for which overlapping * features should be returned. * </pre> * * <code>optional int64 end = 5;</code> */ public long getEnd() { return end_; } public static final int FEATURE_TYPES_FIELD_NUMBER = 6; private com.google.protobuf.LazyStringList featureTypes_; /** * <pre> * If specified, this query matches only annotations whose `feature_type` * matches one of the provided ontology terms. 
* </pre> * * <code>repeated string feature_types = 6;</code> */ public com.google.protobuf.ProtocolStringList getFeatureTypesList() { return featureTypes_; } /** * <pre> * If specified, this query matches only annotations whose `feature_type` * matches one of the provided ontology terms. * </pre> * * <code>repeated string feature_types = 6;</code> */ public int getFeatureTypesCount() { return featureTypes_.size(); } /** * <pre> * If specified, this query matches only annotations whose `feature_type` * matches one of the provided ontology terms. * </pre> * * <code>repeated string feature_types = 6;</code> */ public java.lang.String getFeatureTypes(int index) { return featureTypes_.get(index); } /** * <pre> * If specified, this query matches only annotations whose `feature_type` * matches one of the provided ontology terms. * </pre> * * <code>repeated string feature_types = 6;</code> */ public com.google.protobuf.ByteString getFeatureTypesBytes(int index) { return featureTypes_.getByteString(index); } public static final int PAGE_SIZE_FIELD_NUMBER = 7; private int pageSize_; /** * <pre> * Specifies the maximum number of results to return in a single page. * If unspecified, a system default will be used. * </pre> * * <code>optional int32 page_size = 7;</code> */ public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 8; private volatile java.lang.Object pageToken_; /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. 
* </pre> * * <code>optional string page_token = 8;</code> */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * <pre> * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * `next_page_token` from the previous response. * </pre> * * <code>optional string page_token = 8;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getFeatureSetIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 1, featureSetId_); } if (!getParentIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 2, parentId_); } if (!getReferenceNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 3, referenceName_); } if (start_ != 0L) { output.writeInt64(4, start_); } if (end_ != 0L) { output.writeInt64(5, end_); } for (int i = 0; i < featureTypes_.size(); i++) { com.google.protobuf.GeneratedMessage.writeString(output, 6, featureTypes_.getRaw(i)); } if (pageSize_ != 0) { output.writeInt32(7, pageSize_); } if (!getPageTokenBytes().isEmpty()) { 
com.google.protobuf.GeneratedMessage.writeString(output, 8, pageToken_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getFeatureSetIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(1, featureSetId_); } if (!getParentIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(2, parentId_); } if (!getReferenceNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(3, referenceName_); } if (start_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(4, start_); } if (end_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(5, end_); } { int dataSize = 0; for (int i = 0; i < featureTypes_.size(); i++) { dataSize += computeStringSizeNoTag(featureTypes_.getRaw(i)); } size += dataSize; size += 1 * getFeatureTypesList().size(); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(7, pageSize_); } if (!getPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(8, pageToken_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return 
newBuilder(); }
    // Builder factories: every builder is derived from DEFAULT_INSTANCE.
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * This request maps to the body of `POST /features/search` as JSON.
     * </pre>
     *
     * Protobuf type {@code ga4gh.SearchFeaturesRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:ga4gh.SearchFeaturesRequest)
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.Builder.class);
      }

      // Construct using ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields in this message, so nothing needs eager
        // field-builder initialization even when alwaysUseFieldBuilders is on.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      // Resets every field to its proto3 default; bit 0x20 of bitField0_
      // tracks mutability of the repeated feature_types list.
      public Builder clear() {
        super.clear();
        featureSetId_ = "";
        parentId_ = "";
        referenceName_ = "";
        start_ = 0L;
        end_ = 0L;
        featureTypes_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000020);
        pageSize_ = 0;
        pageToken_ = "";
        return this;
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesRequest_descriptor;
      }

      public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest getDefaultInstanceForType() {
        return ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.getDefaultInstance();
      }

      public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest build() {
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message. On first build the repeated
      // feature_types list is frozen to an unmodifiable view and the
      // mutability bit (0x20) is cleared so later mutations re-copy it.
      public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest buildPartial() {
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest result = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        result.featureSetId_ = featureSetId_;
        result.parentId_ = parentId_;
        result.referenceName_ = referenceName_;
        result.start_ = start_;
        result.end_ = end_;
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          featureTypes_ = featureTypes_.getUnmodifiableView();
          bitField0_ = (bitField0_ & ~0x00000020);
        }
        result.featureTypes_ = featureTypes_;
        result.pageSize_ = pageSize_;
        result.pageToken_ = pageToken_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest) {
          return mergeFrom((ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only non-default fields of `other` overwrite this
      // builder; the repeated feature_types list is concatenated.
      public Builder mergeFrom(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest other) {
        if (other == ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest.getDefaultInstance()) return this;
        if (!other.getFeatureSetId().isEmpty()) {
          featureSetId_ = other.featureSetId_;
          onChanged();
        }
        if (!other.getParentId().isEmpty()) {
          parentId_ = other.parentId_;
          onChanged();
        }
        if (!other.getReferenceName().isEmpty()) {
          referenceName_ = other.referenceName_;
          onChanged();
        }
        if (other.getStart() != 0L) {
          setStart(other.getStart());
        }
        if (other.getEnd() != 0L) {
          setEnd(other.getEnd());
        }
        if (!other.featureTypes_.isEmpty()) {
          if (featureTypes_.isEmpty()) {
            // Adopt the other (already immutable) list directly.
            featureTypes_ = other.featureTypes_;
            bitField0_ = (bitField0_ & ~0x00000020);
          } else {
            ensureFeatureTypesIsMutable();
            featureTypes_.addAll(other.featureTypes_);
          }
          onChanged();
        }
        if (other.getPageSize() != 0) {
          setPageSize(other.getPageSize());
        }
        if (!other.getPageToken().isEmpty()) {
          pageToken_ = other.pageToken_;
          onChanged();
        }
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        // proto3: no required fields, always initialized.
        return true;
      }

      // Stream merge: whatever parsed before a failure is still merged in
      // (via the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private java.lang.Object featureSetId_ = "";
      /**
       * <pre>
       * The annotation set to search within. Either `feature_set_id` or
       * `parent_id` must be non-empty.
* </pre>
       *
       * <code>optional string feature_set_id = 1;</code>
       */
      public java.lang.String getFeatureSetId() {
        java.lang.Object ref = featureSetId_;
        if (!(ref instanceof java.lang.String)) {
          // Field is stored as ByteString until first read; decode and
          // cache the java.lang.String form.
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          featureSetId_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <pre>
       * The annotation set to search within. Either `feature_set_id` or
       * `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string feature_set_id = 1;</code>
       */
      public com.google.protobuf.ByteString
          getFeatureSetIdBytes() {
        java.lang.Object ref = featureSetId_;
        if (ref instanceof String) {
          // Cache the UTF-8 encoded form alongside reads of the bytes view.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          featureSetId_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <pre>
       * The annotation set to search within. Either `feature_set_id` or
       * `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string feature_set_id = 1;</code>
       */
      public Builder setFeatureSetId(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }

        featureSetId_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The annotation set to search within. Either `feature_set_id` or
       * `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string feature_set_id = 1;</code>
       */
      public Builder clearFeatureSetId() {

        featureSetId_ = getDefaultInstance().getFeatureSetId();
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The annotation set to search within. Either `feature_set_id` or
       * `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string feature_set_id = 1;</code>
       */
      public Builder setFeatureSetIdBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);

        featureSetId_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object parentId_ = "";
      /**
       * <pre>
       * Restricts the search to direct children of the given parent `feature`
       * ID. Either `feature_set_id` or `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string parent_id = 2;</code>
       */
      public java.lang.String getParentId() {
        java.lang.Object ref = parentId_;
        if (!(ref instanceof java.lang.String)) {
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          parentId_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <pre>
       * Restricts the search to direct children of the given parent `feature`
       * ID. Either `feature_set_id` or `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string parent_id = 2;</code>
       */
      public com.google.protobuf.ByteString
          getParentIdBytes() {
        java.lang.Object ref = parentId_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          parentId_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <pre>
       * Restricts the search to direct children of the given parent `feature`
       * ID. Either `feature_set_id` or `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string parent_id = 2;</code>
       */
      public Builder setParentId(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }

        parentId_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Restricts the search to direct children of the given parent `feature`
       * ID. Either `feature_set_id` or `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string parent_id = 2;</code>
       */
      public Builder clearParentId() {

        parentId_ = getDefaultInstance().getParentId();
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Restricts the search to direct children of the given parent `feature`
       * ID. Either `feature_set_id` or `parent_id` must be non-empty.
       * </pre>
       *
       * <code>optional string parent_id = 2;</code>
       */
      public Builder setParentIdBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

        parentId_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object referenceName_ = "";
      /**
       * <pre>
       * Only return features on the reference with this name
       * (matched to literal reference name as imported from the GFF3).
       * </pre>
       *
       * <code>optional string reference_name = 3;</code>
       */
      public java.lang.String getReferenceName() {
        java.lang.Object ref = referenceName_;
        if (!(ref instanceof java.lang.String)) {
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          referenceName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <pre>
       * Only return features on the reference with this name
       * (matched to literal reference name as imported from the GFF3).
       * </pre>
       *
       * <code>optional string reference_name = 3;</code>
       */
      public com.google.protobuf.ByteString
          getReferenceNameBytes() {
        java.lang.Object ref = referenceName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          referenceName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <pre>
       * Only return features on the reference with this name
       * (matched to literal reference name as imported from the GFF3).
* </pre>
       *
       * <code>optional string reference_name = 3;</code>
       */
      public Builder setReferenceName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }

        referenceName_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Only return features on the reference with this name
       * (matched to literal reference name as imported from the GFF3).
       * </pre>
       *
       * <code>optional string reference_name = 3;</code>
       */
      public Builder clearReferenceName() {

        referenceName_ = getDefaultInstance().getReferenceName();
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Only return features on the reference with this name
       * (matched to literal reference name as imported from the GFF3).
       * </pre>
       *
       * <code>optional string reference_name = 3;</code>
       */
      public Builder setReferenceNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

        referenceName_ = value;
        onChanged();
        return this;
      }

      private long start_ ;
      /**
       * <pre>
       * Required. The beginning of the window (0-based, inclusive) for which
       * overlapping features should be returned. Genomic positions are
       * non-negative integers less than reference length. Requests spanning the
       * join of circular genomes are represented as two requests one on each side
       * of the join (position 0).
       * </pre>
       *
       * <code>optional int64 start = 4;</code>
       */
      public long getStart() {
        return start_;
      }
      /**
       * <pre>
       * Required. The beginning of the window (0-based, inclusive) for which
       * overlapping features should be returned. Genomic positions are
       * non-negative integers less than reference length. Requests spanning the
       * join of circular genomes are represented as two requests one on each side
       * of the join (position 0).
       * </pre>
       *
       * <code>optional int64 start = 4;</code>
       */
      public Builder setStart(long value) {

        start_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Required. The beginning of the window (0-based, inclusive) for which
       * overlapping features should be returned. Genomic positions are
       * non-negative integers less than reference length. Requests spanning the
       * join of circular genomes are represented as two requests one on each side
       * of the join (position 0).
       * </pre>
       *
       * <code>optional int64 start = 4;</code>
       */
      public Builder clearStart() {

        start_ = 0L;
        onChanged();
        return this;
      }

      private long end_ ;
      /**
       * <pre>
       * Required. The end of the window (0-based, exclusive) for which overlapping
       * features should be returned.
       * </pre>
       *
       * <code>optional int64 end = 5;</code>
       */
      public long getEnd() {
        return end_;
      }
      /**
       * <pre>
       * Required. The end of the window (0-based, exclusive) for which overlapping
       * features should be returned.
       * </pre>
       *
       * <code>optional int64 end = 5;</code>
       */
      public Builder setEnd(long value) {

        end_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Required. The end of the window (0-based, exclusive) for which overlapping
       * features should be returned.
       * </pre>
       *
       * <code>optional int64 end = 5;</code>
       */
      public Builder clearEnd() {

        end_ = 0L;
        onChanged();
        return this;
      }

      private com.google.protobuf.LazyStringList featureTypes_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write guard: bit 0x20 of bitField0_ is set while this
      // builder owns a private, mutable copy of the list.
      private void ensureFeatureTypesIsMutable() {
        if (!((bitField0_ & 0x00000020) == 0x00000020)) {
          featureTypes_ = new com.google.protobuf.LazyStringArrayList(featureTypes_);
          bitField0_ |= 0x00000020;
         }
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public com.google.protobuf.ProtocolStringList
          getFeatureTypesList() {
        return featureTypes_.getUnmodifiableView();
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public int getFeatureTypesCount() {
        return featureTypes_.size();
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public java.lang.String getFeatureTypes(int index) {
        return featureTypes_.get(index);
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public com.google.protobuf.ByteString
          getFeatureTypesBytes(int index) {
        return featureTypes_.getByteString(index);
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public Builder setFeatureTypes(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureFeatureTypesIsMutable();
        featureTypes_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public Builder addFeatureTypes(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureFeatureTypesIsMutable();
        featureTypes_.add(value);
        onChanged();
        return this;
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
* </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public Builder addAllFeatureTypes(
          java.lang.Iterable<java.lang.String> values) {
        ensureFeatureTypesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, featureTypes_);
        onChanged();
        return this;
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public Builder clearFeatureTypes() {
        featureTypes_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000020);
        onChanged();
        return this;
      }
      /**
       * <pre>
       * If specified, this query matches only annotations whose `feature_type`
       * matches one of the provided ontology terms.
       * </pre>
       *
       * <code>repeated string feature_types = 6;</code>
       */
      public Builder addFeatureTypesBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
        ensureFeatureTypesIsMutable();
        featureTypes_.add(value);
        onChanged();
        return this;
      }

      private int pageSize_ ;
      /**
       * <pre>
       * Specifies the maximum number of results to return in a single page.
       * If unspecified, a system default will be used.
       * </pre>
       *
       * <code>optional int32 page_size = 7;</code>
       */
      public int getPageSize() {
        return pageSize_;
      }
      /**
       * <pre>
       * Specifies the maximum number of results to return in a single page.
       * If unspecified, a system default will be used.
       * </pre>
       *
       * <code>optional int32 page_size = 7;</code>
       */
      public Builder setPageSize(int value) {

        pageSize_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Specifies the maximum number of results to return in a single page.
       * If unspecified, a system default will be used.
       * </pre>
       *
       * <code>optional int32 page_size = 7;</code>
       */
      public Builder clearPageSize() {

        pageSize_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object pageToken_ = "";
      /**
       * <pre>
       * The continuation token, which is used to page through large result sets.
       * To get the next page of results, set this parameter to the value of
       * `next_page_token` from the previous response.
       * </pre>
       *
       * <code>optional string page_token = 8;</code>
       */
      public java.lang.String getPageToken() {
        java.lang.Object ref = pageToken_;
        if (!(ref instanceof java.lang.String)) {
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          pageToken_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <pre>
       * The continuation token, which is used to page through large result sets.
       * To get the next page of results, set this parameter to the value of
       * `next_page_token` from the previous response.
       * </pre>
       *
       * <code>optional string page_token = 8;</code>
       */
      public com.google.protobuf.ByteString
          getPageTokenBytes() {
        java.lang.Object ref = pageToken_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          pageToken_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <pre>
       * The continuation token, which is used to page through large result sets.
       * To get the next page of results, set this parameter to the value of
       * `next_page_token` from the previous response.
       * </pre>
       *
       * <code>optional string page_token = 8;</code>
       */
      public Builder setPageToken(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }

        pageToken_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The continuation token, which is used to page through large result sets.
       * To get the next page of results, set this parameter to the value of
       * `next_page_token` from the previous response.
       * </pre>
       *
       * <code>optional string page_token = 8;</code>
       */
      public Builder clearPageToken() {

        pageToken_ = getDefaultInstance().getPageToken();
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The continuation token, which is used to page through large result sets.
       * To get the next page of results, set this parameter to the value of
       * `next_page_token` from the previous response.
       * </pre>
       *
       * <code>optional string page_token = 8;</code>
       */
      public Builder setPageTokenBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

        pageToken_ = value;
        onChanged();
        return this;
      }

      // Unknown fields are dropped by this generated proto3 builder: both
      // overrides below intentionally ignore the supplied set.
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return this;
      }

      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return this;
      }


      // @@protoc_insertion_point(builder_scope:ga4gh.SearchFeaturesRequest)
    }

    // @@protoc_insertion_point(class_scope:ga4gh.SearchFeaturesRequest)
    private static final ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest();
    }

    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Wire parser: delegates to the parsing constructor.
    private static final com.google.protobuf.Parser<SearchFeaturesRequest>
        PARSER = new com.google.protobuf.AbstractParser<SearchFeaturesRequest>() {
      public SearchFeaturesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
          return new SearchFeaturesRequest(input, extensionRegistry);
      }
    };

    public static com.google.protobuf.Parser<SearchFeaturesRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public com.google.protobuf.Parser<SearchFeaturesRequest> getParserForType() {
      return PARSER;
    }

    // getDefaultInstanceForType(): the declaration continues on the next
    // source line.
    public
ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  // Read-only accessor interface implemented by both SearchFeaturesResponse
  // and its Builder.
  public interface SearchFeaturesResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:ga4gh.SearchFeaturesResponse)
      com.google.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    java.util.List<ga4gh.SequenceAnnotations.Feature>
        getFeaturesList();
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    ga4gh.SequenceAnnotations.Feature getFeatures(int index);
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    int getFeaturesCount();
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    java.util.List<? extends ga4gh.SequenceAnnotations.FeatureOrBuilder>
        getFeaturesOrBuilderList();
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    ga4gh.SequenceAnnotations.FeatureOrBuilder getFeaturesOrBuilder(
        int index);

    /**
     * <pre>
     * The continuation token, which is used to page through large result sets.
     * Provide this value in a subsequent request to return the next page of
     * results. This field will be empty if there aren't any additional results.
     * </pre>
     *
     * <code>optional string next_page_token = 2;</code>
     */
    java.lang.String getNextPageToken();
    /**
     * <pre>
     * The continuation token, which is used to page through large result sets.
     * Provide this value in a subsequent request to return the next page of
     * results. This field will be empty if there aren't any additional results.
     * </pre>
     *
     * <code>optional string next_page_token = 2;</code>
     */
    com.google.protobuf.ByteString
        getNextPageTokenBytes();
  }
  /**
   * <pre>
   * This is the response from `POST /features/search` expressed as JSON.
   * </pre>
   *
   * Protobuf type {@code ga4gh.SearchFeaturesResponse}
   */
  public  static final class SearchFeaturesResponse extends
      com.google.protobuf.GeneratedMessage implements
      // @@protoc_insertion_point(message_implements:ga4gh.SearchFeaturesResponse)
      SearchFeaturesResponseOrBuilder {
    // Use SearchFeaturesResponse.newBuilder() to construct.
    private SearchFeaturesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
    }
    // Default instance: empty feature list, empty continuation token.
    private SearchFeaturesResponse() {
      features_ = java.util.Collections.emptyList();
      nextPageToken_ = "";
    }

    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      // This generated class does not retain unknown fields.
      return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
    }
    // Wire-parsing constructor: tag 10 = field 1 (features, length-delimited
    // message), tag 18 = field 2 (next_page_token, string); unrecognized
    // fields are skipped. Parsing continues on the next source line.
    private SearchFeaturesResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!input.skipField(tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                features_ = new java.util.ArrayList<ga4gh.SequenceAnnotations.Feature>();
                mutable_bitField0_ |= 0x00000001;
              }
              features_.add(input.readMessage(ga4gh.SequenceAnnotations.Feature.parser(), extensionRegistry));
              break;
            }
            case 18: {
              java.lang.String s =
input.readStringRequireUtf8();

              nextPageToken_ = s;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated features list once parsing ends (even on error).
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          features_ = java.util.Collections.unmodifiableList(features_);
        }
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.Builder.class);
    }

    private int bitField0_;
    public static final int FEATURES_FIELD_NUMBER = 1;
    private java.util.List<ga4gh.SequenceAnnotations.Feature> features_;
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    public java.util.List<ga4gh.SequenceAnnotations.Feature> getFeaturesList() {
      return features_;
    }
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    public java.util.List<? extends ga4gh.SequenceAnnotations.FeatureOrBuilder>
        getFeaturesOrBuilderList() {
      return features_;
    }
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    public int getFeaturesCount() {
      return features_.size();
    }
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    public ga4gh.SequenceAnnotations.Feature getFeatures(int index) {
      return features_.get(index);
    }
    /**
     * <pre>
     * The list of matching annotations, sorted by start position. Annotations
     * which share a start position are returned in a deterministic order.
     * </pre>
     *
     * <code>repeated .ga4gh.Feature features = 1;</code>
     */
    public ga4gh.SequenceAnnotations.FeatureOrBuilder getFeaturesOrBuilder(
        int index) {
      return features_.get(index);
    }

    public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
    private volatile java.lang.Object nextPageToken_;
    /**
     * <pre>
     * The continuation token, which is used to page through large result sets.
     * Provide this value in a subsequent request to return the next page of
     * results. This field will be empty if there aren't any additional results.
     * </pre>
     *
     * <code>optional string next_page_token = 2;</code>
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Stored as ByteString until first read; decode and cache.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      }
    }
    /**
     * <pre>
     * The continuation token, which is used to page through large result sets.
     * Provide this value in a subsequent request to return the next page of
     * results. This field will be empty if there aren't any additional results.
     * </pre>
     *
     * <code>optional string next_page_token = 2;</code>
     */
    public com.google.protobuf.ByteString
        getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      // Cached tri-state: -1 unknown, 0 false, 1 true. proto3 messages have
      // no required fields, so this always resolves to true.
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes field 1 (features, repeated message) then field 2
    // (next_page_token) when non-empty.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < features_.size(); i++) {
        output.writeMessage(1, features_.get(i));
      }
      if (!getNextPageTokenBytes().isEmpty()) {
        com.google.protobuf.GeneratedMessage.writeString(output, 2, nextPageToken_);
      }
    }

    // Wire size, memoized; mirrors writeTo().
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < features_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, features_.get(i));
      }
      if (!getNextPageTokenBytes().isEmpty()) {
        size += com.google.protobuf.GeneratedMessage.computeStringSize(2, nextPageToken_);
      }
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;

    // Static parse helpers; all delegate to PARSER.
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    // parseFrom(byte[]): the declaration continues on the next source line.
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse
parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessage
          .parseWithIOException(PARSER, input);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessage
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessage
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessage
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessage
          .parseWithIOException(PARSER, input);
    }
    public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessage
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() { return newBuilder(); }
    // Builder factories: every builder is derived from DEFAULT_INSTANCE.
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * This is the response from `POST /features/search` expressed as JSON.
     * </pre>
     *
     * Protobuf type {@code ga4gh.SearchFeaturesResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:ga4gh.SearchFeaturesResponse)
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.class, ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.Builder.class);
      }

      // Construct using ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the features field builder when the runtime is
      // configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getFeaturesFieldBuilder();
        }
      }
      // Resets to defaults; features are cleared either on the raw list
      // (bit 0x01 = builder owns a mutable copy) or via the field builder.
      public Builder clear() {
        super.clear();
        if (featuresBuilder_ == null) {
          features_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          featuresBuilder_.clear();
        }
        nextPageToken_ = "";
        return this;
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_SearchFeaturesResponse_descriptor;
      }

      public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse getDefaultInstanceForType() {
        return ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.getDefaultInstance();
      }

      public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse build() {
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message; the features list is frozen
      // on first build when owned directly (no field builder).
      public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse buildPartial() {
        ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse result = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (featuresBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            features_ = java.util.Collections.unmodifiableList(features_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.features_ = features_;
        } else {
          result.features_ = featuresBuilder_.build();
        }
        result.nextPageToken_ = nextPageToken_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse) {
          return mergeFrom((ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // mergeFrom(SearchFeaturesResponse): the declaration continues on the
      // next source line.
      public Builder
mergeFrom(ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse other) { if (other == ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse.getDefaultInstance()) return this; if (featuresBuilder_ == null) { if (!other.features_.isEmpty()) { if (features_.isEmpty()) { features_ = other.features_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFeaturesIsMutable(); features_.addAll(other.features_); } onChanged(); } } else { if (!other.features_.isEmpty()) { if (featuresBuilder_.isEmpty()) { featuresBuilder_.dispose(); featuresBuilder_ = null; features_ = other.features_; bitField0_ = (bitField0_ & ~0x00000001); featuresBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getFeaturesFieldBuilder() : null; } else { featuresBuilder_.addAllMessages(other.features_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; onChanged(); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<ga4gh.SequenceAnnotations.Feature> features_ = java.util.Collections.emptyList(); private void ensureFeaturesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { features_ = new java.util.ArrayList<ga4gh.SequenceAnnotations.Feature>(features_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< 
ga4gh.SequenceAnnotations.Feature, ga4gh.SequenceAnnotations.Feature.Builder, ga4gh.SequenceAnnotations.FeatureOrBuilder> featuresBuilder_; /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public java.util.List<ga4gh.SequenceAnnotations.Feature> getFeaturesList() { if (featuresBuilder_ == null) { return java.util.Collections.unmodifiableList(features_); } else { return featuresBuilder_.getMessageList(); } } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public int getFeaturesCount() { if (featuresBuilder_ == null) { return features_.size(); } else { return featuresBuilder_.getCount(); } } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public ga4gh.SequenceAnnotations.Feature getFeatures(int index) { if (featuresBuilder_ == null) { return features_.get(index); } else { return featuresBuilder_.getMessage(index); } } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder setFeatures( int index, ga4gh.SequenceAnnotations.Feature value) { if (featuresBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeaturesIsMutable(); features_.set(index, value); onChanged(); } else { featuresBuilder_.setMessage(index, value); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. 
Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder setFeatures( int index, ga4gh.SequenceAnnotations.Feature.Builder builderForValue) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.set(index, builderForValue.build()); onChanged(); } else { featuresBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder addFeatures(ga4gh.SequenceAnnotations.Feature value) { if (featuresBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeaturesIsMutable(); features_.add(value); onChanged(); } else { featuresBuilder_.addMessage(value); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder addFeatures( int index, ga4gh.SequenceAnnotations.Feature value) { if (featuresBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeaturesIsMutable(); features_.add(index, value); onChanged(); } else { featuresBuilder_.addMessage(index, value); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. 
* </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder addFeatures( ga4gh.SequenceAnnotations.Feature.Builder builderForValue) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.add(builderForValue.build()); onChanged(); } else { featuresBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder addFeatures( int index, ga4gh.SequenceAnnotations.Feature.Builder builderForValue) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.add(index, builderForValue.build()); onChanged(); } else { featuresBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder addAllFeatures( java.lang.Iterable<? extends ga4gh.SequenceAnnotations.Feature> values) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, features_); onChanged(); } else { featuresBuilder_.addAllMessages(values); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder clearFeatures() { if (featuresBuilder_ == null) { features_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { featuresBuilder_.clear(); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. 
Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public Builder removeFeatures(int index) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.remove(index); onChanged(); } else { featuresBuilder_.remove(index); } return this; } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public ga4gh.SequenceAnnotations.Feature.Builder getFeaturesBuilder( int index) { return getFeaturesFieldBuilder().getBuilder(index); } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public ga4gh.SequenceAnnotations.FeatureOrBuilder getFeaturesOrBuilder( int index) { if (featuresBuilder_ == null) { return features_.get(index); } else { return featuresBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public java.util.List<? extends ga4gh.SequenceAnnotations.FeatureOrBuilder> getFeaturesOrBuilderList() { if (featuresBuilder_ != null) { return featuresBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(features_); } } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. 
* </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public ga4gh.SequenceAnnotations.Feature.Builder addFeaturesBuilder() { return getFeaturesFieldBuilder().addBuilder( ga4gh.SequenceAnnotations.Feature.getDefaultInstance()); } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public ga4gh.SequenceAnnotations.Feature.Builder addFeaturesBuilder( int index) { return getFeaturesFieldBuilder().addBuilder( index, ga4gh.SequenceAnnotations.Feature.getDefaultInstance()); } /** * <pre> * The list of matching annotations, sorted by start position. Annotations * which share a start position are returned in a deterministic order. * </pre> * * <code>repeated .ga4gh.Feature features = 1;</code> */ public java.util.List<ga4gh.SequenceAnnotations.Feature.Builder> getFeaturesBuilderList() { return getFeaturesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< ga4gh.SequenceAnnotations.Feature, ga4gh.SequenceAnnotations.Feature.Builder, ga4gh.SequenceAnnotations.FeatureOrBuilder> getFeaturesFieldBuilder() { if (featuresBuilder_ == null) { featuresBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< ga4gh.SequenceAnnotations.Feature, ga4gh.SequenceAnnotations.Feature.Builder, ga4gh.SequenceAnnotations.FeatureOrBuilder>( features_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); features_ = null; } return featuresBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. 
* </pre> * * <code>optional string next_page_token = 2;</code> */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public Builder setNextPageToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; onChanged(); return this; } /** * <pre> * The continuation token, which is used to page through large result sets. * Provide this value in a subsequent request to return the next page of * results. This field will be empty if there aren't any additional results. * </pre> * * <code>optional string next_page_token = 2;</code> */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); onChanged(); return this; } /** * <pre> * The continuation token, which is used to page through large result sets. 
   * Provide this value in a subsequent request to return the next page of
   * results. This field will be empty if there aren't any additional results.
   * </pre>
   *
   * <code>optional string next_page_token = 2;</code>
   */
  public Builder setNextPageTokenBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    onChanged();
    return this;
  }

  // Generated proto3 code of this vintage drops unknown fields: both
  // setters below are intentional no-ops.
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }

  // @@protoc_insertion_point(builder_scope:ga4gh.SearchFeaturesResponse)
}

// @@protoc_insertion_point(class_scope:ga4gh.SearchFeaturesResponse)
// Shared immutable default instance returned by getDefaultInstance().
private static final ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse();
}

public static ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless parser that delegates to the wire-format parsing constructor.
private static final com.google.protobuf.Parser<SearchFeaturesResponse>
    PARSER = new com.google.protobuf.AbstractParser<SearchFeaturesResponse>() {
  public SearchFeaturesResponse parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new SearchFeaturesResponse(input, extensionRegistry);
  }
};

public static com.google.protobuf.Parser<SearchFeaturesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<SearchFeaturesResponse> getParserForType() {
  return PARSER;
}

public ga4gh.SequenceAnnotationServiceOuterClass.SearchFeaturesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

// Read-only accessor contract shared by ga4gh.GetFeatureRequest messages
// and their builders.
public interface GetFeatureRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:ga4gh.GetFeatureRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The ID of the `Feature` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_id = 1;</code>
   */
  java.lang.String getFeatureId();
  /**
   * <pre>
   * The ID of the `Feature` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_id = 1;</code>
   */
  com.google.protobuf.ByteString
      getFeatureIdBytes();
}

/**
 * <pre>
 * This request maps to the URL `GET /features/{id}`.
 * </pre>
 *
 * Protobuf type {@code ga4gh.GetFeatureRequest}
 */
public static final class GetFeatureRequest extends
    com.google.protobuf.GeneratedMessage implements
    // @@protoc_insertion_point(message_implements:ga4gh.GetFeatureRequest)
    GetFeatureRequestOrBuilder {
  // Use GetFeatureRequest.newBuilder() to construct.
  private GetFeatureRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
  }
  // Default construction: feature_id starts as the proto3 default "".
  private GetFeatureRequest() {
    featureId_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor. Tag 10 = field 1 (feature_id,
  // length-delimited UTF-8 string); any other tag is skipped by the
  // default branch until tag 0 (end of stream).
  private GetFeatureRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            featureId_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return
      ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.Builder.class);
  }

  public static final int FEATURE_ID_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; converted lazily in
  // whichever direction the caller asks for, then cached.
  private volatile java.lang.Object featureId_;
  /**
   * <pre>
   * The ID of the `Feature` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_id = 1;</code>
   */
  public java.lang.String getFeatureId() {
    java.lang.Object ref = featureId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded string so later calls skip the UTF-8 decode.
      featureId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The ID of the `Feature` to be retrieved.
   * </pre>
   *
   * <code>optional string feature_id = 1;</code>
   */
  public com.google.protobuf.ByteString
      getFeatureIdBytes() {
    java.lang.Object ref = featureId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      featureId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed, 0 = false, 1 = true. Always true here: proto3 has
  // no required fields to validate.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Field 1 is only emitted when non-empty, per proto3 default elision.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!getFeatureIdBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessage.writeString(output, 1, featureId_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getFeatureIdBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessage.computeStringSize(1, featureId_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  // Static parse entry points: every overload delegates to PARSER or to the
  // GeneratedMessage IO helpers.
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input);
  }
  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessage
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest
      prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * This request maps to the URL `GET /features/{id}`.
   * </pre>
   *
   * Protobuf type {@code ga4gh.GetFeatureRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:ga4gh.GetFeatureRequest)
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.class, ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.Builder.class);
    }

    // Construct using ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message/repeated fields on this type, so there are no nested field
    // builders to force; the body is intentionally empty.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      featureId_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return ga4gh.SequenceAnnotationServiceOuterClass.internal_static_ga4gh_GetFeatureRequest_descriptor;
    }

    public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest getDefaultInstanceForType() {
      return ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.getDefaultInstance();
    }

    public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest build() {
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest buildPartial() {
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest result = new ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest(this);
      result.featureId_ = featureId_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest) {
        return mergeFrom((ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: a non-default (non-empty) field in `other`
    // overwrites this builder's value. (The second onChanged() is redundant
    // but present in the protoc output.)
    public Builder mergeFrom(ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest other) {
      if (other == ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest.getDefaultInstance()) return this;
      if (!other.getFeatureId().isEmpty()) {
        featureId_ = other.featureId_;
        onChanged();
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure so it still gets merged
        // in the finally block below.
        parsedMessage = (ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Same lazy String/ByteString dual representation as the message class.
    private java.lang.Object featureId_ = "";
    /**
     * <pre>
     * The ID of the `Feature` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_id = 1;</code>
     */
    public java.lang.String getFeatureId() {
      java.lang.Object ref = featureId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        featureId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The ID of the `Feature` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_id = 1;</code>
     */
    public com.google.protobuf.ByteString
        getFeatureIdBytes() {
      java.lang.Object ref = featureId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        featureId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The ID of the `Feature` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_id = 1;</code>
     */
    public Builder setFeatureId(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      featureId_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ID of the `Feature` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_id = 1;</code>
     */
    public Builder clearFeatureId() {
      
      featureId_ = getDefaultInstance().getFeatureId();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ID of the `Feature` to be retrieved.
     * </pre>
     *
     * <code>optional string feature_id = 1;</code>
     */
    public Builder setFeatureIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      featureId_ = value;
      onChanged();
      return this;
    }
    // Unknown fields are dropped in this generated-code vintage: no-ops.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:ga4gh.GetFeatureRequest)
  }

  // @@protoc_insertion_point(class_scope:ga4gh.GetFeatureRequest)
  private static final ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest();
  }

  public static ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser delegating to the wire-format parsing constructor.
  private static final com.google.protobuf.Parser<GetFeatureRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetFeatureRequest>() {
    public GetFeatureRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetFeatureRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<GetFeatureRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetFeatureRequest> getParserForType() {
    return PARSER;
  }

  public ga4gh.SequenceAnnotationServiceOuterClass.GetFeatureRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

// Descriptor and reflection accessor tables; populated by the static
// descriptor-initialization block that follows.
private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_ga4gh_SearchFeatureSetsRequest_descriptor;
private static final
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_ga4gh_SearchFeatureSetsRequest_fieldAccessorTable;
private static final
com.google.protobuf.Descriptors.Descriptor internal_static_ga4gh_SearchFeatureSetsResponse_descriptor; private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ga4gh_SearchFeatureSetsResponse_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_ga4gh_GetFeatureSetRequest_descriptor; private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ga4gh_GetFeatureSetRequest_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_ga4gh_SearchFeaturesRequest_descriptor; private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ga4gh_SearchFeaturesRequest_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_ga4gh_SearchFeaturesResponse_descriptor; private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ga4gh_SearchFeaturesResponse_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_ga4gh_GetFeatureRequest_descriptor; private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ga4gh_GetFeatureRequest_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\'ga4gh/sequence_annotation_service.prot" + "o\022\005ga4gh\032 ga4gh/sequence_annotations.pro" + "to\"U\n\030SearchFeatureSetsRequest\022\022\n\ndatase" + "t_id\030\001 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_to" + "ken\030\003 \001(\t\"]\n\031SearchFeatureSetsResponse\022\'" + "\n\014feature_sets\030\001 \003(\0132\021.ga4gh.FeatureSet\022" + "\027\n\017next_page_token\030\002 \001(\t\".\n\024GetFeatureSe" + "tRequest\022\026\n\016feature_set_id\030\001 
\001(\t\"\264\001\n\025Sea" + "rchFeaturesRequest\022\026\n\016feature_set_id\030\001 \001" + "(\t\022\021\n\tparent_id\030\002 \001(\t\022\026\n\016reference_name\030", "\003 \001(\t\022\r\n\005start\030\004 \001(\003\022\013\n\003end\030\005 \001(\003\022\025\n\rfea" + "ture_types\030\006 \003(\t\022\021\n\tpage_size\030\007 \001(\005\022\022\n\np" + "age_token\030\010 \001(\t\"S\n\026SearchFeaturesRespons" + "e\022 \n\010features\030\001 \003(\0132\016.ga4gh.Feature\022\027\n\017n" + "ext_page_token\030\002 \001(\t\"\'\n\021GetFeatureReques" + "t\022\022\n\nfeature_id\030\001 \001(\t2\273\002\n\031SequenceAnnota" + "tionService\022V\n\021SearchFeatureSets\022\037.ga4gh" + ".SearchFeatureSetsRequest\032 .ga4gh.Search" + "FeatureSetsResponse\022?\n\rGetFeatureSet\022\033.g" + "a4gh.GetFeatureSetRequest\032\021.ga4gh.Featur", "eSet\022M\n\016SearchFeatures\022\034.ga4gh.SearchFea" + "turesRequest\032\035.ga4gh.SearchFeaturesRespo" + "nse\0226\n\nGetFeature\022\030.ga4gh.GetFeatureRequ" + "est\032\016.ga4gh.Featureb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { ga4gh.SequenceAnnotations.getDescriptor(), }, assigner); internal_static_ga4gh_SearchFeatureSetsRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_ga4gh_SearchFeatureSetsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ga4gh_SearchFeatureSetsRequest_descriptor, new java.lang.String[] { "DatasetId", "PageSize", "PageToken", }); internal_static_ga4gh_SearchFeatureSetsResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_ga4gh_SearchFeatureSetsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ga4gh_SearchFeatureSetsResponse_descriptor, new java.lang.String[] { "FeatureSets", "NextPageToken", }); internal_static_ga4gh_GetFeatureSetRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_ga4gh_GetFeatureSetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ga4gh_GetFeatureSetRequest_descriptor, new java.lang.String[] { "FeatureSetId", }); internal_static_ga4gh_SearchFeaturesRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_ga4gh_SearchFeaturesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ga4gh_SearchFeaturesRequest_descriptor, new java.lang.String[] { "FeatureSetId", "ParentId", "ReferenceName", "Start", "End", "FeatureTypes", "PageSize", "PageToken", }); internal_static_ga4gh_SearchFeaturesResponse_descriptor = getDescriptor().getMessageTypes().get(4); 
internal_static_ga4gh_SearchFeaturesResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ga4gh_SearchFeaturesResponse_descriptor, new java.lang.String[] { "Features", "NextPageToken", }); internal_static_ga4gh_GetFeatureRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_ga4gh_GetFeatureRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ga4gh_GetFeatureRequest_descriptor, new java.lang.String[] { "FeatureId", }); ga4gh.SequenceAnnotations.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
opencb/ga4gh
src/main/java/ga4gh/SequenceAnnotationServiceOuterClass.java
Java
apache-2.0
192,853
// Copyright (c) 2002-2019 "Neo4j,"
// Neo4j Sweden AB [http://neo4j.com]
//
// This file is part of Neo4j.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using FluentAssertions;
using Neo4j.Driver.Internal;
using Neo4j.Driver;
using Xunit;

namespace Neo4j.Driver.Tests.Types
{
    /// <summary>
    /// Unit tests for <c>ZonedDateTime</c> values constructed with a zone id
    /// (e.g. "Europe/Rome") rather than a fixed UTC offset. Covers construction,
    /// component validation, string formatting, equality/hash codes, ordering via
    /// <c>CompareTo</c>, and <c>IConvertible</c> behavior.
    /// </summary>
    public class ZonedDateTimeWithZoneIdTests
    {
        // --- Construction: components and System.DateTime sources -------------

        [Fact]
        public void ShouldCreateDateTimeWithZoneIdWithDateTimeComponents()
        {
            var cypherDateTime = new ZonedDateTime(1947, 12, 17, 23, 49, 54, Zone.Of("Europe/Rome"));

            cypherDateTime.Year.Should().Be(1947);
            cypherDateTime.Month.Should().Be(12);
            cypherDateTime.Day.Should().Be(17);
            cypherDateTime.Hour.Should().Be(23);
            cypherDateTime.Minute.Should().Be(49);
            cypherDateTime.Second.Should().Be(54);
            cypherDateTime.Nanosecond.Should().Be(0);
            // Europe/Rome is UTC+1 at this instant, hence 3600 seconds.
            cypherDateTime.OffsetSeconds.Should().Be(60 * 60);
            cypherDateTime.Zone.Should().Be(Zone.Of("Europe/Rome"));
        }

        [Fact]
        public void ShouldCreateDateTimeWithZoneIdWithDateTimeComponentsWithNanoseconds()
        {
            var cypherDateTime = new ZonedDateTime(1947, 12, 17, 23, 49, 54, 192794500, Zone.Of("Europe/Rome"));

            cypherDateTime.Year.Should().Be(1947);
            cypherDateTime.Month.Should().Be(12);
            cypherDateTime.Day.Should().Be(17);
            cypherDateTime.Hour.Should().Be(23);
            cypherDateTime.Minute.Should().Be(49);
            cypherDateTime.Second.Should().Be(54);
            cypherDateTime.Nanosecond.Should().Be(192794500);
            cypherDateTime.OffsetSeconds.Should().Be(60 * 60);
            cypherDateTime.Zone.Should().Be(Zone.Of("Europe/Rome"));
        }

        [Fact]
        public void ShouldCreateDateTimeWithZoneIdWithDateTime()
        {
            // DateTime carries milliseconds (120 ms); expect 120,000,000 ns.
            var dateTime = new DateTime(1947, 12, 17, 23, 49, 54, 120);
            var cypherDateTime = new ZonedDateTime(dateTime, "Europe/Rome");

            cypherDateTime.Year.Should().Be(1947);
            cypherDateTime.Month.Should().Be(12);
            cypherDateTime.Day.Should().Be(17);
            cypherDateTime.Hour.Should().Be(23);
            cypherDateTime.Minute.Should().Be(49);
            cypherDateTime.Second.Should().Be(54);
            cypherDateTime.Nanosecond.Should().Be(120000000);
            cypherDateTime.OffsetSeconds.Should().Be(60 * 60);
            cypherDateTime.Zone.Should().Be(Zone.Of("Europe/Rome"));
        }

        // --- Component validation: each out-of-range field must throw ---------

        [Theory]
        [InlineData(-1000000000)]
        [InlineData(1000000000)]
        public void ShouldThrowOnInvalidYear(int year)
        {
            var ex = Record.Exception(() => new ZonedDateTime(year, 1, 1, 0, 0, 0, Zone.Of("Europe/Amsterdam")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        [Theory]
        [InlineData(0)]
        [InlineData(13)]
        public void ShouldThrowOnInvalidMonth(int month)
        {
            var ex = Record.Exception(() => new ZonedDateTime(1990, month, 1, 0, 0, 0, Zone.Of("Europe/Istanbul")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        [Theory]
        [InlineData(2018, 1, 0)]
        [InlineData(2018, 1, 32)]
        [InlineData(2018, 6, 31)]
        [InlineData(2018, 2, 29)]
        [InlineData(2018, 12, -1)]
        public void ShouldThrowOnInvalidDay(int year, int month, int day)
        {
            // Includes month-length edge cases (June has 30 days; 2018 is not a leap year).
            var ex = Record.Exception(() => new ZonedDateTime(year, month, day, 0, 0, 0, Zone.Of("Europe/Istanbul")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(24)]
        public void ShouldThrowOnInvalidHour(int hour)
        {
            var ex = Record.Exception(() => new ZonedDateTime(1990, 1, 1, hour, 0, 0, Zone.Of("Europe/Istanbul")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(60)]
        [InlineData(61)]
        public void ShouldThrowOnInvalidMinute(int minute)
        {
            var ex = Record.Exception(() => new ZonedDateTime(1990, 1, 1, 0, minute, 0, Zone.Of("Europe/Paris")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(60)]
        [InlineData(61)]
        public void ShouldThrowOnInvalidSecond(int second)
        {
            var ex = Record.Exception(() => new ZonedDateTime(1990, 1, 1, 0, 0, second, Zone.Of("Europe/Rome")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(999_999_999 + 1)]
        public void ShouldThrowOnInvalidNanosecond(int nanosecond)
        {
            var ex = Record.Exception(() => new ZonedDateTime(1990, 1, 1, 0, 0, 0, nanosecond, Zone.Of("Europe/Athens")));

            ex.Should().NotBeNull().And.BeOfType<ArgumentOutOfRangeException>();
        }

        // --- Conversion limits: values not representable as DateTimeOffset ----

        [Theory]
        [InlineData(-9999)]
        [InlineData(-1)]
        [InlineData(0)]
        [InlineData(10000)]
        [InlineData(9999999)]
        public void ShouldThrowOnOverflow(int year)
        {
            // These years are valid for ZonedDateTime but outside DateTimeOffset's range.
            var dateTime = new ZonedDateTime(year, 1, 1, 0, 0, 0, 0, Zone.Of("Europe/London"));
            var ex = Record.Exception(() => dateTime.ToDateTimeOffset());

            ex.Should().NotBeNull().And.BeOfType<ValueOverflowException>();
        }

        [Theory]
        [InlineData(1)]
        [InlineData(20)]
        [InlineData(99)]
        [InlineData(999000727)]
        [InlineData(999000750)]
        [InlineData(999000001)]
        public void ShouldThrowOnTruncation(int nanosecond)
        {
            // DateTimeOffset has 100 ns (tick) resolution; finer values must not be silently lost.
            var dateTime = new ZonedDateTime(1, 1, 1, 0, 0, 0, nanosecond, Zone.Of("Europe/London"));
            var ex = Record.Exception(() => dateTime.ToDateTimeOffset());

            ex.Should().NotBeNull().And.BeOfType<ValueTruncationException>();
        }

        // --- String formatting -------------------------------------------------

        [Theory]
        [InlineData(1947, 12, 17, 23, 5, 54, 192794500, "Europe/Rome", "1947-12-17T23:05:54.192794500[Europe/Rome]")]
        [InlineData(1947, 12, 5, 0, 5, 54, 192794500, "Europe/Amsterdam", "1947-12-05T00:05:54.192794500[Europe/Amsterdam]")]
        [InlineData(1947, 12, 5, 0, 5, 54, 0, "Europe/Istanbul", "1947-12-05T00:05:54[Europe/Istanbul]")]
        [InlineData(5, 1, 5, 0, 5, 54, 0, "Africa/Nairobi", "0005-01-05T00:05:54[Africa/Nairobi]")]
        [InlineData(-5, 1, 5, 0, 5, 54, 1250, "America/Halifax", "-0005-01-05T00:05:54.000001250[America/Halifax]")]
        [InlineData(999999, 1, 1, 5, 1, 25, 1, "America/New_York", "999999-01-01T05:01:25.000000001[America/New_York]")]
        [InlineData(-999999, 1, 1, 5, 1, 25, 1, "Asia/Seoul", "-999999-01-01T05:01:25.000000001[Asia/Seoul]")]
        public void ShouldGenerateCorrectString(int year, int month, int day, int hour, int minute, int second,
            int nanosecond, string zoneId, string expected)
        {
            var cypherDateTime = new ZonedDateTime(year, month, day, hour, minute, second, nanosecond, Zone.Of(zoneId));
            var cypherDateTimeStr = cypherDateTime.ToString();

            cypherDateTimeStr.Should().Be(expected);
        }

        // --- Equality and hash codes ------------------------------------------

        [Fact]
        public void ShouldGenerateSameHashcode()
        {
            // 789 ms == 789000000 ns, so the two construction paths are equivalent.
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 15, 12, 01, 789000000, Zone.Of("Europe/Rome"));
            var dateTime2 = new ZonedDateTime(new DateTime(1947, 12, 17, 15, 12, 01, 789), "Europe/Rome");

            dateTime1.GetHashCode().Should().Be(dateTime2.GetHashCode());
        }

        [Fact]
        public void ShouldGenerateDifferentHashcode()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 15, 12, 01, 789000000, Zone.Of("Europe/Rome"));
            var dateTime2 = new ZonedDateTime(new DateTime(1947, 12, 17, 15, 12, 01, 790), "Europe/Rome");

            dateTime1.GetHashCode().Should().NotBe(dateTime2.GetHashCode());
        }

        [Fact]
        public void ShouldBeEqual()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 15, 12, 01, 789000000, Zone.Of("Europe/Rome"));
            var dateTime2 = new ZonedDateTime(new DateTime(1947, 12, 17, 15, 12, 01, 789), "Europe/Rome");

            dateTime1.Should().Be(dateTime2);
        }

        [Fact]
        public void ShouldNotBeEqual()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 15, 12, 01, 789000000, Zone.Of("Europe/Rome"));
            var dateTime2 = new ZonedDateTime(new DateTime(1947, 12, 17, 15, 12, 01, 790), "Europe/Rome");

            dateTime1.Should().NotBe(dateTime2);
        }

        [Fact]
        public void ShouldNotBeEqualToAnotherType()
        {
            var dateTime = new ZonedDateTime(1947, 12, 17, 15, 12, 01, 789000000, Zone.Of("Europe/Rome"));
            var other = "some string";

            dateTime.Equals(other).Should().BeFalse();
        }

        [Fact]
        public void ShouldNotBeEqualToNull()
        {
            var dateTime = new ZonedDateTime(1947, 12, 17, 15, 12, 01, 789000000, Zone.Of("Europe/Rome"));
            var other = (object)null;

            dateTime.Equals(other).Should().BeFalse();
        }

        // --- Ordering via CompareTo (offset-aware) ----------------------------

        [Fact]
        public void ShouldThrowOnCompareToOtherType()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 0, 0, 0, 0, Zone.Of("Europe/Amsterdam"));

            var ex = Record.Exception(() => dateTime1.CompareTo(new DateTime(1947, 12, 17)));

            ex.Should().NotBeNull().And.BeOfType<ArgumentException>();
        }

        [Fact]
        public void ShouldReportLargerOnCompareToNull()
        {
            // By convention any value compares greater than null.
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 0, 0, 0, 0, Zone.Of("Europe/Amsterdam"));

            var comp = dateTime1.CompareTo(null);

            comp.Should().BeGreaterThan(0);
        }

        [Fact]
        public void ShouldReportLargerOnCompareTo()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 0, 0, 0, 0, Zone.Of("Europe/Amsterdam"));
            var dateTime2 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));

            var comp = dateTime1.CompareTo(dateTime2);

            comp.Should().BeGreaterThan(0);
        }

        [Fact]
        public void ShouldReportLargerOnCompareToDiffOffset()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 17, 23, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));
            var dateTime2 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/London"));

            var comp = dateTime1.CompareTo(dateTime2);

            comp.Should().BeGreaterThan(0);
        }

        [Fact]
        public void ShouldReportEqualOnCompareTo()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));
            var dateTime2 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));

            var comp = dateTime1.CompareTo(dateTime2);

            comp.Should().Be(0);
        }

        [Fact]
        public void ShouldReportEqualOnCompareToDiffOffset()
        {
            // Same instant expressed in two zones one hour apart compares equal.
            var dateTime1 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/London"));
            var dateTime2 = new ZonedDateTime(1947, 12, 17, 0, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));

            var comp = dateTime1.CompareTo(dateTime2);

            comp.Should().Be(0);
        }

        [Fact]
        public void ShouldReportSmallerOnCompareTo()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));
            var dateTime2 = new ZonedDateTime(1947, 12, 17, 0, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));

            var comp = dateTime1.CompareTo(dateTime2);

            comp.Should().BeLessThan(0);
        }

        [Fact]
        public void ShouldReportSmallerOnCompareToDiffOffset()
        {
            var dateTime1 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/Amsterdam"));
            var dateTime2 = new ZonedDateTime(1947, 12, 16, 23, 59, 59, 999999900, Zone.Of("Europe/London"));

            var comp = dateTime1.CompareTo(dateTime2);

            comp.Should().BeLessThan(0);
        }

        // --- IConvertible support ---------------------------------------------

        [Fact]
        public void ShouldBeConvertableToDateTimeOffset()
        {
            var date = new DateTime(1947, 12, 16, 12, 15, 59, 660);
            var date1 = new ZonedDateTime(date, "Europe/Rome");
            var date2 = Convert.ChangeType(date1, typeof(DateTimeOffset));

            date2.Should().Be(new DateTimeOffset(date, TimeSpan.FromSeconds(3600)));
        }

        [Fact]
        public void ShouldBeConvertableToString()
        {
            var date = new ZonedDateTime(1947, 12, 16, 12, 15, 59, 660000999, Zone.Of(3600));
            var dateStr1 = Convert.ToString(date);
            var dateStr2 = Convert.ChangeType(date, typeof(string));

            dateStr1.Should().Be("1947-12-16T12:15:59.660000999+01:00");
            dateStr2.Should().Be("1947-12-16T12:15:59.660000999+01:00");
        }

        [Fact]
        public void ShouldThrowWhenConversionIsNotSupported()
        {
            var date = new ZonedDateTime(1947, 12, 16, 12, 15, 59, 660000999, Zone.Of("America/Dominica"));
            // Every unsupported IConvertible target must raise InvalidCastException.
            var conversions = new Action[]
            {
                () => Convert.ToBoolean(date),
                () => Convert.ToDateTime(date),
                () => Convert.ToByte(date),
                () => Convert.ToChar(date),
                () => Convert.ToDecimal(date),
                () => Convert.ToDouble(date),
                () => Convert.ToInt16(date),
                () => Convert.ToInt32(date),
                () => Convert.ToInt64(date),
                () => Convert.ToSByte(date),
                () => Convert.ToUInt16(date),
                () => Convert.ToUInt32(date),
                () => Convert.ToUInt64(date),
                () => Convert.ToSingle(date),
                () => Convert.ChangeType(date, typeof(ArrayList))
            };

            foreach (var testAction in conversions)
            {
                testAction.Should().Throw<InvalidCastException>();
            }
        }
    }
}
ali-ince/neo4j-dotnet-driver
Neo4j.Driver/Neo4j.Driver.Tests/Types/ZonedDateTimeWithZoneIdTests.cs
C#
apache-2.0
15,028
// Copyright © 2017-2018 Andy Goryachev <andy@goryachev.com> package goryachev.findfiles; import goryachev.common.util.CList; import java.util.List; /** * Text Model With Highlights. */ public class TextModelWithHighlights extends StyledTextModel { private final CList<Integer> highlightedLines; public TextModelWithHighlights(List<StyledTextModel.Line> lines, List<Integer> highlightedLines) { super(lines); this.highlightedLines = new CList(highlightedLines); } public List<Integer> getHighlights() { return highlightedLines; } }
andy-goryachev/FindFiles
src/goryachev/findfiles/TextModelWithHighlights.java
Java
apache-2.0
589
## How to read barcodes from a subfolder with Barcode Reader SDK in Delphi and ByteScout Premium Suite

### How to write robust Delphi code that reads barcodes from a subfolder with Barcode Reader SDK, with this step-by-step tutorial

On this page you will learn from code samples for programming in Delphi. Writing code to read barcodes from a subfolder with Barcode Reader SDK in Delphi can be done by programmers of any skill level using ByteScout Premium Suite.

ByteScout Premium Suite is the bundle that includes twelve SDK products from ByteScout, including tools and components for PDF, barcodes, spreadsheets, and screen video recording. It can read barcodes from a subfolder with Barcode Reader SDK in Delphi.

Want to save time? You can save a lot of time on writing and testing code by taking the Delphi sample code below and using it in your application. If you want to implement this functionality, just copy and paste the Delphi code below into your code editor, then compile and run your application. Testing the code with your own data will confirm that the function works as expected.

A free trial version of ByteScout Premium Suite is available on our website. Delphi and other programming languages are supported.
## REQUEST FREE TECH SUPPORT [Click here to get in touch](https://bytescout.zendesk.com/hc/en-us/requests/new?subject=ByteScout%20Premium%20Suite%20Question) or just send email to [support@bytescout.com](mailto:support@bytescout.com?subject=ByteScout%20Premium%20Suite%20Question) ## ON-PREMISE OFFLINE SDK [Get Your 60 Day Free Trial](https://bytescout.com/download/web-installer?utm_source=github-readme) [Explore SDK Docs](https://bytescout.com/documentation/index.html?utm_source=github-readme) [Sign Up For Online Training](https://academy.bytescout.com/) ## ON-DEMAND REST WEB API [Get your API key](https://pdf.co/documentation/api?utm_source=github-readme) [Explore Web API Documentation](https://pdf.co/documentation/api?utm_source=github-readme) [Explore Web API Samples](https://github.com/bytescout/ByteScout-SDK-SourceCode/tree/master/PDF.co%20Web%20API) ## VIDEO REVIEW [https://www.youtube.com/watch?v=NEwNs2b9YN8](https://www.youtube.com/watch?v=NEwNs2b9YN8) <!-- code block begin --> ##### ****Project1.dpr:** ``` //******************************************************************* // ByteScout Barcode Reader SDK // // Copyright � 2016 ByteScout - http://www.bytescout.com // ALL RIGHTS RESERVED // //******************************************************************* program Project1; {$APPTYPE CONSOLE} { Sample that shows reading of barcodes from images in /Images/ subfolder } uses SysUtils, ComObj, ActiveX; var reader: Variant; SR: TSearchRec; arrayOutput: Variant; varItem: Variant; i: integer; pathWithImages, imageType: string; begin CoInitialize(nil); // Disable floating point exception to conform to .NET floating point operations behavior. 
System.Set8087CW($133f);

// Create and initialize Bytescout.BarCodeReader.Reader object
reader := CreateOleObject('Bytescout.BarCodeReader.Reader');
reader.RegistrationName := 'demo';
reader.RegistrationKey := 'demo';

// Enable Code 39 decoding, you may also enable other types (dozens of types supported)
// see .BarCodeTypesToFind for more values.
reader.BarcodeTypesToFind.Code39 := true;
// For example to enable Code128 uncomment line below
// reader.BarcodeTypesToFind.Code128 := true;
// For example to enable QR Code uncomment line below
// reader.BarcodeTypesToFind.QRCode := true;

// Setup possible barcode orientations (to support rotated images)
// see BarCode Reader SDK - OrientationType enum for more variations
// uncomment to support more orientations like right to left
{
reader.Orientation := 1 or   // OrientationType.HorizontalFromLeftToRight
  16 or                      // OrientationType.HorizontalFromRightToLeft
  32768 or                   // OrientationType.VerticalFromBottomToTop
  2;                         // OrientationType.VerticalFromTopToBottom
}

// uncomment to enable processing of negative barcodes (white bars on dark background)
// reader.SearchNegative := true;

// uncomment to enable Heuristic Mode (try different image processing modes if the default failed).
// Helps on photos with poor lighting conditions.
//reader.HeuristicMode := true; // define subfolder where we have images pathWithImages := '.\Images\'; imageType := '*.jpg'; // can also be *.png, *.tiff, *.bmp, *.PDF // now search for files with images and processing them one by one if FindFirst(pathWithImages + imageType, faAnyFile, SR) = 0 then begin repeat if (SR.Attr <> faDirectory) then begin writeLn(SR.Name); writeLn('------------------'); // processing the file reader.ReadFromFile(pathWithImages + SR.Name); for i := 0 to reader.FoundCount - 1 do begin writeLn( '#' + IntToStr(i) + ' barcode with value ' + reader.GetFoundBarcodeValue(i)+ ' at ' + FloatToStr(reader.GetFoundBarCodeLeft(i)) + ',' + FloatToStr(reader.GetFoundBarCodeTop(i)) ); end; writeLn('------------------'); end; until FindNext(SR) <> 0; FindClose(SR); end; WriteLn('Press any key to exit...'); ReadLn; reader := varEmpty; end. ``` <!-- code block end -->
bytescout/ByteScout-SDK-SourceCode
Premium Suite/Delphi/Read barcodes from subfolder with barcode reader sdk/README.md
Markdown
apache-2.0
5,900
# Build targets for the Erlang tutorials (rebar3-based).
# Declare both targets phony: neither produces a file named "all" or
# "clean", so without .PHONY a file with either name would silently
# prevent the target from running.
.PHONY: all clean

# Default target: compile the project with rebar3.
all:
	rebar3 compile

# Remove build artifacts (rebar3's own clean plus the _build directory).
clean:
	rebar3 clean && rm -rf _build
rabbitmq/rabbitmq-tutorials
erlang/Makefile
Makefile
apache-2.0
62
[![Docker Stars](https://img.shields.io/docker/stars/capitalone/hygieia-ui.svg)](https://hub.docker.com/r/capitalone/hygieia-api/) [![Docker Stars](https://img.shields.io/docker/pulls/capitalone/hygieia-ui.svg)](https://hub.docker.com/r/capitalone/hygieia-api/)

## Hygieia℠ UI

### Requirements

- NodeJS
- npm
- gulp
- bower

#### Mac OS X

    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    brew install node
    npm install -g bower
    npm install -g gulp

Pull down everything that's configured with bower and npm. The commands should be:

    npm install
    bower install

You will need to update the ngFitText bower.json file to point to 'src/ng-FitText.js' instead of '/src/ng-FitText.js'

#### Windows

Install NodeJS using the MSI package available at: http://nodejs.org/download/

Issue the following commands via command line:

    npm install -g bower
    npm install -g gulp

Navigate to your project root via command line and use the following command:

    npm install

Use Git Shell to install bower in the following manner; do so from your project's root directory:

    bower install

Select option 2 when prompted for user input.

Run the dashboard with the following command:

    gulp serve

### Layouts

Layouts are under src/components/templates. Currently only capone is used. Just add ```<widget name="[your new widget name]"></widget>``` and you're good to go. All widgets have to be hardcoded into the layout right now.

### Running

In a terminal, navigate to the project root and run ```gulp serve```. The dashboard should be served on port 3000.

Local testing with mocks:

```bash
gulp serve --local true
```

Using browser-sync's [`ghostMode`](https://www.browsersync.io/docs/options#option-ghostMode) functionality:

```bash
gulp serve:ghost-mode
```

or you can run via maven from the UI project root folder

```bash
mvn clean package integration-test
```

for local testing of the Hygieia UI layer.

All data is currently coming from the test-data folder, so you shouldn't need an API; this also means no settings will be saved.
### Docker #### Create ```bash # from top-level project mvn clean package -pl UI docker:build ``` #### Run ```bash docker run -t -p 8088:80 --link hygieia-api -i hygieia-ui:latest ``` ### API server running on a custom port If the API server is running on a port other than the default (`8080`) then modify `UI/gulpfile.js` to include the custom port: ``` // Using port 8888 for the API server instead of the default (8080) var proxyTarget = config.api || 'http://localhost:8888'; ``` ### API check #### API layer successfully connected ![Image](/media/images/apiup.png) #### API layer connection unsuccessful ![Image](/media/images/apidown.png) ### ScreenShot of login page with API Layer up ![Image](/media/images/loginpage.png) ### Encryption for private repos 1. From module core generate a secret key. ``` java -jar <path-to-jar>/core-2.0.5-SNAPSHOT.jar com.capitalone.dashboard.util.Encryption ``` 2. Add this generated key to api.properties ### api.properties ``` key=<your-generated-key> ``` 3. Add the same key to your repo settings file. This is needed for the target collector to decrypt your saved repo password. For example, if your repo is github add the following. ### github.properties ``` github.key=<your-generated-key> ```
sagarvsh/test
UI/README.md
Markdown
apache-2.0
3,309
/* * Copyright 2013 Muthukumaran (https://github.com/muthuishere/). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sshutils.views.console; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @Retention(RetentionPolicy.RUNTIME) public @interface ConsoleCommand { String value(); }
muthuishere/sshcommandbroadcaster
src/com/sshutils/views/console/ConsoleCommand.java
Java
apache-2.0
849
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Collections.Generic;

namespace Google.VisualStudioFake.Internal.Jobs
{
    /// <summary>
    /// A queue of <see cref="IJob"/> items to be executed.
    /// </summary>
    public interface IJobQueue
    {
        /// <summary>
        /// True when the queue contains no jobs.
        /// </summary>
        bool Empty { get; }

        /// <summary>
        /// Adds a job to the queue.
        /// </summary>
        /// <param name="job">Job to enqueue</param>
        void Push(IJob job);

        /// <summary>
        /// Removes the next job from the queue.
        /// </summary>
        /// <param name="job">Receives the removed job, if any</param>
        /// <returns>True if a job was removed; false if the queue was empty</returns>
        bool Pop(out IJob job);

        /// <summary>
        /// Pops all jobs matching the given predicate.
        /// </summary>
        /// <param name="predicate">Predicate for jobs to be popped</param>
        /// <returns>A collection of matching jobs</returns>
        IEnumerable<IJob> Pop(Predicate<IJob> predicate);
    }
}
googlestadia/vsi-lldb
VSFake/VisualStudioFake/Internal/Jobs/IJobQueue.cs
C#
apache-2.0
1,124
/*
Copyright (c) 2016 VMware, Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package folder

import (
	"flag"
	"path"

	"github.com/RotatingFans/govmomi/govc/cli"
	"github.com/RotatingFans/govmomi/govc/flags"

	"golang.org/x/net/context"
)

// create implements the "folder.create" govc CLI command. It creates a
// plain vSphere Folder, or a StoragePod (DatastoreCluster) when -pod is set.
type create struct {
	*flags.DatacenterFlag

	pod bool // when true, create StoragePod(s) instead of plain folders
}

// init registers the command with the govc CLI framework at package load time.
func init() {
	cli.Register("folder.create", &create{})
}

// Register wires the datacenter flags and this command's own -pod flag into f.
func (cmd *create) Register(ctx context.Context, f *flag.FlagSet) {
	cmd.DatacenterFlag, ctx = flags.NewDatacenterFlag(ctx)
	cmd.DatacenterFlag.Register(ctx, f)

	f.BoolVar(&cmd.pod, "pod", false, "Create folder(s) of type StoragePod (DatastoreCluster)")
}

// Usage describes the positional arguments: one or more folder paths.
func (cmd *create) Usage() string {
	return "PATH..."
}

// Description provides the long help text shown by the CLI.
func (cmd *create) Description() string {
	return `Create folder with PATH.

Example:
govc folder.create /dc1/vm/folder-foo
`
}

// Process validates the inherited datacenter flags before Run is invoked.
func (cmd *create) Process(ctx context.Context) error {
	if err := cmd.DatacenterFlag.Process(ctx); err != nil {
		return err
	}
	return nil
}

// Run creates one folder (or StoragePod) per positional argument, stopping
// at the first error.
func (cmd *create) Run(ctx context.Context, f *flag.FlagSet) error {
	finder, err := cmd.Finder()
	if err != nil {
		return err
	}

	for _, arg := range f.Args() {
		// Split each PATH into its parent directory and the new folder name.
		dir := path.Dir(arg)
		name := path.Base(arg)

		// NOTE(review): path.Dir returns "." (never "") for paths without a
		// slash, so this branch appears unreachable — confirm intent; the
		// author may have meant `dir == "."`.
		if dir == "" {
			dir = "/"
		}

		// Resolve the parent folder in the inventory.
		folder, err := finder.Folder(ctx, dir)
		if err != nil {
			return err
		}

		var create func() error

		// Choose the creation operation based on the -pod flag.
		if cmd.pod {
			create = func() error {
				_, err = folder.CreateStoragePod(ctx, name)
				return err
			}
		} else {
			create = func() error {
				_, err = folder.CreateFolder(ctx, name)
				return err
			}
		}

		err = create()
		if err != nil {
			return err
		}
	}

	return nil
}
RotatingFans/govmomi
govc/folder/create.go
GO
apache-2.0
2,092
-- PrestaShop upgrade script to version 1.6.0.6.
-- PREFIX_ is replaced with the shop's table prefix by the upgrade runner.
SET NAMES 'utf8';

-- Speed up tax lookups on order invoices.
ALTER TABLE `PREFIX_order_invoice_tax` ADD INDEX (`id_tax`);

-- Register additional front-office pages in the meta table
-- (INSERT IGNORE: skip pages that already exist).
INSERT IGNORE INTO `PREFIX_meta` (`id_meta`, `page`, `configurable`) VALUES (NULL, 'products-comparison', '1'), (NULL, 'cms', '0'), (NULL, 'category', '0'), (NULL, 'product', '0'), (NULL, 'module-bankwire-payment', '0'), (NULL, 'module-bankwire-validation', '0'), (NULL, 'module-cheque-validation', '0'), (NULL, 'module-cheque-payment', '0');

-- Link every theme to every meta page, seeding the column layout
-- from each theme's defaults.
INSERT IGNORE INTO `PREFIX_theme_meta` ( `id_theme` , `id_meta` , `left_column` , `right_column` ) SELECT `PREFIX_theme`.`id_theme` , `PREFIX_meta`.`id_meta` , `default_left_column` , `default_right_column` FROM `PREFIX_theme` , `PREFIX_meta`;

-- New flag to hide back-office tabs when running in hosted mode.
ALTER TABLE `PREFIX_tab` ADD `hide_host_mode` tinyint(1) NOT NULL DEFAULT '0' AFTER `active`;

-- Reset all employees to the default back-office theme.
UPDATE `PREFIX_employee` SET `bo_theme` = 'default';

-- Recreate the cart_default image type with fixed 80x80 dimensions.
DELETE FROM `PREFIX_image_type` WHERE `name` = 'cart_default';
INSERT INTO `PREFIX_image_type` (`id_image_type`,`name`,`width`,`height`,`products`,`categories`,`manufacturers`,`suppliers`,`scenes`,`stores`) VALUES (NULL, 'cart_default', '80', '80', '1', '0', '0', '0', '0', '0');

-- Index both cart-rule combination columns for faster joins.
ALTER TABLE `PREFIX_cart_rule_combination` ADD INDEX `id_cart_rule_1` (`id_cart_rule_1`);
ALTER TABLE `PREFIX_cart_rule_combination` ADD INDEX `id_cart_rule_2` (`id_cart_rule_2`);

-- NOTE(review): the comment below appears to be a PrestaShop upgrade hook
-- (the runner executes the named PHP callback) — do not remove or reformat.
/* PHP:p1606module_exceptions(); */;
djfm/pstaf
FunctionalTest/UpgradeTest/prestashop/install-dev/upgrade/sql/1.6.0.6.sql
SQL
apache-2.0
1,340
package com.tlorrain.android.rezenerator.core.log; public class NoopLogger implements Logger { @Override public void info(String info) { } @Override public void verbose(String debug) { } @Override public void error(String error) { } @Override public void verbose(Exception exception) { } }
fredszaq/Rezenerator
rezenerator-core/src/main/java/com/tlorrain/android/rezenerator/core/log/NoopLogger.java
Java
apache-2.0
308
<?php namespace Modules\Blog\Http\Controllers; use Illuminate\Routing\Controller; class CategoryController extends Controller { public function show($slug) { dd('NYI'); } }
vainproject/vain-blog
src/Blog/Http/Controllers/CategoryController.php
PHP
apache-2.0
196
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_12) on Sun Feb 08 17:27:07 PST 2009 --> <META http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"> <TITLE> net.sourceforge.pmd.util.viewer.gui.menu Class Hierarchy (PMD 4.2.5 API) </TITLE> <META NAME="date" CONTENT="2009-02-08"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="net.sourceforge.pmd.util.viewer.gui.menu Class Hierarchy (PMD 4.2.5 API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD 
BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/gui/package-tree.html"><B>PREV</B></A>&nbsp; &nbsp;<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/model/package-tree.html"><B>NEXT</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../index.html?net/sourceforge/pmd/util/viewer/gui/menu/package-tree.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> Hierarchy For Package net.sourceforge.pmd.util.viewer.gui.menu </H2> </CENTER> <DL> <DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../../overview-tree.html">All Packages</A></DL> <HR> <H2> Class Hierarchy </H2> <UL> <LI TYPE="circle">java.lang.Object<UL> <LI TYPE="circle">java.awt.Component (implements java.awt.image.ImageObserver, java.awt.MenuContainer, java.io.Serializable) <UL> <LI TYPE="circle">java.awt.Container<UL> <LI TYPE="circle">javax.swing.JComponent (implements java.io.Serializable) <UL> <LI TYPE="circle">javax.swing.AbstractButton (implements java.awt.ItemSelectable, javax.swing.SwingConstants) <UL> <LI 
TYPE="circle">javax.swing.JMenuItem (implements javax.accessibility.Accessible, javax.swing.MenuElement) <UL> <LI TYPE="circle">javax.swing.JMenu (implements javax.accessibility.Accessible, javax.swing.MenuElement) <UL> <LI TYPE="circle">net.sourceforge.pmd.util.viewer.gui.menu.<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/gui/menu/AttributesSubMenu.html" title="class in net.sourceforge.pmd.util.viewer.gui.menu"><B>AttributesSubMenu</B></A><LI TYPE="circle">net.sourceforge.pmd.util.viewer.gui.menu.<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/gui/menu/SimpleNodeSubMenu.html" title="class in net.sourceforge.pmd.util.viewer.gui.menu"><B>SimpleNodeSubMenu</B></A></UL> <LI TYPE="circle">net.sourceforge.pmd.util.viewer.gui.menu.<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/gui/menu/XPathFragmentAddingItem.html" title="class in net.sourceforge.pmd.util.viewer.gui.menu"><B>XPathFragmentAddingItem</B></A> (implements java.awt.event.ActionListener) </UL> </UL> <LI TYPE="circle">javax.swing.JPopupMenu (implements javax.accessibility.Accessible, javax.swing.MenuElement) <UL> <LI TYPE="circle">net.sourceforge.pmd.util.viewer.gui.menu.<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/gui/menu/ASTNodePopupMenu.html" title="class in net.sourceforge.pmd.util.viewer.gui.menu"><B>ASTNodePopupMenu</B></A></UL> </UL> </UL> </UL> </UL> </UL> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD 
BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/gui/package-tree.html"><B>PREV</B></A>&nbsp; &nbsp;<A HREF="../../../../../../../net/sourceforge/pmd/util/viewer/model/package-tree.html"><B>NEXT</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../index.html?net/sourceforge/pmd/util/viewer/gui/menu/package-tree.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &copy; 2002-2009 InfoEther. All Rights Reserved. 
</BODY> </HTML>
deleidos/digitaledge-platform
commons/buildtools/pmd/docs/apidocs/net/sourceforge/pmd/util/viewer/gui/menu/package-tree.html
HTML
apache-2.0
8,134
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.skylark; import static com.google.devtools.build.lib.analysis.BaseRuleClasses.RUN_UNDER; import static com.google.devtools.build.lib.packages.Attribute.attr; import static com.google.devtools.build.lib.packages.BuildType.LABEL; import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST; import static com.google.devtools.build.lib.packages.BuildType.LICENSE; import static com.google.devtools.build.lib.syntax.SkylarkType.castMap; import static com.google.devtools.build.lib.syntax.Type.BOOLEAN; import static com.google.devtools.build.lib.syntax.Type.INTEGER; import static com.google.devtools.build.lib.syntax.Type.STRING; import static com.google.devtools.build.lib.syntax.Type.STRING_LIST; import com.google.common.base.Preconditions; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.BaseRuleClasses; import com.google.devtools.build.lib.analysis.TemplateVariableInfo; import com.google.devtools.build.lib.analysis.config.ConfigAwareRuleClassBuilder; import 
com.google.devtools.build.lib.analysis.config.HostTransition; import com.google.devtools.build.lib.analysis.skylark.SkylarkAttr.Descriptor; import com.google.devtools.build.lib.analysis.test.TestConfiguration; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.LabelSyntaxException; import com.google.devtools.build.lib.cmdline.LabelValidator; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.AttributeMap; import com.google.devtools.build.lib.packages.AttributeValueSource; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SkylarkImplicitOutputsFunctionWithCallback; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SkylarkImplicitOutputsFunctionWithMap; import com.google.devtools.build.lib.packages.Package.NameConflictException; import com.google.devtools.build.lib.packages.PackageFactory; import com.google.devtools.build.lib.packages.PackageFactory.PackageContext; import com.google.devtools.build.lib.packages.PredicateWithMessage; import com.google.devtools.build.lib.packages.Provider; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType; import com.google.devtools.build.lib.packages.RuleClass.ExecutionPlatformConstraintsAllowed; import com.google.devtools.build.lib.packages.RuleFactory; import com.google.devtools.build.lib.packages.RuleFactory.BuildLangTypedAttributeValuesMap; import com.google.devtools.build.lib.packages.RuleFactory.InvalidRuleException; import com.google.devtools.build.lib.packages.RuleFunction; import com.google.devtools.build.lib.packages.SkylarkAspect; import com.google.devtools.build.lib.packages.SkylarkDefinedAspect; import com.google.devtools.build.lib.packages.SkylarkExportable; import com.google.devtools.build.lib.packages.SkylarkProvider; import 
com.google.devtools.build.lib.packages.SkylarkProviderIdentifier; import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.packages.TestSize; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec; import com.google.devtools.build.lib.skylarkbuildapi.SkylarkRuleFunctionsApi; import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter; import com.google.devtools.build.lib.syntax.BaseFunction; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.EvalUtils; import com.google.devtools.build.lib.syntax.FuncallExpression; import com.google.devtools.build.lib.syntax.FunctionSignature; import com.google.devtools.build.lib.syntax.Runtime; import com.google.devtools.build.lib.syntax.SkylarkCallbackFunction; import com.google.devtools.build.lib.syntax.SkylarkDict; import com.google.devtools.build.lib.syntax.SkylarkList; import com.google.devtools.build.lib.syntax.SkylarkType; import com.google.devtools.build.lib.syntax.SkylarkUtils; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.build.lib.syntax.Type.ConversionException; import com.google.devtools.build.lib.util.FileTypeSet; import com.google.devtools.build.lib.util.Pair; import java.util.Map; import java.util.concurrent.ExecutionException; /** * A helper class to provide an easier API for Skylark rule definitions. */ public class SkylarkRuleClassFunctions implements SkylarkRuleFunctionsApi<Artifact> { // TODO(bazel-team): Copied from ConfiguredRuleClassProvider for the transition from built-in // rules to skylark extensions. Using the same instance would require a large refactoring. // If we don't want to support old built-in rules and Skylark simultaneously // (except for transition phase) it's probably OK. 
private static final LoadingCache<String, Label> labelCache = CacheBuilder.newBuilder() .build( new CacheLoader<String, Label>() { @Override public Label load(String from) throws Exception { try { return Label.parseAbsolute( from, /* defaultToMain=*/ false, /* repositoryMapping= */ ImmutableMap.of()); } catch (LabelSyntaxException e) { throw new Exception(from); } } }); // TODO(bazel-team): Remove the code duplication (BaseRuleClasses and this class). /** Parent rule class for non-executable non-test Skylark rules. */ public static final RuleClass baseRule = BaseRuleClasses.commonCoreAndSkylarkAttributes( BaseRuleClasses.nameAttribute( new RuleClass.Builder("$base_rule", RuleClassType.ABSTRACT, true)) .add(attr("expect_failure", STRING))) // TODO(skylark-team): Allow Skylark rules to extend native rules and remove duplication. .add( attr("toolchains", LABEL_LIST) .allowedFileTypes(FileTypeSet.NO_FILE) .mandatoryProviders(ImmutableList.of(TemplateVariableInfo.PROVIDER.id())) .dontCheckConstraints()) .build(); /** Parent rule class for executable non-test Skylark rules. */ public static final RuleClass binaryBaseRule = new RuleClass.Builder("$binary_base_rule", RuleClassType.ABSTRACT, true, baseRule) .add(attr("args", STRING_LIST)) .add(attr("output_licenses", LICENSE)) .build(); /** Parent rule class for test Skylark rules. 
*/ public static final RuleClass getTestBaseRule(String toolsRepository) { return new RuleClass.Builder("$test_base_rule", RuleClassType.ABSTRACT, true, baseRule) .requiresConfigurationFragments(TestConfiguration.class) .add( attr("size", STRING) .value("medium") .taggable() .nonconfigurable("used in loading phase rule validation logic")) .add( attr("timeout", STRING) .taggable() .nonconfigurable("used in loading phase rule validation logic") .value(timeoutAttribute)) .add( attr("flaky", BOOLEAN) .value(false) .taggable() .nonconfigurable("taggable - called in Rule.getRuleTags")) .add(attr("shard_count", INTEGER).value(-1)) .add( attr("local", BOOLEAN) .value(false) .taggable() .nonconfigurable( "policy decision: this should be consistent across configurations")) .add(attr("args", STRING_LIST)) // Input files for every test action .add( attr("$test_runtime", LABEL_LIST) .cfg(HostTransition.INSTANCE) .value( ImmutableList.of( labelCache.getUnchecked(toolsRepository + "//tools/test:runtime")))) .add( attr("$test_setup_script", LABEL) .cfg(HostTransition.INSTANCE) .singleArtifact() .value(labelCache.getUnchecked(toolsRepository + "//tools/test:test_setup"))) .add( attr("$xml_generator_script", LABEL) .cfg(HostTransition.INSTANCE) .singleArtifact() .value( labelCache.getUnchecked(toolsRepository + "//tools/test:test_xml_generator"))) .add( attr("$collect_coverage_script", LABEL) .cfg(HostTransition.INSTANCE) .singleArtifact() .value(labelCache.getUnchecked(toolsRepository + "//tools/test:collect_coverage"))) // Input files for test actions collecting code coverage .add( attr(":coverage_support", LABEL) .cfg(HostTransition.INSTANCE) .value( BaseRuleClasses.coverageSupportAttribute( labelCache.getUnchecked( toolsRepository + BaseRuleClasses.DEFAULT_COVERAGE_SUPPORT_VALUE)))) // Used in the one-per-build coverage report generation action. 
.add( attr(":coverage_report_generator", LABEL) .cfg(HostTransition.INSTANCE) .value( BaseRuleClasses.coverageReportGeneratorAttribute( labelCache.getUnchecked( toolsRepository + BaseRuleClasses.DEFAULT_COVERAGE_REPORT_GENERATOR_VALUE)))) .add(attr(":run_under", LABEL).value(RUN_UNDER)) .executionPlatformConstraintsAllowed(ExecutionPlatformConstraintsAllowed.PER_TARGET) .build(); } @AutoCodec @AutoCodec.VisibleForSerialization static final Attribute.ComputedDefault timeoutAttribute = new Attribute.ComputedDefault() { @Override public Object getDefault(AttributeMap rule) { TestSize size = TestSize.getTestSize(rule.get("size", Type.STRING)); if (size != null) { String timeout = size.getDefaultTimeout().toString(); if (timeout != null) { return timeout; } } return "illegal"; } }; @Override public Provider provider(String doc, Object fields, Location location) throws EvalException { Iterable<String> fieldNames = null; if (fields instanceof SkylarkList<?>) { @SuppressWarnings("unchecked") SkylarkList<String> list = (SkylarkList<String>) SkylarkType.cast( fields, SkylarkList.class, String.class, location, "Expected list of strings or dictionary of string -> string for 'fields'"); fieldNames = list; } else if (fields instanceof SkylarkDict) { Map<String, String> dict = SkylarkType.castMap( fields, String.class, String.class, "Expected list of strings or dictionary of string -> string for 'fields'"); fieldNames = dict.keySet(); } return SkylarkProvider.createUnexportedSchemaful(fieldNames, location); } // TODO(bazel-team): implement attribute copy and other rule properties @Override @SuppressWarnings({"rawtypes", "unchecked"}) // castMap produces // an Attribute.Builder instead of a Attribute.Builder<?> but it's OK. 
public BaseFunction rule( BaseFunction implementation, Boolean test, Object attrs, Object implicitOutputs, Boolean executable, Boolean outputToGenfiles, SkylarkList<?> fragments, SkylarkList<?> hostFragments, Boolean skylarkTestable, SkylarkList<?> toolchains, String doc, SkylarkList<?> providesArg, Boolean executionPlatformConstraintsAllowed, SkylarkList<?> execCompatibleWith, FuncallExpression ast, Environment funcallEnv) throws EvalException, ConversionException { funcallEnv.checkLoadingOrWorkspacePhase("rule", ast.getLocation()); RuleClassType type = test ? RuleClassType.TEST : RuleClassType.NORMAL; RuleClass parent = test ? getTestBaseRule(SkylarkUtils.getToolsRepository(funcallEnv)) : (executable ? binaryBaseRule : baseRule); // We'll set the name later, pass the empty string for now. RuleClass.Builder builder = new RuleClass.Builder("", type, true, parent); ImmutableList<Pair<String, SkylarkAttr.Descriptor>> attributes = attrObjectToAttributesList(attrs, ast); if (skylarkTestable) { builder.setSkylarkTestable(); } if (executable || test) { addAttribute( ast.getLocation(), builder, attr("$is_executable", BOOLEAN) .value(true) .nonconfigurable("Called from RunCommand.isExecutable, which takes a Target") .build()); builder.setExecutableSkylark(); } if (implicitOutputs != Runtime.NONE) { if (implicitOutputs instanceof BaseFunction) { BaseFunction func = (BaseFunction) implicitOutputs; SkylarkCallbackFunction callback = new SkylarkCallbackFunction(func, ast, funcallEnv.getSemantics()); builder.setImplicitOutputsFunction( new SkylarkImplicitOutputsFunctionWithCallback(callback, ast.getLocation())); } else { builder.setImplicitOutputsFunction( new SkylarkImplicitOutputsFunctionWithMap( ImmutableMap.copyOf( castMap( implicitOutputs, String.class, String.class, "implicit outputs of the rule class")))); } } if (outputToGenfiles) { builder.setOutputToGenfiles(); } builder.requiresConfigurationFragmentsBySkylarkModuleName( fragments.getContents(String.class, 
"fragments")); ConfigAwareRuleClassBuilder.of(builder) .requiresHostConfigurationFragmentsBySkylarkModuleName( hostFragments.getContents(String.class, "host_fragments")); builder.setConfiguredTargetFunction(implementation); builder.setRuleDefinitionEnvironmentLabelAndHashCode( funcallEnv.getGlobals().getTransitiveLabel(), funcallEnv.getTransitiveContentHashCode()); builder.addRequiredToolchains( collectToolchainLabels( toolchains.getContents(String.class, "toolchains"), ast.getLocation())); for (Object o : providesArg) { if (!SkylarkAttr.isProvider(o)) { throw new EvalException( ast.getLocation(), String.format( "Illegal argument: element in 'provides' is of unexpected type. " + "Should be list of providers, but got item of type %s.", EvalUtils.getDataTypeName(o, true))); } } for (SkylarkProviderIdentifier skylarkProvider : SkylarkAttr.getSkylarkProviderIdentifiers(providesArg, ast.getLocation())) { builder.advertiseSkylarkProvider(skylarkProvider); } if (!execCompatibleWith.isEmpty()) { builder.addExecutionPlatformConstraints( collectConstraintLabels( execCompatibleWith.getContents(String.class, "exec_compatile_with"), ast.getLocation())); } if (executionPlatformConstraintsAllowed) { builder.executionPlatformConstraintsAllowed(ExecutionPlatformConstraintsAllowed.PER_TARGET); } return new SkylarkRuleFunction(builder, type, attributes, ast.getLocation()); } protected static ImmutableList<Pair<String, Descriptor>> attrObjectToAttributesList( Object attrs, FuncallExpression ast) throws EvalException { ImmutableList.Builder<Pair<String, Descriptor>> attributes = ImmutableList.builder(); if (attrs != Runtime.NONE) { for (Map.Entry<String, Descriptor> attr : castMap(attrs, String.class, Descriptor.class, "attrs").entrySet()) { Descriptor attrDescriptor = attr.getValue(); AttributeValueSource source = attrDescriptor.getValueSource(); String attrName = source.convertToNativeName(attr.getKey(), ast.getLocation()); attributes.add(Pair.of(attrName, attrDescriptor)); } } 
return attributes.build(); } private static void addAttribute( Location location, RuleClass.Builder builder, Attribute attribute) throws EvalException { try { builder.addOrOverrideAttribute(attribute); } catch (IllegalArgumentException ex) { throw new EvalException(location, ex); } } private static ImmutableList<Label> collectToolchainLabels( Iterable<String> rawLabels, Location loc) throws EvalException { ImmutableList.Builder<Label> requiredToolchains = new ImmutableList.Builder<>(); for (String rawLabel : rawLabels) { try { Label toolchainLabel = Label.parseAbsolute(rawLabel, ImmutableMap.of()); requiredToolchains.add(toolchainLabel); } catch (LabelSyntaxException e) { throw new EvalException( loc, String.format("Unable to parse toolchain %s: %s", rawLabel, e.getMessage()), e); } } return requiredToolchains.build(); } private static ImmutableList<Label> collectConstraintLabels( Iterable<String> rawLabels, Location loc) throws EvalException { ImmutableList.Builder<Label> constraintLabels = new ImmutableList.Builder<>(); for (String rawLabel : rawLabels) { try { Label constraintLabel = Label.parseAbsolute(rawLabel, ImmutableMap.of()); constraintLabels.add(constraintLabel); } catch (LabelSyntaxException e) { throw new EvalException( loc, String.format("Unable to parse constraint %s: %s", rawLabel, e.getMessage()), e); } } return constraintLabels.build(); } @Override public SkylarkAspect aspect( BaseFunction implementation, SkylarkList<?> attributeAspects, Object attrs, SkylarkList<?> requiredAspectProvidersArg, SkylarkList<?> providesArg, SkylarkList<?> fragments, SkylarkList<?> hostFragments, SkylarkList<?> toolchains, String doc, FuncallExpression ast, Environment funcallEnv) throws EvalException { Location location = ast.getLocation(); ImmutableList.Builder<String> attrAspects = ImmutableList.builder(); for (Object attributeAspect : attributeAspects) { String attrName = STRING.convert(attributeAspect, "attr_aspects"); if (attrName.equals("*") && 
attributeAspects.size() != 1) { throw new EvalException( ast.getLocation(), "'*' must be the only string in 'attr_aspects' list"); } if (!attrName.startsWith("_")) { attrAspects.add(attrName); } else { // Implicit attribute names mean either implicit or late-bound attributes // (``$attr`` or ``:attr``). Depend on both. attrAspects.add( AttributeValueSource.COMPUTED_DEFAULT.convertToNativeName(attrName, location)); attrAspects.add( AttributeValueSource.LATE_BOUND.convertToNativeName(attrName, location)); } } ImmutableList<Pair<String, SkylarkAttr.Descriptor>> descriptors = attrObjectToAttributesList(attrs, ast); ImmutableList.Builder<Attribute> attributes = ImmutableList.builder(); ImmutableSet.Builder<String> requiredParams = ImmutableSet.builder(); for (Pair<String, Descriptor> nameDescriptorPair : descriptors) { String nativeName = nameDescriptorPair.first; boolean hasDefault = nameDescriptorPair.second.hasDefault(); Attribute attribute = nameDescriptorPair.second.build(nameDescriptorPair.first); if (attribute.getType() == Type.STRING && ((String) attribute.getDefaultValue(null)).isEmpty()) { hasDefault = false; // isValueSet() is always true for attr.string. 
} if (!Attribute.isImplicit(nativeName) && !Attribute.isLateBound(nativeName)) { if (!attribute.checkAllowedValues() || attribute.getType() != Type.STRING) { throw new EvalException( ast.getLocation(), String.format( "Aspect parameter attribute '%s' must have type 'string' and use the " + "'values' restriction.", nativeName)); } if (!hasDefault) { requiredParams.add(nativeName); } else { PredicateWithMessage<Object> allowed = attribute.getAllowedValues(); Object defaultVal = attribute.getDefaultValue(null); if (!allowed.apply(defaultVal)) { throw new EvalException( ast.getLocation(), String.format( "Aspect parameter attribute '%s' has a bad default value: %s", nativeName, allowed.getErrorReason(defaultVal))); } } } else if (!hasDefault) { // Implicit or late bound attribute String skylarkName = "_" + nativeName.substring(1); throw new EvalException( ast.getLocation(), String.format("Aspect attribute '%s' has no default value.", skylarkName)); } attributes.add(attribute); } for (Object o : providesArg) { if (!SkylarkAttr.isProvider(o)) { throw new EvalException( ast.getLocation(), String.format( "Illegal argument: element in 'provides' is of unexpected type. " + "Should be list of providers, but got item of type %s. ", EvalUtils.getDataTypeName(o, true))); } } return new SkylarkDefinedAspect( implementation, attrAspects.build(), attributes.build(), SkylarkAttr.buildProviderPredicate( requiredAspectProvidersArg, "required_aspect_providers", ast.getLocation()), SkylarkAttr.getSkylarkProviderIdentifiers(providesArg, ast.getLocation()), requiredParams.build(), ImmutableSet.copyOf(fragments.getContents(String.class, "fragments")), HostTransition.INSTANCE, ImmutableSet.copyOf(hostFragments.getContents(String.class, "host_fragments")), collectToolchainLabels( toolchains.getContents(String.class, "toolchains"), ast.getLocation())); } /** * The implementation for the magic function "rule" that creates Skylark rule classes. 
* * <p>Exactly one of {@link #builder} or {@link #ruleClass} is null except inside {@link #export}. */ public static final class SkylarkRuleFunction extends BaseFunction implements SkylarkExportable, RuleFunction { private RuleClass.Builder builder; private RuleClass ruleClass; private final RuleClassType type; private ImmutableList<Pair<String, SkylarkAttr.Descriptor>> attributes; private final Location definitionLocation; private Label skylarkLabel; public SkylarkRuleFunction( RuleClass.Builder builder, RuleClassType type, ImmutableList<Pair<String, SkylarkAttr.Descriptor>> attributes, Location definitionLocation) { super("rule", FunctionSignature.KWARGS); this.builder = builder; this.type = type; this.attributes = attributes; this.definitionLocation = definitionLocation; } /** This is for post-export reconstruction for serialization. */ private SkylarkRuleFunction( RuleClass ruleClass, RuleClassType type, Location definitionLocation, Label skylarkLabel) { super("rule", FunctionSignature.KWARGS); Preconditions.checkNotNull( ruleClass, "RuleClass must be non-null as this SkylarkRuleFunction should have been exported."); Preconditions.checkNotNull( skylarkLabel, "Label must be non-null as this SkylarkRuleFunction should have been exported."); this.ruleClass = ruleClass; this.type = type; this.definitionLocation = definitionLocation; this.skylarkLabel = skylarkLabel; } @Override @SuppressWarnings("unchecked") // the magic hidden $pkg_context variable is guaranteed // to be a PackageContext public Object call(Object[] args, FuncallExpression ast, Environment env) throws EvalException, InterruptedException, ConversionException { env.checkLoadingPhase(getName(), ast.getLocation()); if (ruleClass == null) { throw new EvalException(ast.getLocation(), "Invalid rule class hasn't been exported by a Skylark file"); } for (Attribute attribute : ruleClass.getAttributes()) { // TODO(dslomov): If a Skylark parameter extractor is specified for this aspect, its // attributes may 
not be required. for (Map.Entry<String, ImmutableSet<String>> attrRequirements : attribute.getRequiredAspectParameters().entrySet()) { for (String required : attrRequirements.getValue()) { if (!ruleClass.hasAttr(required, Type.STRING)) { throw new EvalException(definitionLocation, String.format( "Aspect %s requires rule %s to specify attribute '%s' with type string.", attrRequirements.getKey(), ruleClass.getName(), required)); } } } } BuildLangTypedAttributeValuesMap attributeValues = new BuildLangTypedAttributeValuesMap((Map<String, Object>) args[0]); try { PackageContext pkgContext = (PackageContext) env.lookup(PackageFactory.PKG_CONTEXT); if (pkgContext == null) { throw new EvalException(ast.getLocation(), "Cannot instantiate a rule when loading a .bzl file. Rules can only be called from " + "a BUILD file (possibly via a macro)."); } RuleFactory.createAndAddRule( pkgContext, ruleClass, attributeValues, ast, env, pkgContext.getAttributeContainerFactory().apply(ruleClass)); return Runtime.NONE; } catch (InvalidRuleException | NameConflictException e) { throw new EvalException(ast.getLocation(), e.getMessage()); } } /** * Export a RuleFunction from a Skylark file with a given name. 
*/ public void export(Label skylarkLabel, String ruleClassName) throws EvalException { Preconditions.checkState(ruleClass == null && builder != null); this.skylarkLabel = skylarkLabel; if (type == RuleClassType.TEST != TargetUtils.isTestRuleName(ruleClassName)) { throw new EvalException(definitionLocation, "Invalid rule class name '" + ruleClassName + "', test rule class names must end with '_test' and other rule classes must not"); } for (Pair<String, SkylarkAttr.Descriptor> attribute : attributes) { SkylarkAttr.Descriptor descriptor = attribute.getSecond(); addAttribute(definitionLocation, builder, descriptor.build(attribute.getFirst())); } try { this.ruleClass = builder.build(ruleClassName, skylarkLabel + "%" + ruleClassName); } catch (IllegalArgumentException | IllegalStateException ex) { throw new EvalException(location, ex); } this.builder = null; this.attributes = null; } public RuleClass getRuleClass() { Preconditions.checkState(ruleClass != null && builder == null); return ruleClass; } @Override public boolean isExported() { return skylarkLabel != null; } @Override public void repr(SkylarkPrinter printer) { printer.append("<rule>"); } } @Override public Label label( String labelString, Boolean relativeToCallerRepository, Location loc, Environment env) throws EvalException { Label parentLabel = null; if (relativeToCallerRepository) { parentLabel = env.getCallerLabel(); } else { parentLabel = env.getGlobals().getTransitiveLabel(); } try { if (parentLabel != null) { LabelValidator.parseAbsoluteLabel(labelString); // TODO(dannark): pass the environment here labelString = parentLabel .getRelativeWithRemapping(labelString, ImmutableMap.of()) .getUnambiguousCanonicalForm(); } return labelCache.get(labelString); } catch (LabelValidator.BadLabelException | LabelSyntaxException | ExecutionException e) { throw new EvalException(loc, "Illegal absolute label syntax: " + labelString); } } @Override public SkylarkFileType fileType(SkylarkList types, Location loc, 
Environment env) throws EvalException { if (env.getSemantics().incompatibleDisallowFileType()) { throw new EvalException( loc, "FileType function is not available. You may use a list of strings instead. " + "You can temporarily reenable the function by passing the flag " + "--incompatible_disallow_filetype=false"); } return SkylarkFileType.of(types.getContents(String.class, "types")); } }
dropbox/bazel
src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleClassFunctions.java
Java
apache-2.0
30,252
package com.java110.fee.bmo.feeFormula; import com.java110.po.feeFormula.FeeFormulaPo; import org.springframework.http.ResponseEntity; public interface IDeleteFeeFormulaBMO { /** * 修改费用公式 * add by wuxw * @param feeFormulaPo * @return */ ResponseEntity<String> delete(FeeFormulaPo feeFormulaPo); }
java110/MicroCommunity
service-fee/src/main/java/com/java110/fee/bmo/feeFormula/IDeleteFeeFormulaBMO.java
Java
apache-2.0
347
1 /* 2 * $Id: TestActionRedirect.java 514052 2007-03-03 02:00:37Z pbenedict $ 3 * 4 * Licensed to the Apache Software Foundation (ASF) under one 5 * or more contributor license agreements. See the NOTICE file 6 * distributed with this work for additional information 7 * regarding copyright ownership. The ASF licenses this file 8 * to you under the Apache License, Version 2.0 (the 9 * "License"); you may not use this file except in compliance 10 * with the License. You may obtain a copy of the License at 11 * 12 * http://www.apache.org/licenses/LICENSE-2.0 13 * 14 * Unless required by applicable law or agreed to in writing, 15 * software distributed under the License is distributed on an 16 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 17 * KIND, either express or implied. See the License for the 18 * specific language governing permissions and limitations 19 * under the License. 20 */ 21 package org.apache.struts.action; 22 23 import junit.framework.AssertionFailedError; 24 import junit.framework.ComparisonFailure; 25 import junit.framework.TestCase; 26 import junit.framework.TestSuite; 27 28 import java.util.Map; 29 30 /** 31 * <p>Unit tests for the {@link ActionRedirect} class.</p> 32 * 33 * @version $Rev: 514052 $ $Date: 2007-03-02 20:00:37 -0600 (Fri, 02 Mar 2007) $ 34 */ 35 public class TestActionRedirect extends TestCase { 36 public TestActionRedirect(String s) { 37 super(s); 38 } 39 40 public static TestSuite getSuite() { 41 return new TestSuite(TestActionRedirect.class); 42 } 43 44 public static void main(String[] args) { 45 junit.textui.TestRunner runner = new junit.textui.TestRunner(); 46 47 runner.doRun(TestActionRedirect.getSuite()); 48 } 49 50 // ----------------------------------------------------- Test Methods 51 52 /** 53 * Check that the redirect flag is set. 
54 */ 55 public void testActionRedirectRedirectFlag() { 56 ActionRedirect ar = new ActionRedirect("/path.do"); 57 58 assertTrue("Redirect flag should be set to true.", ar.getRedirect()); 59 } 60 61 /** 62 * Test all addParameter methods accepting different data types. 63 */ 64 public void testActionRedirectAddParameter() { 65 ActionRedirect ar = new ActionRedirect("/path.do"); 66 67 ar.addParameter("st", "test"); 68 ar.addParameter("obj", new StringBuffer("someString")); 69 70 assertTrue("Incorrect path", ar.getPath().indexOf("/path.do") == 0); 71 assertHasParameter(ar.parameterValues, "st", "test"); 72 assertHasParameter(ar.parameterValues, "obj", "someString"); 73 } 74 75 /** 76 * Test redirect with anchor. 77 */ 78 public void testActionRedirectWithAnchor() { 79 ActionRedirect ar = new ActionRedirect("/path.do"); 80 81 ar.addParameter("st", "test"); 82 ar.setAnchor("foo"); 83 84 assertTrue("Incorrect path", "/path.do?st=test#foo".equals(ar.getPath())); 85 } 86 87 /** 88 * Test adding parameters with the same name. 89 */ 90 public void testActionRedirectAddSameNameParameter() { 91 ActionRedirect ar = new ActionRedirect("/path.do"); 92 93 ar.addParameter("param", "param1"); 94 ar.addParameter("param", "param2"); 95 ar.addParameter("param", new StringBuffer("someString")); 96 97 assertTrue("Incorrect path", ar.getPath().indexOf("/path.do") == 0); 98 assertHasParameter(ar.parameterValues, "param", "param1"); 99 assertHasParameter(ar.parameterValues, "param", "param2"); 100 assertHasParameter(ar.parameterValues, "param", "someString"); 101 assertEquals("Incorrect number of parameters", 3, 102 countParameters(ar.parameterValues, "param")); 103 } 104 105 /** 106 * Test creating an ActionRedirect which copies its configuration from an 107 * existing ActionForward (except for the "redirect" property). 
108 */ 109 public void testActionRedirectFromExistingForward() { 110 ActionForward forward = new ActionForward("/path.do?param=param1"); 111 forward.setRedirect(false); 112 forward.setProperty("key","value"); 113 114 ActionRedirect ar = new ActionRedirect(forward); 115 116 ar.addParameter("param", "param2"); 117 ar.addParameter("object1", new StringBuffer("someString")); 118 119 assertTrue("Incorrect path", ar.getPath().indexOf("/path.do") == 0); 120 assertHasParameter(ar.parameterValues, "param", "param2"); 121 assertHasParameter(ar.parameterValues, "object1", "someString"); 122 assertEquals("Incorrect original path.", forward.getPath(), 123 ar.getOriginalPath()); 124 assertEquals("Incorrect or missing property", "value", 125 ar.getProperty("key")); 126 assertTrue("Original had redirect to false", ar.getRedirect()); 127 } 128 129 /** 130 * Assert that the given parameters contains an entry for 131 * <code>paramValue</code> under the <code>paramName</code> key. <p/> 132 * 133 * @param parameters the map of parameters to check into 134 * @param paramName the key of the value to be checked 135 * @param paramValue the value to check for 136 */ 137 static void assertHasParameter(Map parameters, String paramName, 138 String paramValue) { 139 Object value = parameters.get(paramName); 140 141 if (value == null) { 142 throw new AssertionFailedError("Parameter [" + paramName 143 + "] not found"); 144 } 145 146 if (value instanceof String) { 147 if (!paramValue.equals(value)) { 148 throw new ComparisonFailure("Incorrect value found", 149 paramValue, (String) value); 150 } 151 } else if (value instanceof String[]) { 152 // see if our value is among those in the array 153 String[] values = (String[]) value; 154 155 for (int i = 0; i < values.length; i++) { 156 if (paramValue.equals(values[i])) { 157 return; 158 } 159 } 160 161 throw new AssertionFailedError( 162 "Expected value not found for parameter [" + paramName + "]"); 163 } else { 164 // can't recognize the value 165 
throw new AssertionFailedError( 166 "Unexpected type found as parameter value for [" + paramName 167 + "]"); 168 } 169 } 170 171 /** 172 * Determine the number of values that are available for a specific 173 * parameter. <p/> 174 * 175 * @param parameters the map of parameters to check into 176 * @param paramName the key of the value(s) to count 177 * @return the number of values for the specified parameter 178 */ 179 static int countParameters(Map parameters, String paramName) { 180 Object value = parameters.get(paramName); 181 182 if (value == null) { 183 return 0; 184 } 185 186 if (value instanceof String) { 187 return 1; 188 } else if (value instanceof String[]) { 189 String[] values = (String[]) value; 190 191 return values.length; 192 } else { 193 // can't recognize the value 194 throw new AssertionFailedError( 195 "Unexpected type found as parameter value for [" + paramName 196 + "]"); 197 } 198 } 199 } ------------------------------------------------------------------------ This page was automatically generated by [Maven](http://maven.apache.org/)
ExclamationLabs/struts-1.3.10_docs
struts-core/xref-test/org/apache/struts/action/TestActionRedirect.html.md
Markdown
apache-2.0
8,746
package org.danielnixon.playwarts import org.wartremover.{ WartTraverser, WartUniverse } abstract class ObjectMultiWart( wartClassName: String, targetObjectName: String, methods: List[(String, String)] ) extends WartTraverser { class Op(name: String, error: String) extends WartTraverser { override lazy val className: String = wartClassName def apply(u: WartUniverse): u.Traverser = { import u.universe._ val symbol = rootMirror.staticModule(targetObjectName) val Name = TermName(name) new u.Traverser { override def traverse(tree: Tree): Unit = { tree match { // Ignore trees marked by SuppressWarnings case t if hasWartAnnotation(u)(t) => case Select(tpt, Name) if tpt.tpe.contains(symbol) => error(u)(tree.pos, error) case _ => super.traverse(tree) } } } } } def apply(u: WartUniverse): u.Traverser = WartTraverser.sumList(u)(methods.map(method => new Op(method._1, method._2))) }
danielnixon/playwarts
core/src/main/scala/org/danielnixon/playwarts/ObjectMultiWart.scala
Scala
apache-2.0
1,040
/* * Copyright 2015 Foundational Development * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package pro.foundev.reporting case class RunTimeOptions(host: String, runs: Int, recordsToIngest: Long, enableLauncher: Boolean) { }
rssvihla/datastax_work
spark_commons/benchmarks/low_latency_spark/src/main/scala/pro/foundev/reporting/RunTimeOptions.scala
Scala
apache-2.0
766
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.hc.core5.testing.nio; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Set; import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; import org.apache.hc.core5.function.Callback; import org.apache.hc.core5.reactor.DefaultListeningIOReactor; import org.apache.hc.core5.reactor.IOEventHandlerFactory; import org.apache.hc.core5.reactor.IOReactorConfig; import org.apache.hc.core5.reactor.IOSession; import org.apache.hc.core5.reactor.ListenerEndpoint; public class AsyncServer extends IOReactorExecutor<DefaultListeningIOReactor> { public AsyncServer(final IOReactorConfig ioReactorConfig) { super(ioReactorConfig, null); } @Override DefaultListeningIOReactor createIOReactor( final IOEventHandlerFactory ioEventHandlerFactory, final IOReactorConfig ioReactorConfig, final ThreadFactory threadFactory, final Callback<IOSession> sessionShutdownCallback) throws IOException { return new DefaultListeningIOReactor(ioEventHandlerFactory, ioReactorConfig, threadFactory, threadFactory, LoggingIOSessionDecorator.INSTANCE, LoggingIOSessionListener.INSTANCE, sessionShutdownCallback); } public Future<ListenerEndpoint> listen(final InetSocketAddress address) { return reactor().listen(address, null); } public Set<ListenerEndpoint> getEndpoints() { return reactor().getEndpoints(); } }
ok2c/httpcore
httpcore5-testing/src/main/java/org/apache/hc/core5/testing/nio/AsyncServer.java
Java
apache-2.0
2,674
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.integration; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.common.utils.MockTime; import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster; import org.apache.kafka.streams.integration.utils.IntegrationTestUtils; import org.apache.kafka.streams.kstream.ForeachAction; import 
org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.Materialized; import org.apache.kafka.streams.kstream.Reducer; import org.apache.kafka.streams.processor.Processor; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.ProcessorSupplier; import org.apache.kafka.streams.processor.StateRestoreListener; import org.apache.kafka.streams.processor.TaskId; import org.apache.kafka.streams.processor.internals.StateDirectory; import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier; import org.apache.kafka.streams.state.KeyValueStore; import org.apache.kafka.streams.state.StoreBuilder; import org.apache.kafka.streams.state.Stores; import org.apache.kafka.streams.state.internals.KeyValueStoreBuilder; import org.apache.kafka.streams.state.internals.OffsetCheckpoint; import org.apache.kafka.test.IntegrationTest; import org.apache.kafka.test.TestUtils; import org.junit.After; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import java.io.File; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsEqual.equalTo; import static org.junit.Assert.assertTrue; @Category({IntegrationTest.class}) public class RestoreIntegrationTest { private static final int NUM_BROKERS = 1; private static final String APPID = "restore-test"; @ClassRule public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS); private static final String INPUT_STREAM = "input-stream"; private static final String INPUT_STREAM_2 = "input-stream-2"; private final 
int numberOfKeys = 10000; private KafkaStreams kafkaStreams; @BeforeClass public static void createTopics() throws InterruptedException { CLUSTER.createTopic(INPUT_STREAM, 2, 1); CLUSTER.createTopic(INPUT_STREAM_2, 2, 1); CLUSTER.createTopic(APPID + "-store-changelog", 2, 1); } private Properties props(final String applicationId) { Properties streamsConfiguration = new Properties(); streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId); streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0); streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(applicationId).getPath()); streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000); streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); return streamsConfiguration; } @After public void shutdown() { if (kafkaStreams != null) { kafkaStreams.close(30, TimeUnit.SECONDS); } } @Test public void shouldRestoreStateFromSourceTopic() throws Exception { final AtomicInteger numReceived = new AtomicInteger(0); final StreamsBuilder builder = new StreamsBuilder(); final Properties props = props(APPID); props.put(StreamsConfig.TOPOLOGY_OPTIMIZATION, StreamsConfig.OPTIMIZE); // restoring from 1000 to 4000 (committed), and then process from 4000 to 5000 on each of the two partitions final int offsetLimitDelta = 1000; final int offsetCheckpointed = 1000; createStateForRestoration(INPUT_STREAM); setCommittedOffset(INPUT_STREAM, offsetLimitDelta); final StateDirectory stateDirectory = new StateDirectory(new StreamsConfig(props), new MockTime()); new OffsetCheckpoint(new File(stateDirectory.directoryForTask(new TaskId(0, 0)), ".checkpoint")) 
.write(Collections.singletonMap(new TopicPartition(INPUT_STREAM, 0), (long) offsetCheckpointed)); new OffsetCheckpoint(new File(stateDirectory.directoryForTask(new TaskId(0, 1)), ".checkpoint")) .write(Collections.singletonMap(new TopicPartition(INPUT_STREAM, 1), (long) offsetCheckpointed)); final CountDownLatch startupLatch = new CountDownLatch(1); final CountDownLatch shutdownLatch = new CountDownLatch(1); builder.table(INPUT_STREAM, Consumed.with(Serdes.Integer(), Serdes.Integer())) .toStream() .foreach(new ForeachAction<Integer, Integer>() { @Override public void apply(final Integer key, final Integer value) { if (numReceived.incrementAndGet() == 2 * offsetLimitDelta) shutdownLatch.countDown(); } }); kafkaStreams = new KafkaStreams(builder.build(), props); kafkaStreams.setStateListener(new KafkaStreams.StateListener() { @Override public void onChange(final KafkaStreams.State newState, final KafkaStreams.State oldState) { if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) { startupLatch.countDown(); } } }); final AtomicLong restored = new AtomicLong(0); kafkaStreams.setGlobalStateRestoreListener(new StateRestoreListener() { @Override public void onRestoreStart(final TopicPartition topicPartition, final String storeName, final long startingOffset, final long endingOffset) { } @Override public void onBatchRestored(final TopicPartition topicPartition, final String storeName, final long batchEndOffset, final long numRestored) { } @Override public void onRestoreEnd(final TopicPartition topicPartition, final String storeName, final long totalRestored) { restored.addAndGet(totalRestored); } }); kafkaStreams.start(); assertTrue(startupLatch.await(30, TimeUnit.SECONDS)); assertThat(restored.get(), equalTo((long) numberOfKeys - offsetLimitDelta * 2 - offsetCheckpointed * 2)); assertTrue(shutdownLatch.await(30, TimeUnit.SECONDS)); assertThat(numReceived.get(), equalTo(offsetLimitDelta * 2)); } @Test public void 
shouldRestoreStateFromChangelogTopic() throws Exception { final AtomicInteger numReceived = new AtomicInteger(0); final StreamsBuilder builder = new StreamsBuilder(); final Properties props = props(APPID); // restoring from 1000 to 5000, and then process from 5000 to 10000 on each of the two partitions final int offsetCheckpointed = 1000; createStateForRestoration(APPID + "-store-changelog"); createStateForRestoration(INPUT_STREAM); final StateDirectory stateDirectory = new StateDirectory(new StreamsConfig(props), new MockTime()); new OffsetCheckpoint(new File(stateDirectory.directoryForTask(new TaskId(0, 0)), ".checkpoint")) .write(Collections.singletonMap(new TopicPartition(APPID + "-store-changelog", 0), (long) offsetCheckpointed)); new OffsetCheckpoint(new File(stateDirectory.directoryForTask(new TaskId(0, 1)), ".checkpoint")) .write(Collections.singletonMap(new TopicPartition(APPID + "-store-changelog", 1), (long) offsetCheckpointed)); final CountDownLatch startupLatch = new CountDownLatch(1); final CountDownLatch shutdownLatch = new CountDownLatch(1); builder.table(INPUT_STREAM, Consumed.with(Serdes.Integer(), Serdes.Integer()), Materialized.as("store")) .toStream() .foreach(new ForeachAction<Integer, Integer>() { @Override public void apply(final Integer key, final Integer value) { if (numReceived.incrementAndGet() == numberOfKeys) shutdownLatch.countDown(); } }); kafkaStreams = new KafkaStreams(builder.build(), props); kafkaStreams.setStateListener(new KafkaStreams.StateListener() { @Override public void onChange(final KafkaStreams.State newState, final KafkaStreams.State oldState) { if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) { startupLatch.countDown(); } } }); final AtomicLong restored = new AtomicLong(0); kafkaStreams.setGlobalStateRestoreListener(new StateRestoreListener() { @Override public void onRestoreStart(final TopicPartition topicPartition, final String storeName, final long startingOffset, final long 
endingOffset) { } @Override public void onBatchRestored(final TopicPartition topicPartition, final String storeName, final long batchEndOffset, final long numRestored) { } @Override public void onRestoreEnd(final TopicPartition topicPartition, final String storeName, final long totalRestored) { restored.addAndGet(totalRestored); } }); kafkaStreams.start(); assertTrue(startupLatch.await(30, TimeUnit.SECONDS)); assertThat(restored.get(), equalTo((long) numberOfKeys - 2 * offsetCheckpointed)); assertTrue(shutdownLatch.await(30, TimeUnit.SECONDS)); assertThat(numReceived.get(), equalTo(numberOfKeys)); } @Test public void shouldSuccessfullyStartWhenLoggingDisabled() throws InterruptedException { final StreamsBuilder builder = new StreamsBuilder(); final KStream<Integer, Integer> stream = builder.stream(INPUT_STREAM); stream.groupByKey() .reduce(new Reducer<Integer>() { @Override public Integer apply(final Integer value1, final Integer value2) { return value1 + value2; } }, Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as("reduce-store").withLoggingDisabled()); final CountDownLatch startupLatch = new CountDownLatch(1); kafkaStreams = new KafkaStreams(builder.build(), props(APPID)); kafkaStreams.setStateListener(new KafkaStreams.StateListener() { @Override public void onChange(final KafkaStreams.State newState, final KafkaStreams.State oldState) { if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) { startupLatch.countDown(); } } }); kafkaStreams.start(); assertTrue(startupLatch.await(30, TimeUnit.SECONDS)); } @Test public void shouldProcessDataFromStoresWithLoggingDisabled() throws InterruptedException, ExecutionException { IntegrationTestUtils.produceKeyValuesSynchronously(INPUT_STREAM_2, Arrays.asList(KeyValue.pair(1, 1), KeyValue.pair(2, 2), KeyValue.pair(3, 3)), TestUtils.producerConfig(CLUSTER.bootstrapServers(), IntegerSerializer.class, IntegerSerializer.class), CLUSTER.time); final KeyValueBytesStoreSupplier 
lruMapSupplier = Stores.lruMap(INPUT_STREAM_2, 10); final StoreBuilder<KeyValueStore<Integer, Integer>> storeBuilder = new KeyValueStoreBuilder<>(lruMapSupplier, Serdes.Integer(), Serdes.Integer(), CLUSTER.time) .withLoggingDisabled(); final StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder.addStateStore(storeBuilder); final KStream<Integer, Integer> stream = streamsBuilder.stream(INPUT_STREAM_2); final CountDownLatch processorLatch = new CountDownLatch(3); stream.process(new ProcessorSupplier<Integer, Integer>() { @Override public Processor<Integer, Integer> get() { return new KeyValueStoreProcessor(INPUT_STREAM_2, processorLatch); } }, INPUT_STREAM_2); final Topology topology = streamsBuilder.build(); kafkaStreams = new KafkaStreams(topology, props(APPID + "-logging-disabled")); final CountDownLatch latch = new CountDownLatch(1); kafkaStreams.setStateListener(new KafkaStreams.StateListener() { @Override public void onChange(final KafkaStreams.State newState, final KafkaStreams.State oldState) { if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) { latch.countDown(); } } }); kafkaStreams.start(); latch.await(30, TimeUnit.SECONDS); assertTrue(processorLatch.await(30, TimeUnit.SECONDS)); } public static class KeyValueStoreProcessor implements Processor<Integer, Integer> { private String topic; private final CountDownLatch processorLatch; private KeyValueStore<Integer, Integer> store; public KeyValueStoreProcessor(final String topic, final CountDownLatch processorLatch) { this.topic = topic; this.processorLatch = processorLatch; } @Override public void init(final ProcessorContext context) { this.store = (KeyValueStore<Integer, Integer>) context.getStateStore(topic); } @Override public void process(final Integer key, final Integer value) { if (key != null) { store.put(key, value); processorLatch.countDown(); } } @Override public void close() { } } private void createStateForRestoration(final String 
changelogTopic) { final Properties producerConfig = new Properties(); producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); try (final KafkaProducer<Integer, Integer> producer = new KafkaProducer<>(producerConfig, new IntegerSerializer(), new IntegerSerializer())) { for (int i = 0; i < numberOfKeys; i++) { producer.send(new ProducerRecord<>(changelogTopic, i, i)); } } } private void setCommittedOffset(final String topic, final int limitDelta) { final Properties consumerConfig = new Properties(); consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, APPID); consumerConfig.put(ConsumerConfig.CLIENT_ID_CONFIG, "commit-consumer"); consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class); consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class); final Consumer consumer = new KafkaConsumer(consumerConfig); final List<TopicPartition> partitions = Arrays.asList( new TopicPartition(topic, 0), new TopicPartition(topic, 1)); consumer.assign(partitions); consumer.seekToEnd(partitions); for (TopicPartition partition : partitions) { final long position = consumer.position(partition); consumer.seek(partition, position - limitDelta); } consumer.commitSync(); consumer.close(); } }
Ishiihara/kafka
streams/src/test/java/org/apache/kafka/streams/integration/RestoreIntegrationTest.java
Java
apache-2.0
18,703
package main import ( "fmt" "sort" "strings" ) type State struct { hand []string discard []string deck []string Picks []string victory int numProv int } func (s *State) NewCopy() State { ds := State{} ds.hand = make([]string, len(s.hand)) ds.discard = make([]string, len(s.discard)) ds.deck = make([]string, len(s.deck)) ds.Picks = make([]string, len(s.Picks)) copy(ds.hand, s.hand) copy(ds.discard, s.discard) copy(ds.deck, s.deck) copy(ds.Picks, s.Picks) ds.victory = -1 ds.numProv = -1 return ds } func (s *State) PickState() string { return strings.Join(s.Picks, " ") } func (s *State) Print() { var c []string c = append(c, "HAND") c = append(c, s.hand...) c = append(c, "DISCARD") c = append(c, s.discard...) c = append(c, "DECK") c = append(c, s.deck...) c = append(c, "PICKS") c = append(c, s.Picks...) fmt.Println(strings.Join(c, " ")) fmt.Printf("Victory points: %d\n", s.TotalVictory()) } func (s *State) StringHand() string { return strings.Join(s.hand, ",") } func (s *State) Init() { for i := 0; i < 3; i++ { s.deck = append(s.deck, "estate") } for i := 0; i < 7; i++ { s.deck = append(s.deck, "copper") } shuffle(s.deck) if s.TotalCards() != 10 { panic("Invalid initialization of state") } s.victory = -1 s.numProv = -1 } func (s *State) drawCards(num int) []string { numcards := len(s.deck) + len(s.discard) if len(s.deck) < num { s.discard = append(s.discard, s.deck...) s.deck = s.deck[:0] shuffle(s.discard) s.deck = make([]string, len(s.discard)) copy(s.deck, s.discard) // s.deck = s.discard s.discard = s.discard[:0] if len(s.discard) != 0 { panic("Coders fault") } } if len(s.deck) < num { // Chapel strategy can trash a lot of cards. 
num = len(s.deck) } cards := make([]string, num) copy(cards, s.deck[0:num]) s.deck = s.deck[num:] if len(s.deck)+len(s.discard) != numcards-num { panic("Coders fault again") } return cards } func (s *State) DrawHand() { if len(s.hand) != 0 { panic("Already have hand") // fmt.Println("Already have hand") return } cards := s.drawCards(5) s.hand = make([]string, len(cards)) copy(s.hand, cards) } func (s *State) AddToHand(num int) { cards := s.drawCards(num) s.hand = append(s.hand, cards...) } func (s *State) CardInHand(name string) bool { for _, card := range s.hand { if card == name { return true } } return false } func (s *State) CopyHand() []string { cp := make([]string, len(s.hand)) copy(cp, s.hand) return cp } func (s *State) TrashFromHand(indices []int) string { var trashed []string for _, idx := range indices { if idx >= len(s.hand) { panic("Invalid index") } trashed = append(trashed, s.hand[idx]) s.hand[idx] = "" } var newh []string for _, card := range s.hand { if card != "" { newh = append(newh, card) } } s.hand = newh sort.Sort(sort.StringSlice(trashed)) return strings.Join(trashed, ",") } func (s *State) TrashUselessCards() { var newh []string for _, card := range s.hand { if card == "copper" || card == "estate" { continue } newh = append(newh, card) } s.hand = newh } func (s *State) AddCardAndDiscardHand(c string) { s.victory = -1 s.numProv = -1 s.discard = append(s.discard, c) s.Picks = append(s.Picks, c) s.Discard() } func (s *State) Discard() { s.discard = append(s.discard, s.hand...) 
s.hand = s.hand[:0] if len(s.hand) != 0 { panic("Hand should be zero.") } } func (s *State) Value() int { total := 0 for _, card := range s.hand { total += GetValue(card) } return total } func (s *State) TotalVictory() int { if s.victory != -1 { return s.victory } total := 0 for _, card := range s.discard { total += GetVictory(card) } for _, card := range s.deck { total += GetVictory(card) } for _, card := range s.hand { total += GetVictory(card) } s.victory = total return total } func (s *State) NumProvinces() int { if s.numProv != -1 { return s.numProv } s.numProv = s.TotalCardsByName("province") return s.numProv } func (s *State) TotalCards() int { total := 0 total += len(s.discard) total += len(s.hand) total += len(s.deck) return total } func (s *State) TotalCardsByName(name string) int { total := 0 for _, card := range s.discard { if card == name { total += 1 } } for _, card := range s.deck { if card == name { total += 1 } } for _, card := range s.hand { if card == name { total += 1 } } return total }
manishrjain/dominion
state.go
GO
apache-2.0
4,477
package org.arquillian.algeron.pact.consumer.core; import au.com.dius.pact.consumer.dsl.PactDslWithProvider; import au.com.dius.pact.model.RequestResponsePact; import org.arquillian.algeron.pact.consumer.core.client.StandaloneConsumerPactTest; import org.arquillian.algeron.pact.consumer.spi.Pact; import org.arquillian.algeron.pact.consumer.spi.PactVerification; import org.assertj.core.api.Assertions; import org.jboss.arquillian.test.spi.TestClass; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import java.lang.reflect.Method; import java.util.Optional; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ConsumerPactTestTest { @Mock PactVerification pactVerification; @Mock PactConsumerConfiguration pactConsumerConfiguration; @Before public void setup() { when(pactVerification.fragment()).thenReturn(""); } @Test public void should_get_pact_from_method() throws NoSuchMethodException { AbstractConsumerPactTest abstractConsumerPactTest = new StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactMethod.class); final Optional<AbstractConsumerPactTest.PactMethod> pactFragmentMethod = abstractConsumerPactTest.findPactMethod("p1", testClass, pactVerification); final Method annotatedMethod = PactMethod.class.getMethod("contract1", PactDslWithProvider.class); assertThat(pactFragmentMethod.get().getMethod()) .isEqualTo(annotatedMethod); assertThat(pactFragmentMethod.get().getPact()) .isEqualTo(annotatedMethod.getAnnotation(Pact.class)); } @Test public void should_get_pact_from_class() throws NoSuchMethodException { AbstractConsumerPactTest abstractConsumerPactTest = new StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactClass.class); final Optional<AbstractConsumerPactTest.PactMethod> pactFragmentMethod = 
abstractConsumerPactTest.findPactMethod("p2", testClass, pactVerification); assertThat(pactFragmentMethod.get().getMethod()) .isEqualTo(PactClass.class.getMethod("contract2", PactDslWithProvider.class)); assertThat(pactFragmentMethod.get().getPact()) .isEqualTo(PactClass.class.getAnnotation(Pact.class)); } @Test public void should_give_preference_to_method_annotation() throws NoSuchMethodException { AbstractConsumerPactTest abstractConsumerPactTest = new StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactMethodClass.class); final Optional<AbstractConsumerPactTest.PactMethod> pactFragmentMethod = abstractConsumerPactTest.findPactMethod("p4", testClass, pactVerification); final Method annotatedMethod = PactMethodClass.class.getMethod("contract3", PactDslWithProvider.class); assertThat(pactFragmentMethod.get().getMethod()) .isEqualTo(annotatedMethod); assertThat(pactFragmentMethod.get().getPact()) .isEqualTo(annotatedMethod.getAnnotation(Pact.class)); } @Test public void should_ignore_class_annotation_if_annotated_method() throws NoSuchMethodException { AbstractConsumerPactTest abstractConsumerPactTest = new StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactMethodClass.class); final Optional<AbstractConsumerPactTest.PactMethod> pactFragmentMethod = abstractConsumerPactTest.findPactMethod("p3", testClass, pactVerification); Assertions.assertThat(pactFragmentMethod).isNotPresent(); } @Test public void should_get_provider_name_from_pact_class_annotation() { AbstractConsumerPactTest abstractConsumerPactTest = new StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactClassClassProvider.class); when(pactVerification.value()).thenReturn(""); final String provider = abstractConsumerPactTest.getProvider(testClass, pactVerification); assertThat(provider).isEqualTo("p2"); } @Test public void should_get_provider_name_from_pact_provider_method_annotation() { AbstractConsumerPactTest abstractConsumerPactTest = new 
StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactClassMethodProvider.class); when(pactVerification.value()).thenReturn("p3"); final String provider = abstractConsumerPactTest.getProvider(testClass, pactVerification); assertThat(provider).isEqualTo("p3"); } @Test public void should_get_provider_name_from_configuration() { AbstractConsumerPactTest abstractConsumerPactTest = new StandaloneConsumerPactTest(); TestClass testClass = new TestClass(PactMethodPactVerificationWithoutProvider.class); when(pactVerification.value()).thenReturn(""); when(pactConsumerConfiguration.getProvider()).thenReturn("p4"); when(pactConsumerConfiguration.isProviderSet()).thenReturn(true); abstractConsumerPactTest.pactConsumerConfigurationInstance = () -> pactConsumerConfiguration; final String provider = abstractConsumerPactTest.getProvider(testClass, pactVerification); assertThat(provider).isEqualTo("p4"); } public static class PactMethod { @Pact(consumer = "c1", provider = "p1") public RequestResponsePact contract1(PactDslWithProvider builder) { return null; } } @Pact(consumer = "c2", provider = "p2") public static class PactClass { public RequestResponsePact contract2(PactDslWithProvider builder) { return null; } } @Pact(consumer = "c3", provider = "p3") public static class PactMethodClass { @Pact(consumer = "c4", provider = "p4") public RequestResponsePact contract3(PactDslWithProvider builder) { return null; } } @Pact(consumer = "c2", provider = "p2") public static class PactClassClassProvider { public RequestResponsePact contract2(PactDslWithProvider builder) { return null; } } @Pact(consumer = "c2", provider = "p2") public static class PactClassMethodProvider { public RequestResponsePact contract2(PactDslWithProvider builder) { return null; } } public static class PactMethodPactVerificationWithoutProvider { @Pact(consumer = "c1", provider = "p1") public RequestResponsePact contract1(PactDslWithProvider builder) { return null; } } }
arquillian/arquillian-algeron
pact/consumer/core/src/test/java/org/arquillian/algeron/pact/consumer/core/ConsumerPactTestTest.java
Java
apache-2.0
6,791
# Persicaria utriculosa (Tikovsky) Soják SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Polygonaceae/Persicaria/Persicaria utriculosa/README.md
Markdown
apache-2.0
189
--- layout: post title: "pip install guide" subtitle: "pip install guide" date: 2016-12-07 22:00:00 author: "Haoking" header-img: "img/post-bg-re-vs-ng2.jpg" tags: - iOS - Front-end - OSX - pip - python --- > [Please indicate the source of forwarding and be a follower of my Github](https://github.com/haoking). ## **pip install** **pip is a package management system used to install and manage softeare packages written in Python**. when you install pip on an old OSX, you can follow the example from offcial website : ```shell pip install ``` But sometimes there will be something wrong here on lastest OSX, and this is because of the new SIP protection mechanism on lastest OSX. Just use this way: ```shell pip install --user ```
haoking/haoking.github.io
_posts/2016-12-07-pip-install.markdown
Markdown
apache-2.0
789
package foo.user; import javax.persistence.*; import org.springframework.stereotype.Repository; import org.springframework.transaction.annotation.Transactional; @Repository @Transactional(readOnly = true) public class UserRepository { @PersistenceContext private EntityManager entityManager; @Transactional public void save(User user) { entityManager.persist(user); } public User findByUsername(String username) { try { return entityManager.createNamedQuery(User.FIND_BY_USERNAME, User.class) .setParameter("username", username) .getSingleResult(); } catch (PersistenceException e) { return null; } } }
jordanbaucke/spring-fabric
src/main/java/foo/user/UserRepository.java
Java
apache-2.0
672
package com.buddysearch.android.data.util; import lombok.experimental.UtilityClass; @UtilityClass public class StringUtil { /** * Examples: * concatLinearly("_", one, 123) -> one_123 * concatLinearly("-", null, 123) -> 123 * concatLinearly("-", one, null) -> one */ public static String concatLinearly(String separator, String... strings) { String fullString = ""; if (separator == null || strings == null) { return null; } for (int i = 0; i < strings.length; i++) { String s = strings[i]; if (s != null && s.length() > 0) { fullString += s; if (i < strings.length - 1) { fullString += separator; } } } return fullString; } }
ihorvitruk/buddysearch
data/src/main/java/com/buddysearch/android/data/util/StringUtil.java
Java
apache-2.0
831
/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "processors/InvokeHTTP.h" #include <regex.h> #include <curl/curlbuild.h> #include <curl/easy.h> #include <uuid/uuid.h> #include <memory> #include <algorithm> #include <cctype> #include <cstdint> #include <cstring> #include <iostream> #include <iterator> #include <map> #include <set> #include <string> #include <utility> #include <vector> #include "core/FlowFile.h" #include "core/logging/Logger.h" #include "core/ProcessContext.h" #include "core/Relationship.h" #include "io/DataStream.h" #include "io/StreamFactory.h" #include "ResourceClaim.h" #include "utils/StringUtils.h" namespace org { namespace apache { namespace nifi { namespace minifi { namespace processors { const char *InvokeHTTP::ProcessorName = "InvokeHTTP"; core::Property InvokeHTTP::Method("HTTP Method", "HTTP request method (GET, POST, PUT, PATCH, DELETE, HEAD, OPTIONS). " "Arbitrary methods are also supported. 
Methods other than POST, PUT and PATCH will be sent without a message body.", "GET"); core::Property InvokeHTTP::URL("Remote URL", "Remote URL which will be connected to, including scheme, host, port, path.", ""); core::Property InvokeHTTP::ConnectTimeout("Connection Timeout", "Max wait time for connection to remote service.", "5 secs"); core::Property InvokeHTTP::ReadTimeout("Read Timeout", "Max wait time for response from remote service.", "15 secs"); core::Property InvokeHTTP::DateHeader("Include Date Header", "Include an RFC-2616 Date header in the request.", "True"); core::Property InvokeHTTP::FollowRedirects("Follow Redirects", "Follow HTTP redirects issued by remote server.", "True"); core::Property InvokeHTTP::AttributesToSend("Attributes to Send", "Regular expression that defines which attributes to send as HTTP" " headers in the request. If not defined, no attributes are sent as headers.", ""); core::Property InvokeHTTP::SSLContext("SSL Context Service", "The SSL Context Service used to provide client certificate information for TLS/SSL (https) connections.", ""); core::Property InvokeHTTP::ProxyHost("Proxy Host", "The fully qualified hostname or IP address of the proxy server", ""); core::Property InvokeHTTP::ProxyPort("Proxy Port", "The port of the proxy server", ""); core::Property InvokeHTTP::ProxyUser("invokehttp-proxy-user", "Username to set when authenticating against proxy", ""); core::Property InvokeHTTP::ProxyPassword("invokehttp-proxy-password", "Password to set when authenticating against proxy", ""); core::Property InvokeHTTP::ContentType("Content-type", "The Content-Type to specify for when content is being transmitted through a PUT, " "POST or PATCH. In the case of an empty value after evaluating an expression language expression, " "Content-Type defaults to", "application/octet-stream"); core::Property InvokeHTTP::SendBody("send-message-body", "If true, sends the HTTP message body on POST/PUT/PATCH requests (default). 
" "If false, suppresses the message body and content-type header for these requests.", "true"); core::Property InvokeHTTP::PropPutOutputAttributes("Put Response Body in Attribute", "If set, the response body received back will be put into an attribute of the original " "FlowFile instead of a separate FlowFile. The attribute key to put to is determined by evaluating value of this property. ", ""); core::Property InvokeHTTP::AlwaysOutputResponse("Always Output Response", "Will force a response FlowFile to be generated and routed to the 'Response' relationship " "regardless of what the server status code received is ", "false"); core::Property InvokeHTTP::PenalizeOnNoRetry("Penalize on \"No Retry\"", "Enabling this property will penalize FlowFiles that are routed to the \"No Retry\" relationship.", "false"); const char* InvokeHTTP::STATUS_CODE = "invokehttp.status.code"; const char* InvokeHTTP::STATUS_MESSAGE = "invokehttp.status.message"; const char* InvokeHTTP::RESPONSE_BODY = "invokehttp.response.body"; const char* InvokeHTTP::REQUEST_URL = "invokehttp.request.url"; const char* InvokeHTTP::TRANSACTION_ID = "invokehttp.tx.id"; const char* InvokeHTTP::REMOTE_DN = "invokehttp.remote.dn"; const char* InvokeHTTP::EXCEPTION_CLASS = "invokehttp.java.exception.class"; const char* InvokeHTTP::EXCEPTION_MESSAGE = "invokehttp.java.exception.message"; core::Relationship InvokeHTTP::Success("success", "All files are routed to success"); core::Relationship InvokeHTTP::RelResponse("response", "Represents a response flowfile"); core::Relationship InvokeHTTP::RelRetry("retry", "The original FlowFile will be routed on any status code that can be retried " "(5xx status codes). It will have new attributes detailing the request."); core::Relationship InvokeHTTP::RelNoRetry("no retry", "The original FlowFile will be routed on any status code that should NOT " "be retried (1xx, 3xx, 4xx status codes). 
It will have new attributes detailing the request."); core::Relationship InvokeHTTP::RelFailure("failure", "The original FlowFile will be routed on any type of connection failure, " "timeout or general exception. It will have new attributes detailing the request."); void InvokeHTTP::set_request_method(CURL *curl, const std::string &method) { std::string my_method = method; std::transform(my_method.begin(), my_method.end(), my_method.begin(), ::toupper); if (my_method == "POST") { curl_easy_setopt(curl, CURLOPT_POST, 1); } else if (my_method == "PUT") { curl_easy_setopt(curl, CURLOPT_UPLOAD, 1); } else if (my_method == "GET") { } else { curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, my_method.c_str()); } } void InvokeHTTP::initialize() { logger_->log_info("Initializing InvokeHTTP"); // Set the supported properties std::set<core::Property> properties; properties.insert(Method); properties.insert(URL); properties.insert(ConnectTimeout); properties.insert(ReadTimeout); properties.insert(DateHeader); properties.insert(AttributesToSend); properties.insert(SSLContext); properties.insert(ProxyHost); properties.insert(ProxyPort); properties.insert(ProxyUser); properties.insert(ProxyPassword); properties.insert(ContentType); properties.insert(SendBody); properties.insert(AlwaysOutputResponse); setSupportedProperties(properties); // Set the supported relationships std::set<core::Relationship> relationships; relationships.insert(Success); setSupportedRelationships(relationships); } void InvokeHTTP::onSchedule(core::ProcessContext *context, core::ProcessSessionFactory *sessionFactory) { if (!context->getProperty(Method.getName(), method_)) { logger_->log_info("%s attribute is missing, so default value of %s will be used", Method.getName().c_str(), Method.getValue().c_str()); return; } if (!context->getProperty(URL.getName(), url_)) { logger_->log_info("%s attribute is missing, so default value of %s will be used", URL.getName().c_str(), URL.getValue().c_str()); return; } 
std::string timeoutStr; if (context->getProperty(ConnectTimeout.getName(), timeoutStr)) { core::Property::StringToInt(timeoutStr, connect_timeout_); // set the timeout in curl options. } else { logger_->log_info("%s attribute is missing, so default value of %s will be used", ConnectTimeout.getName().c_str(), ConnectTimeout.getValue().c_str()); return; } if (context->getProperty(ReadTimeout.getName(), timeoutStr)) { core::Property::StringToInt(timeoutStr, read_timeout_); } else { logger_->log_info("%s attribute is missing, so default value of %s will be used", ReadTimeout.getName().c_str(), ReadTimeout.getValue().c_str()); } std::string dateHeaderStr; if (!context->getProperty(DateHeader.getName(), dateHeaderStr)) { logger_->log_info("%s attribute is missing, so default value of %s will be used", DateHeader.getName().c_str(), DateHeader.getValue().c_str()); } date_header_include_ = utils::StringUtils::StringToBool(dateHeaderStr, date_header_include_); if (!context->getProperty(PropPutOutputAttributes.getName(), put_attribute_name_)) { logger_->log_info("%s attribute is missing, so default value of %s will be used", PropPutOutputAttributes.getName().c_str(), PropPutOutputAttributes.getValue().c_str()); } if (!context->getProperty(AttributesToSend.getName(), attribute_to_send_regex_)) { logger_->log_info("%s attribute is missing, so default value of %s will be used", AttributesToSend.getName().c_str(), AttributesToSend.getValue().c_str()); } std::string always_output_response = "false"; if (!context->getProperty(AlwaysOutputResponse.getName(), always_output_response)) { logger_->log_info("%s attribute is missing, so default value of %s will be used", AttributesToSend.getName().c_str(), AttributesToSend.getValue().c_str()); } utils::StringUtils::StringToBool(always_output_response, always_output_response_); std::string penalize_no_retry = "false"; if (!context->getProperty(PenalizeOnNoRetry.getName(), penalize_no_retry)) { logger_->log_info("%s attribute is missing, so 
default value of %s will be used", AttributesToSend.getName().c_str(), AttributesToSend.getValue().c_str()); } utils::StringUtils::StringToBool(penalize_no_retry, penalize_no_retry_); std::string context_name; if (context->getProperty(SSLContext.getName(), context_name) && !IsNullOrEmpty(context_name)) { std::shared_ptr<core::controller::ControllerService> service = context->getControllerService(context_name); if (nullptr != service) { ssl_context_service_ = std::static_pointer_cast<minifi::controllers::SSLContextService>(service); } } } InvokeHTTP::~InvokeHTTP() { curl_global_cleanup(); } inline bool InvokeHTTP::matches(const std::string &value, const std::string &sregex) { if (sregex == ".*") return true; regex_t regex; int ret = regcomp(&regex, sregex.c_str(), 0); if (ret) return false; ret = regexec(&regex, value.c_str(), (size_t) 0, NULL, 0); regfree(&regex); if (ret) return false; return true; } std::string InvokeHTTP::generateId() { uuid_t txId; uuid_generate(txId); char uuidStr[37]; uuid_unparse_lower(txId, uuidStr); return uuidStr; } bool InvokeHTTP::emitFlowFile(const std::string &method) { return ("POST" == method || "PUT" == method || "PATCH" == method); } struct curl_slist *InvokeHTTP::build_header_list(CURL *curl, std::string regex, const std::map<std::string, std::string> &attributes) { struct curl_slist *list = NULL; if (curl) { for (auto attribute : attributes) { if (matches(attribute.first, regex)) { std::string attr = attribute.first + ":" + attribute.second; list = curl_slist_append(list, attr.c_str()); } } } return list; } bool InvokeHTTP::isSecure(const std::string &url) { if (url.find("https") != std::string::npos) { return true; } return false; } CURLcode InvokeHTTP::configure_ssl_context(CURL *curl, void *ctx, void *param) { minifi::controllers::SSLContextService *ssl_context_service = static_cast<minifi::controllers::SSLContextService*>(param); if (!ssl_context_service->configure_ssl_context(static_cast<SSL_CTX*>(ctx))) { return 
CURLE_FAILED_INIT; } return CURLE_OK; } void InvokeHTTP::configure_secure_connection(CURL *http_session) { logger_->log_debug("InvokeHTTP -- Using certificate file %s", ssl_context_service_->getCertificateFile()); curl_easy_setopt(http_session, CURLOPT_VERBOSE, 1L); curl_easy_setopt(http_session, CURLOPT_SSL_CTX_FUNCTION, &InvokeHTTP::configure_ssl_context); curl_easy_setopt(http_session, CURLOPT_SSL_CTX_DATA, static_cast<void*>(ssl_context_service_.get())); } void InvokeHTTP::onTrigger(core::ProcessContext *context, core::ProcessSession *session) { std::shared_ptr<FlowFileRecord> flowFile = std::static_pointer_cast<FlowFileRecord>(session->get()); logger_->log_info("onTrigger InvokeHTTP with %s", method_.c_str()); if (flowFile == nullptr) { if (!emitFlowFile(method_)) { logger_->log_info("InvokeHTTP -- create flow file with %s", method_.c_str()); flowFile = std::static_pointer_cast<FlowFileRecord>(session->create()); } else { logger_->log_info("exiting because method is %s", method_.c_str()); return; } } else { logger_->log_info("InvokeHTTP -- Received flowfile "); } // create a transaction id std::string tx_id = generateId(); CURL *http_session = curl_easy_init(); // set the HTTP request method from libCURL set_request_method(http_session, method_); if (isSecure(url_) && ssl_context_service_ != nullptr) { configure_secure_connection(http_session); } curl_easy_setopt(http_session, CURLOPT_URL, url_.c_str()); if (connect_timeout_ > 0) { curl_easy_setopt(http_session, CURLOPT_TIMEOUT, connect_timeout_); } if (read_timeout_ > 0) { curl_easy_setopt(http_session, CURLOPT_TIMEOUT, read_timeout_); } utils::HTTPRequestResponse content; curl_easy_setopt(http_session, CURLOPT_WRITEFUNCTION, &utils::HTTPRequestResponse::recieve_write); curl_easy_setopt(http_session, CURLOPT_WRITEDATA, static_cast<void*>(&content)); if (emitFlowFile(method_)) { logger_->log_info("InvokeHTTP -- reading flowfile"); std::shared_ptr<ResourceClaim> claim = flowFile->getResourceClaim(); if (claim) 
{ utils::ByteInputCallBack *callback = new utils::ByteInputCallBack(); session->read(flowFile, callback); utils::CallBackPosition *callbackObj = new utils::CallBackPosition; callbackObj->ptr = callback; callbackObj->pos = 0; logger_->log_info("InvokeHTTP -- Setting callback"); curl_easy_setopt(http_session, CURLOPT_UPLOAD, 1L); curl_easy_setopt(http_session, CURLOPT_INFILESIZE_LARGE, (curl_off_t)callback->getBufferSize()); curl_easy_setopt(http_session, CURLOPT_READFUNCTION, &utils::HTTPRequestResponse::send_write); curl_easy_setopt(http_session, CURLOPT_READDATA, static_cast<void*>(callbackObj)); } else { logger_->log_error("InvokeHTTP -- no resource claim"); } } else { logger_->log_info("InvokeHTTP -- Not emitting flowfile to HTTP Server"); } // append all headers struct curl_slist *headers = build_header_list(http_session, attribute_to_send_regex_, flowFile->getAttributes()); curl_easy_setopt(http_session, CURLOPT_HTTPHEADER, headers); logger_->log_info("InvokeHTTP -- curl performed"); res = curl_easy_perform(http_session); if (res == CURLE_OK) { logger_->log_info("InvokeHTTP -- curl successful"); bool putToAttribute = !IsNullOrEmpty(put_attribute_name_); std::string response_body(content.data.begin(), content.data.end()); int64_t http_code = 0; curl_easy_getinfo(http_session, CURLINFO_RESPONSE_CODE, &http_code); char *content_type; /* ask for the content-type */ curl_easy_getinfo(http_session, CURLINFO_CONTENT_TYPE, &content_type); flowFile->addAttribute(STATUS_CODE, std::to_string(http_code)); flowFile->addAttribute(STATUS_MESSAGE, response_body); flowFile->addAttribute(REQUEST_URL, url_); flowFile->addAttribute(TRANSACTION_ID, tx_id); bool isSuccess = ((int32_t) (http_code / 100)) == 2 && res != CURLE_ABORTED_BY_CALLBACK; bool output_body_to_requestAttr = (!isSuccess || putToAttribute) && flowFile != nullptr; bool output_body_to_content = isSuccess && !putToAttribute; bool body_empty = IsNullOrEmpty(content.data); logger_->log_info("isSuccess: %d", 
isSuccess); std::shared_ptr<FlowFileRecord> response_flow = nullptr; if (output_body_to_content) { if (flowFile != nullptr) { response_flow = std::static_pointer_cast<FlowFileRecord>(session->create(flowFile)); } else { response_flow = std::static_pointer_cast<FlowFileRecord>(session->create()); } std::string ct = content_type; response_flow->addKeyedAttribute(MIME_TYPE, ct); response_flow->addAttribute(STATUS_CODE, std::to_string(http_code)); response_flow->addAttribute(STATUS_MESSAGE, response_body); response_flow->addAttribute(REQUEST_URL, url_); response_flow->addAttribute(TRANSACTION_ID, tx_id); io::DataStream stream((const uint8_t*) content.data.data(), content.data.size()); // need an import from the data stream. session->importFrom(stream, response_flow); } else { logger_->log_info("Cannot output body to content"); response_flow = std::static_pointer_cast<FlowFileRecord>(session->create()); } route(flowFile, response_flow, session, context, isSuccess, http_code); } else { logger_->log_error("InvokeHTTP -- curl_easy_perform() failed %s\n", curl_easy_strerror(res)); } curl_slist_free_all(headers); curl_easy_cleanup(http_session); } void InvokeHTTP::route(std::shared_ptr<FlowFileRecord> &request, std::shared_ptr<FlowFileRecord> &response, core::ProcessSession *session, core::ProcessContext *context, bool isSuccess, int statusCode) { // check if we should yield the processor if (!isSuccess && request == nullptr) { context->yield(); } // If the property to output the response flowfile regardless of status code is set then transfer it bool responseSent = false; if (always_output_response_ && response != nullptr) { session->transfer(response, Success); responseSent = true; } // transfer to the correct relationship // 2xx -> SUCCESS if (isSuccess) { // we have two flowfiles to transfer if (request != nullptr) { session->transfer(request, Success); } if (response != nullptr && !responseSent) { session->transfer(response, Success); } // 5xx -> RETRY } else if 
(statusCode / 100 == 5) { if (request != nullptr) { session->penalize(request); session->transfer(request, RelRetry); } // 1xx, 3xx, 4xx -> NO RETRY } else { if (request != nullptr) { if (penalize_no_retry_) { session->penalize(request); } session->transfer(request, RelNoRetry); } } } } /* namespace processors */ } /* namespace minifi */ } /* namespace nifi */ } /* namespace apache */ } /* namespace org */
brosander/nifi-minifi-cpp
libminifi/src/processors/InvokeHTTP.cpp
C++
apache-2.0
19,667
#!/usr/bin/env bash ############ CODE ZCodeConfig() { if [ -e /opt/code ] && [ "$(uname)" != "Darwin" ] ; then export ZCODEDIR=${ZCODEDIR:-/opt/code} else export ZCODEDIR=${ZCODEDIR:-~/code} fi } ZCodeGetJSUsage() { cat <<EOF Usage: ZCodeGet [-r reponame] [-g giturl] [-a account] [-b branch] -r reponame: name or repo which is being downloaded -b branchname: defaults to development -h: help check's out jumpscale repo to $ZCODEDIR/github/jumpscale/$reponame branchname can optionally be specified. if specified but repo exists then a pull will be done & branch will be ignored !!! if reponame not specified then will checkout - bash - lib9 - core9 - ays9 - prefab EOF } ZCodeGetJS() { echo FUNCTION: ${FUNCNAME[0]} >> $ZLogFile ZCodeConfig || return 1 local OPTIND local account='jumpscale' local reponame='' local branch=${JS9BRANCH:-development} while getopts "r:b:h" opt; do case $opt in r ) reponame=$OPTARG ;; b ) branch=$OPTARG ;; h ) ZCodeGetJSUsage ; return 0 ;; \? ) ZCodeGetJSUsage ; return 1 ;; esac done if [ -z "$reponame" ]; then ZCodeGetJS -r core9 -b $branch || return 1 ZCodeGetJS -r lib9 -b $branch || return 1 ZCodeGetJS -r bash -b $branch || return 1 ZCodeGetJS -r ays9 -b $branch || return 1 ZCodeGetJS -r prefab9 -b $branch || return 1 return 0 fi # local giturl="git@github.com:Jumpscale/$reponame.git" local githttpsurl="https://github.com/jumpscale/$reponame.git" # check if specificed branch or $JS9BRANCH exist, if not then fallback to development JS9BRANCHExists ${githttpsurl} ${branch} || branch=development # ZCodeGet -r $reponame -a $account -u $giturl -b $branch || ZCodeGet -r $reponame -a $account -u $githttpsurl -b $branch || return 1 ZCodeGet -r $reponame -a $account -u $githttpsurl -b $branch || return 1 } ZCodeGetUsage() { cat <<EOF Usage: ZCodeGet [-r reponame] [-g giturl] [-a account] [-b branch] -t type: default is github but could be e.g. gitlab, ... 
-a account: will default to 'varia', but can be account name -r reponame: name or repo which is being downloaded -u giturl: e.g. git@github.com:mathieuancelin/duplicates.git -b branchname: defaults to development -k sshkey: path to sshkey to use for authorization when connecting to the repository. -h: help check's out any git repo repo to $ZCODEDIR/$type/$account/$reponame branchname can optionally be specified. if specified but repo exists then a pull will be done & branch will be ignored !!! EOF } #to return to original dir do Z_pushd ZCodeGet() { echo FUNCTION: ${FUNCNAME[0]} > $ZLogFile ZCodeConfig || return 1 local OPTIND local type='github' local account='varia' local reponame='' local giturl='' local branch=${JS9BRANCH:-development} local sshkey='' while getopts "a:r:u:b:t:k:h" opt; do case $opt in a ) account=$OPTARG ;; t ) type=$OPTARG ;; r ) reponame=$OPTARG ;; u ) giturl=$OPTARG ;; b ) branch=$OPTARG ;; k ) sshkey=$OPTARG ;; h ) ZCodeGetUsage ; return 0 ;; \? ) ZCodeGetUsage ; return 1 ;; esac done if [ -z "$giturl" ]; then ZCodeGetUsage return 0 fi if [ -z "$reponame" ]; then ZCodeGetUsage return 0 fi echo "[+] get code $giturl ($branch)" Z_mkdir_pushd $ZCODEDIR/$type/$account || return 1 # check if docs.greenitglobe.com (gogs) in the url if grep -q docs.greenitglobe.com <<< $giturl; then ssh-keyscan -t rsa docs.greenitglobe.com >> ~/.ssh/known_hosts 2>&1 >> $ZLogFile || die "ssh keyscan" || return 1 fi if ! grep -q ^github.com ~/.ssh/known_hosts 2> /dev/null; then ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts 2>&1 >> $ZLogFile || die "ssh keyscan" || return 1 fi if [ ! 
-e $ZCODEDIR/$type/$account/$reponame ]; then echo " [+] clone" git clone -b ${branch} $giturl $reponame 2>&1 >> $ZLogFile || die "git clone" || return 1 else Z_pushd $ZCODEDIR/$type/$account/$reponame || return 1 echo " [+] pull" echo 'git pull' >> $ZLogFile git pull 2>&1 >> $ZLogFile || die "could not git pull" || return 1 Z_popd || return 1 fi Z_popd || return 1 } ZCodePushUsage(){ cat <<EOF Usage: ZCodePush [-r reponame] [-a account] [-m message] -t type: default is github but could be e.g. gitlab, ... -a account: will default to 'varia', but can be account name -r reponame: name or repo -m message for commit: required ! -h: help will add/remove files, commit, pull & push EOF } ZCodePush() { echo FUNCTION: ${FUNCNAME[0]} >> $ZLogFile ZCodeConfig || return 1 local OPTIND local type='github' local account='varia' local reponame='' local message='' while getopts "a:r:m:t:h" opt; do case $opt in t ) type=$OPTARG ;; a ) account=$OPTARG ;; r ) reponame=$OPTARG ;; m ) message=$OPTARG ;; h ) ZCodePushUsage ; return 0 ;; \? ) ZCodePushUsage ; return 1 ;; esac done if [ -z "$message" ]; then ZCodePushUsage return fi if [ -z "$account" ]; then ZCodePushUsage return fi if [ -z "$reponame" ]; then echo "[+] walk over directories: $ZCODEDIR/$type/$account" # Z_pushd $ZCODEDIR/$type/$account || return 1 ls -d $ZCODEDIR/$type/$account/*/ | { # find . -mindepth 1 -maxdepth 1 -type d | { while read DIRPATH ; do DIRNAME=$(basename $DIRPATH) || die "basename" || return 1 ZCodePush -a $account -r $DIRNAME -m $message || return 1 done } # Z_popd || return 1 return fi echo "[+] commit-pull-push code $ZCODEDIR/$type/$account/$reponame" Z_pushd $ZCODEDIR/$type/$account > /dev/null 2>&1 || die || return 1 if [ ! -e $ZCODEDIR/$type/$account/$reponame ]; then die "could not find $ZCODEDIR/$type/$account/$reponame" || return 1 else Z_pushd $ZCODEDIR/$type/$account/$reponame || return 1 echo " [+] add" git add . 
-A 2>&1 >> $ZLogFile #|| die "ZCodePush (add) $@" || return 1 echo " [+] commit" git commit -m '$message' 2>&1 >> $ZLogFile #|| die "ZCodePush (commit) $@" || return 1 echo " [+] pull" git pull 2>&1 >> $ZLogFile || die "ZCodePush (pull) $@" || return 1 echo " [+] push" git push 2>&1 >> $ZLogFile || die "ZCodePush (push) $@" || return 1 Z_popd || return 1 fi Z_popd || return 1 } ZCodePushJSUsage(){ cat <<EOF Usage: ZCodePushJS [-r reponame] [-a account] [-m message] -r reponame: name or repo -m message for commit: required ! -h: help will add/remove files, commit, pull & push EOF } ZCodePushJS(){ echo FUNCTION: ${FUNCNAME[0]} >> $ZLogFile ZCodeConfig || return 1 local OPTIND local reponame='' local message='' while getopts "r:m:h" opt; do case $opt in r ) reponame=$OPTARG ;; m ) message=$OPTARG ;; h ) ZCodePushJSUsage ; return 0 ;; \? ) ZCodePushJSUsage ; return 1 ;; esac done if [ -z "$message" ]; then ZCodePushJSUsage return fi if [ "$reponame" = "" ]; then ZCodePush -a jumpscale -m $message || die "$@" || return 1 else ZCodePush -a jumpscale -r $reponame -m $message || die "$@" || return 1 fi } JS9BRANCHExists() { local giturl="$1" local branch=${2:-${JS9BRANCH}} # remove the trailing .git from the giturl if exist giturl=${giturl%.git} echo "[+] Checking if ${giturl}/tree/${branch} exists" httpcode=$(curl -o /dev/null -I -s --write-out '%{http_code}\n' $giturl/tree/${branch}) if [ "$httpcode" = "200" ]; then return 0 else echo "[+] Error: ${giturl}/tree/${branch} does not exist" return 1 fi }
Jumpscale/core9
bash/lib/code_lib.sh
Shell
apache-2.0
8,213
// native Promises are supported from Node.js 6.10 on AWS lambdas module.exports.handler = function (event, context) { if (event.succeed) { const delay = event.succeed.delay; const result = event.succeed.result; return new Promise((resolve) => setTimeout(resolve,delay)) .then(() => result) } if (event.fail) { const delay = event.fail.delay; const err = event.fail.err; return new Promise((resolve) => setTimeout(resolve,delay)) .then(() => { throw new Error(err) }); } };
bbc/aws-lambda-runner
spec/test_promise.js
JavaScript
apache-2.0
602
# Valeriana minutiflora Hand.-Mazz. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Dipsacales/Valerianaceae/Valeriana/Valeriana minutiflora/README.md
Markdown
apache-2.0
183
<?php /* Smarty version Smarty3-b8, created on 2011-05-25 17:00:01 compiled from "D:\htdocs\yulog.net.shehua\backadmin/Template/product_detail.html" */ ?> <?php /*%%SmartyHeaderCode:203404ddcc511941f53-64435125%%*/if(!defined('SMARTY_DIR')) exit('no direct access allowed'); $_smarty_tpl->decodeProperties(array ( 'file_dependency' => array ( '81be815715f51430c231e3f87aefdaf3814e919c' => array ( 0 => 'D:\\htdocs\\yulog.net.shehua\\backadmin/Template/product_detail.html', 1 => 1305857078, ), ), 'nocache_hash' => '203404ddcc511941f53-64435125', 'function' => array ( ), 'has_nocache_code' => false, )); /*/%%SmartyHeaderCode%%*/?> <?php if (!is_callable('smarty_function_brite_spot')) include 'D:\htdocs\yulog.net.shehua\smarty\plugins\function.brite_spot.php'; if (!is_callable('smarty_modifier_cat')) include 'D:\htdocs\yulog.net.shehua\smarty\plugins\modifier.cat.php'; if (!is_callable('smarty_function_Goto_diploma')) include 'D:\htdocs\yulog.net.shehua\smarty\plugins\function.Goto_diploma.php'; if (!is_callable('smarty_modifier_date_format')) include 'D:\htdocs\yulog.net.shehua\smarty\plugins\modifier.date_format.php'; ?><table border="0" cellspacing="1" cellpadding="0" width="100%"> <tr> <td width="10%" align="right" class="Need">编号</td> <td><?php echo smarty_function_brite_spot(array('query'=>$_smarty_tpl->getVariable('get')->value['info'],'str'=>$_smarty_tpl->getVariable('show')->value['ProID']),$_smarty_tpl->smarty,$_smarty_tpl);?> </td> <td width="10%" align="right">原编号</td> <td><?php echo smarty_function_brite_spot(array('query'=>$_smarty_tpl->getVariable('get')->value['info'],'str'=>$_smarty_tpl->getVariable('show')->value['previousNO']),$_smarty_tpl->smarty,$_smarty_tpl);?> </td> </tr> <tr> <td align="right" class="Need">状态</td> <td><?php echo $_smarty_tpl->getVariable('status')->value[$_smarty_tpl->getVariable('show')->value['status']];?> (<?php echo $_smarty_tpl->getVariable('show')->value['status'];?> )</td> <td align="right">数量</td> <td><?php echo 
$_smarty_tpl->getVariable('show')->value['amount'];?> </td> </tr> <tr> <td align="right">形状</td> <td><?php echo $_smarty_tpl->getVariable('shape')->value[$_smarty_tpl->getVariable('show')->value['shape']];?> (<?php echo $_smarty_tpl->getVariable('show')->value['shape'];?> )</td> <td align="right">重量</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['weight'];?> </td> </tr> <tr> <td align="right">颜色</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['color'];?> </td> <td align="right">净度</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['clarity'];?> </td> </tr> <tr> <td align="right">切工</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['cut'];?> </td> <td align="right">抛光</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['buffing'];?> </td> </tr> <tr> <td align="right">对称</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['symmetry'];?> </td> <td align="right">荧光强度</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['Fent_Isity'];?> </td> </tr> <tr> <td align="right">荧光颜色</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['Fent_color'];?> </td> <td align="right">测量值</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['scalar_value'];?> </td> </tr> <tr> <td align="right">全身比</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['body_ratio'];?> %</td> <td align="right">台宽比</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['table_width'];?> %</td> </tr> <tr> <td align="right">证书</td> <td colspan="3"> <?php ob_start();?><?php echo smarty_function_brite_spot(array('query'=>$_smarty_tpl->getVariable('get')->value['info'],'str'=>smarty_modifier_cat(smarty_modifier_cat($_smarty_tpl->getVariable('show')->value['diploma'],' - '),$_smarty_tpl->getVariable('show')->value['diplomaNO'])),$_smarty_tpl->smarty,$_smarty_tpl);?> <?php $_tmp1=ob_get_clean();?><?php echo 
smarty_function_Goto_diploma(array('diploma'=>$_smarty_tpl->getVariable('show')->value['diploma'],'weight'=>$_smarty_tpl->getVariable('show')->value['weight'],'NO'=>$_smarty_tpl->getVariable('show')->value['diplomaNO'],'show'=>$_tmp1),$_smarty_tpl->smarty,$_smarty_tpl);?> <?php if ($_smarty_tpl->getVariable('show')->value['diplomaPhoto']){?><a style="margin:0 0 0 2em" href="<?php echo $_smarty_tpl->getVariable('show')->value['diplomaPhoto'];?> " target="_<?php echo $_smarty_tpl->getVariable('show')->value['ProID'];?> ">证书图</a><?php }?> </td> </tr> <tr> <td align="right">产品来源</td> <td> <?php echo $_smarty_tpl->getVariable('proSource')->value[$_smarty_tpl->getVariable('show')->value['proSource']];?> <?php if ($_smarty_tpl->getVariable('show')->value['proSource']!='self'){?><span style="margin:0 0 0 2em">商家ID:<?php echo $_smarty_tpl->getVariable('show')->value['sellerID'];?> </span><?php }?> </td> <td align="right">库存地点</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['stockAddress'];?> </td> </tr> <tr> <td align="right">是否促销</td> <td colspan="3"> <?php if ($_smarty_tpl->getVariable('show')->value['is_promotion']=='N'||$_smarty_tpl->getVariable('show')->value['promotion_start']>time()||time()>$_smarty_tpl->getVariable('show')->value['promotion_stop']){?>否<?php }else{ ?> 是 <span style="margin:0 0 0 2em">开始日期 <?php echo smarty_modifier_date_format($_smarty_tpl->getVariable('show')->value['promotion_start'],"%Y-%m-%d %H:%M");?> </span> <span style="margin:0 0 0 2em">结束日期 <?php echo smarty_modifier_date_format($_smarty_tpl->getVariable('show')->value['promotion_stop'],"%Y-%m-%d %H:%M");?> </span> <span style="margin:0 0 0 2em">促销点 <?php echo $_smarty_tpl->getVariable('show')->value['promotion_dot'];?> %</span> <?php }?> </td> </tr> <tr> <td align="right">国家</td> <td><?php echo $_smarty_tpl->getVariable('country')->value[$_smarty_tpl->getVariable('show')->value['country']];?> (<?php echo $_smarty_tpl->getVariable('show')->value['country'];?> )</td> <td 
align="right">&nbsp;</td> <td>&nbsp;</td> </tr> <tr> <td align="right">进货价</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['infml'];?> 元</td> <td align="right">国际报价</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['INTbid'];?> 元 / ct</td> </tr> <tr> <td align="right">退点</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['agio'];?> %</td> <td align="right">基准退点</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['baseAgio'];?> %</td> </tr> <tr> <td align="right">产品添加日期</td> <td> <?php echo smarty_modifier_date_format($_smarty_tpl->getVariable('show')->value['time'],"%Y-%m-%d %H:%M:%S");?> </td> <td align="right">最后修改日期</td> <td> <?php echo smarty_modifier_date_format($_smarty_tpl->getVariable('show')->value['edit_time'],"%Y-%m-%d %H:%M:%S");?> </td> </tr> <tr> <td align="right">外部备注</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['userRemark'];?> </td> <td align="right">内部备注</td> <td><?php echo $_smarty_tpl->getVariable('show')->value['backRemark'];?> </td> </tr> </table>
Capitalwang/shehuahk
smarty/temp/81be815715f51430c231e3f87aefdaf3814e919c.file.product_detail.html.php
PHP
apache-2.0
7,390
/* * Copyright 2014 Red Hat, Inc. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.vertx.ext.web.sstore.impl; import io.vertx.core.VertxException; import io.vertx.core.buffer.Buffer; import io.vertx.core.shareddata.Shareable; import io.vertx.core.shareddata.impl.ClusterSerializable; import io.vertx.ext.web.Session; import io.vertx.ext.web.impl.Utils; import java.io.*; import java.nio.charset.Charset; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * @author <a href="http://tfox.org">Tim Fox</a> */ public class SessionImpl implements Session, ClusterSerializable, Shareable { private static final Charset UTF8 = Charset.forName("UTF-8"); private static final char[] HEX = "0123456789abcdef".toCharArray(); private static final byte TYPE_LONG = 1; private static final byte TYPE_INT = 2; private static final byte TYPE_SHORT = 3; private static final byte TYPE_BYTE = 4; private static final byte TYPE_DOUBLE = 5; private static final byte TYPE_FLOAT = 6; private static final byte TYPE_CHAR = 7; private static final byte TYPE_BOOLEAN = 8; private static final byte TYPE_STRING = 9; private static final byte TYPE_BUFFER = 10; private static final byte TYPE_BYTES = 11; private static final byte TYPE_SERIALIZABLE = 12; private static final byte TYPE_CLUSTER_SERIALIZABLE = 13; private final PRNG prng; private String id; private long timeout; private volatile Map<String, Object> data; private long lastAccessed; private int version; // state management private boolean destroyed; private boolean renewed; private String oldId; public 
SessionImpl(PRNG random) { this.prng = random; } public SessionImpl(PRNG random, long timeout, int length) { this.prng = random; this.id = generateId(prng, length); this.timeout = timeout; this.lastAccessed = System.currentTimeMillis(); } @Override public String id() { return id; } @Override public Session regenerateId() { if (oldId == null) { // keep track of just the first one since the // regeneration during the remaining lifecycle are ephemeral oldId = id; } // ids are stored in hex, so the original size is half of the hex encodec length id = generateId(prng, oldId.length() / 2); renewed = true; return this; } @Override public long timeout() { return timeout; } @Override @SuppressWarnings("unchecked") public <T> T get(String key) { Object obj = getData().get(key); return (T) obj; } @Override public Session put(String key, Object obj) { final Map<String, Object> data = getData(); // nulls are handled as remove actions if (obj == null) { data.remove(key); } else { data.put(key, obj); } return this; } @Override @SuppressWarnings("unchecked") public <T> T remove(String key) { Object obj = getData().remove(key); return (T) obj; } @Override public Map<String, Object> data() { return getData(); } @Override public long lastAccessed() { return lastAccessed; } @Override public void setAccessed() { this.lastAccessed = System.currentTimeMillis(); } @Override public void destroy() { synchronized (this) { destroyed = true; data = null; } } @Override public boolean isDestroyed() { return destroyed; } @Override public boolean isRegenerated() { return renewed; } @Override public String oldId() { return oldId; } public int version() { return version; } public void incrementVersion() { ++version; } @Override public void writeToBuffer(Buffer buff) { byte[] bytes = id.getBytes(UTF8); buff.appendInt(bytes.length).appendBytes(bytes); buff.appendLong(timeout); buff.appendLong(lastAccessed); buff.appendInt(version); Buffer dataBuf = writeDataToBuffer(); buff.appendBuffer(dataBuf); } 
@Override public int readFromBuffer(int pos, Buffer buffer) { int len = buffer.getInt(pos); pos += 4; byte[] bytes = buffer.getBytes(pos, pos + len); pos += len; id = new String(bytes, UTF8); timeout = buffer.getLong(pos); pos += 8; lastAccessed = buffer.getLong(pos); pos += 8; version = buffer.getInt(pos); pos += 4; pos = readDataFromBuffer(pos, buffer); return pos; } private Map<String, Object> getData() { if (data == null) { synchronized (this) { // double check since there could already been someone in the lock if (data == null) { data = new ConcurrentHashMap<>(); if (destroyed) { // pretty much should behave as a regeneration regenerateId(); destroyed = false; } } } } return data; } private Buffer writeDataToBuffer() { try { Buffer buffer = Buffer.buffer(); if (data == null) { buffer.appendInt(0); } else { buffer.appendInt(data.size()); for (Map.Entry<String, Object> entry : data.entrySet()) { String key = entry.getKey(); byte[] keyBytes = key.getBytes(UTF8); buffer.appendInt(keyBytes.length).appendBytes(keyBytes); Object val = entry.getValue(); if (val instanceof Long) { buffer.appendByte(TYPE_LONG).appendLong((long) val); } else if (val instanceof Integer) { buffer.appendByte(TYPE_INT).appendInt((int) val); } else if (val instanceof Short) { buffer.appendByte(TYPE_SHORT).appendShort((short) val); } else if (val instanceof Byte) { buffer.appendByte(TYPE_BYTE).appendByte((byte) val); } else if (val instanceof Double) { buffer.appendByte(TYPE_DOUBLE).appendDouble((double) val); } else if (val instanceof Float) { buffer.appendByte(TYPE_FLOAT).appendFloat((float) val); } else if (val instanceof Character) { buffer.appendByte(TYPE_CHAR).appendShort((short) ((Character) val).charValue()); } else if (val instanceof Boolean) { buffer.appendByte(TYPE_BOOLEAN).appendByte((byte) ((boolean) val ? 
1 : 0)); } else if (val instanceof String) { byte[] bytes = ((String) val).getBytes(UTF8); buffer.appendByte(TYPE_STRING).appendInt(bytes.length).appendBytes(bytes); } else if (val instanceof Buffer) { Buffer buff = (Buffer) val; buffer.appendByte(TYPE_BUFFER).appendInt(buff.length()).appendBuffer(buff); } else if (val instanceof byte[]) { byte[] bytes = (byte[]) val; buffer.appendByte(TYPE_BYTES).appendInt(bytes.length).appendBytes(bytes); } else if (val instanceof Serializable) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(new BufferedOutputStream(baos)); oos.writeObject(val); oos.flush(); byte[] bytes = baos.toByteArray(); buffer.appendByte(TYPE_SERIALIZABLE).appendInt(bytes.length).appendBytes(bytes); } else if (val instanceof ClusterSerializable) { buffer.appendByte(TYPE_CLUSTER_SERIALIZABLE); String className = val.getClass().getName(); byte[] classNameBytes = className.getBytes(UTF8); buffer.appendInt(classNameBytes.length).appendBytes(classNameBytes); ((ClusterSerializable) val).writeToBuffer(buffer); } else { if (val != null) { throw new IllegalStateException("Invalid type for data in session: " + val.getClass()); } } } } return buffer; } catch (IOException e) { throw new VertxException(e); } } private int readDataFromBuffer(int pos, Buffer buffer) { try { int entries = buffer.getInt(pos); pos += 4; if (entries != 0) { data = new ConcurrentHashMap<>(entries); for (int i = 0; i < entries; i++) { int keylen = buffer.getInt(pos); pos += 4; byte[] keyBytes = buffer.getBytes(pos, pos + keylen); pos += keylen; String key = new String(keyBytes, UTF8); byte type = buffer.getByte(pos++); Object val; switch (type) { case TYPE_LONG: val = buffer.getLong(pos); pos += 8; break; case TYPE_INT: val = buffer.getInt(pos); pos += 4; break; case TYPE_SHORT: val = buffer.getShort(pos); pos += 2; break; case TYPE_BYTE: val = buffer.getByte(pos); pos++; break; case TYPE_FLOAT: val = buffer.getFloat(pos); pos += 4; 
break; case TYPE_DOUBLE: val = buffer.getDouble(pos); pos += 8; break; case TYPE_CHAR: short s = buffer.getShort(pos); pos += 2; val = (char) s; break; case TYPE_BOOLEAN: byte b = buffer.getByte(pos); pos++; val = b == 1; break; case TYPE_STRING: int len = buffer.getInt(pos); pos += 4; byte[] bytes = buffer.getBytes(pos, pos + len); val = new String(bytes, UTF8); pos += len; break; case TYPE_BUFFER: len = buffer.getInt(pos); pos += 4; bytes = buffer.getBytes(pos, pos + len); val = Buffer.buffer(bytes); pos += len; break; case TYPE_BYTES: len = buffer.getInt(pos); pos += 4; val = buffer.getBytes(pos, pos + len); pos += len; break; case TYPE_SERIALIZABLE: len = buffer.getInt(pos); pos += 4; bytes = buffer.getBytes(pos, pos + len); ByteArrayInputStream bais = new ByteArrayInputStream(bytes); ObjectInputStream ois = new ObjectInputStream(new BufferedInputStream(bais)); val = ois.readObject(); pos += len; break; case TYPE_CLUSTER_SERIALIZABLE: int classNameLen = buffer.getInt(pos); pos += 4; byte[] classNameBytes = buffer.getBytes(pos, pos + classNameLen); pos += classNameLen; String className = new String(classNameBytes, UTF8); Class clazz = Utils.getClassLoader().loadClass(className); ClusterSerializable obj = (ClusterSerializable) clazz.newInstance(); pos = obj.readFromBuffer(pos, buffer); val = obj; break; default: throw new IllegalStateException("Invalid serialized type: " + type); } data.put(key, val); } } return pos; } catch (Exception e) { throw new VertxException(e); } } private static String generateId(PRNG rng, int length) { final byte[] bytes = new byte[length]; rng.nextBytes(bytes); final char[] hex = new char[length * 2]; for (int j = 0; j < length; j++) { int v = bytes[j] & 0xFF; hex[j * 2] = HEX[v >>> 4]; hex[j * 2 + 1] = HEX[v & 0x0F]; } return new String(hex); } }
sibay/vertx-web
vertx-web/src/main/java/io/vertx/ext/web/sstore/impl/SessionImpl.java
Java
apache-2.0
12,081
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/core/framework/numeric_op.h" #include "tensorflow/core/framework/op.h" #include "tensorflow/core/framework/shape_inference.h" #include "tensorflow/core/util/padding.h" #include "tensorflow/core/util/tensor_format.h" namespace tensorflow { typedef shape_inference::Dimension Dimension; typedef shape_inference::InferenceContext InferenceContext; typedef shape_inference::Shape Shape; // -------------------------------------------------------------------------- REGISTER_OP("AvgPool") .Input("value: T") .Output("output: T") .Attr("ksize: list(int) >= 4") .Attr("strides: list(int) >= 4") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Attr("T: {float, half, double}") .Doc(R"doc( Performs average pooling on the input. Each entry in `output` is the mean of the corresponding size `ksize` window in `value`. value: 4-D with shape `[batch, height, width, channels]`. ksize: The size of the sliding window for each dimension of `value`. strides: The stride of the sliding window for each dimension of `value`. padding: The type of padding algorithm to use. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. 
Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. output: The average pooled output tensor. )doc"); REGISTER_OP("AvgPoolGrad") .Input("orig_input_shape: int32") .Input("grad: T") .Output("output: T") .Attr("ksize: list(int) >= 4") .Attr("strides: list(int) >= 4") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Attr("T: {float, half, double}") .Doc(R"doc( Computes gradients of the average pooling function. orig_input_shape: 1-D. Shape of the original input to `avg_pool`. grad: 4-D with shape `[batch, height, width, channels]`. Gradients w.r.t. the output of `avg_pool`. ksize: The size of the sliding window for each dimension of the input. strides: The stride of the sliding window for each dimension of the input. padding: The type of padding algorithm to use. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. output: 4-D. Gradients w.r.t. the input of `avg_pool`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("BatchNormWithGlobalNormalization") .Input("t: T") .Input("m: T") .Input("v: T") .Input("beta: T") .Input("gamma: T") .Output("result: T") .Attr("T: numbertype") .Attr("variance_epsilon: float") .Attr("scale_after_normalization: bool") .Deprecated(9, "Use tf.nn.batch_normalization()") .Doc(R"doc( Batch normalization. This op is deprecated. Prefer `tf.nn.batch_normalization`. t: A 4D input Tensor. m: A 1D mean Tensor with size matching the last dimension of t. This is the first output from tf.nn.moments, or a saved moving average thereof. v: A 1D variance Tensor with size matching the last dimension of t. This is the second output from tf.nn.moments, or a saved moving average thereof. 
beta: A 1D beta Tensor with size matching the last dimension of t. An offset to be added to the normalized tensor. gamma: A 1D gamma Tensor with size matching the last dimension of t. If "scale_after_normalization" is true, this tensor will be multiplied with the normalized tensor. variance_epsilon: A small float number to avoid dividing by 0. scale_after_normalization: A bool indicating whether the resulted tensor needs to be multiplied with gamma. )doc"); REGISTER_OP("BatchNormWithGlobalNormalizationGrad") .Input("t: T") .Input("m: T") .Input("v: T") .Input("gamma: T") .Input("backprop: T") .Output("dx: T") .Output("dm: T") .Output("dv: T") .Output("db: T") .Output("dg: T") .Attr("T: numbertype") .Attr("variance_epsilon: float") .Attr("scale_after_normalization: bool") .Deprecated(9, "Use tf.nn.batch_normalization()") .Doc(R"doc( Gradients for batch normalization. This op is deprecated. See `tf.nn.batch_normalization`. t: A 4D input Tensor. m: A 1D mean Tensor with size matching the last dimension of t. This is the first output from tf.nn.moments, or a saved moving average thereof. v: A 1D variance Tensor with size matching the last dimension of t. This is the second output from tf.nn.moments, or a saved moving average thereof. gamma: A 1D gamma Tensor with size matching the last dimension of t. If "scale_after_normalization" is true, this Tensor will be multiplied with the normalized Tensor. backprop: 4D backprop Tensor. variance_epsilon: A small float number to avoid dividing by 0. scale_after_normalization: A bool indicating whether the resulted tensor needs to be multiplied with gamma. dx: 4D backprop tensor for input. dm: 1D backprop tensor for mean. dv: 1D backprop tensor for variance. db: 1D backprop tensor for beta. dg: 1D backprop tensor for gamma. 
)doc"); // -------------------------------------------------------------------------- REGISTER_OP("BiasAdd") .Attr("T: numbertype") .Input("value: T") .Input("bias: T") .Attr(GetConvnetDataFormatAttrString()) .Output("output: T") .Doc(R"doc( Adds `bias` to `value`. This is a special case of `tf.add` where `bias` is restricted to be 1-D. Broadcasting is supported, so `value` may have any number of dimensions. value: Any number of dimensions. bias: 1-D with size the last dimension of `value`. data_format: Specify the data format of the input and output data. With the default format "NHWC", the bias tensor will be added to the last dimension of the value tensor. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. The tensor will be added to "in_channels", the third-to-the-last dimension. output: Broadcasted sum of `value` and `bias`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("BiasAddGrad") .Attr("T: numbertype") .Input("out_backprop: T") .Attr(GetConvnetDataFormatAttrString()) .Output("output: T") .Doc(R"doc( The backward operation for "BiasAdd" on the "bias" tensor. It accumulates all the values from out_backprop into the feature dimension. For NHWC data format, the feature dimension is the last. For NCHW data format, the feature dimension is the third-to-last. out_backprop: Any number of dimensions. output: 1-D with size the feature dimension of `out_backprop`. data_format: Specify the data format of the input and output data. With the default format "NHWC", the bias tensor will be added to the last dimension of the value tensor. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. The tensor will be added to "in_channels", the third-to-the-last dimension. 
)doc"); // -------------------------------------------------------------------------- REGISTER_OP("BiasAddV1") .Attr("T: numbertype") .Input("value: T") .Input("bias: T") .Output("output: T") .Doc(R"doc( Adds `bias` to `value`. This is a deprecated version of BiasAdd and will be soon removed. This is a special case of `tf.add` where `bias` is restricted to be 1-D. Broadcasting is supported, so `value` may have any number of dimensions. value: Any number of dimensions. bias: 1-D with size the last dimension of `value`. output: Broadcasted sum of `value` and `bias`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("Conv2D") .Input("input: T") .Input("filter: T") .Output("output: T") .Attr("T: {half, float, double}") .Attr("strides: list(int)") .Attr("use_cudnn_on_gpu: bool = true") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Doc(R"doc( Computes a 2-D convolution given 4-D `input` and `filter` tensors. Given an input tensor of shape `[batch, in_height, in_width, in_channels]` and a filter / kernel tensor of shape `[filter_height, filter_width, in_channels, out_channels]`, this op performs the following: 1. Flattens the filter to a 2-D matrix with shape `[filter_height * filter_width * in_channels, output_channels]`. 2. Extracts image patches from the input tensor to form a *virtual* tensor of shape `[batch, out_height, out_width, filter_height * filter_width * in_channels]`. 3. For each patch, right-multiplies the filter matrix and the image patch vector. In detail, with the default NHWC format, output[b, i, j, k] = sum_{di, dj, q} input[b, strides[1] * i + di, strides[2] * j + dj, q] * filter[di, dj, q, k] Must have `strides[0] = strides[3] = 1`. For the most common case of the same horizontal and vertices strides, `strides = [1, stride, stride, 1]`. strides: 1-D of length 4. The stride of the sliding window for each dimension of `input`. 
Must be in the same order as the dimension specified with format. padding: The type of padding algorithm to use. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. )doc"); REGISTER_OP("Conv2DBackpropInput") .Input("input_sizes: int32") .Input("filter: T") .Input("out_backprop: T") .Output("output: T") .Attr("T: {half, float, double}") .Attr("strides: list(int)") .Attr("use_cudnn_on_gpu: bool = true") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Doc(R"doc( Computes the gradients of convolution with respect to the input. input_sizes: An integer vector representing the shape of `input`, where `input` is a 4-D `[batch, height, width, channels]` tensor. filter: 4-D with shape `[filter_height, filter_width, in_channels, out_channels]`. out_backprop: 4-D with shape `[batch, out_height, out_width, out_channels]`. Gradients w.r.t. the output of the convolution. strides: The stride of the sliding window for each dimension of the input of the convolution. Must be in the same order as the dimension specified with format. padding: The type of padding algorithm to use. output: 4-D with shape `[batch, in_height, in_width, in_channels]`. Gradient w.r.t. the input of the convolution. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. )doc"); // TODO(jeff): Instead of 'use_cudnn_for_gpu', maybe we should have a // more general string attribute ('kernel_impl'?) that can be used to // select among several possible implementations. 
REGISTER_OP("Conv2DBackpropFilter") .Input("input: T") .Input("filter_sizes: int32") .Input("out_backprop: T") .Output("output: T") .Attr("T: {half, float, double}") .Attr("strides: list(int)") .Attr("use_cudnn_on_gpu: bool = true") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Doc(R"doc( Computes the gradients of convolution with respect to the filter. input: 4-D with shape `[batch, in_height, in_width, in_channels]`. filter_sizes: An integer vector representing the tensor shape of `filter`, where `filter` is a 4-D `[filter_height, filter_width, in_channels, out_channels]` tensor. out_backprop: 4-D with shape `[batch, out_height, out_width, out_channels]`. Gradients w.r.t. the output of the convolution. strides: The stride of the sliding window for each dimension of the input of the convolution. Must be in the same order as the dimension specified with format. padding: The type of padding algorithm to use. output: 4-D with shape `[filter_height, filter_width, in_channels, out_channels]`. Gradient w.r.t. the `filter` input of the convolution. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("DepthwiseConv2dNative") .Input("input: T") .Input("filter: T") .Output("output: T") .Attr("T: {float, double}") .Attr("strides: list(int)") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes a 2-D depthwise convolution given 4-D `input` and `filter` tensors. 
Given an input tensor of shape `[batch, in_height, in_width, in_channels]` and a filter / kernel tensor of shape `[filter_height, filter_width, in_channels, channel_multiplier]`, containing `in_channels` convolutional filters of depth 1, `depthwise_conv2d` applies a different filter to each input channel (expanding from 1 channel to `channel_multiplier` channels for each), then concatenates the results together. Thus, the output has `in_channels * channel_multiplier` channels. for k in 0..in_channels-1 for q in 0..channel_multiplier-1 output[b, i, j, k * channel_multiplier + q] = sum_{di, dj} input[b, strides[1] * i + di, strides[2] * j + dj, k] * filter[di, dj, k, q] Must have `strides[0] = strides[3] = 1`. For the most common case of the same horizontal and vertices strides, `strides = [1, stride, stride, 1]`. strides: 1-D of length 4. The stride of the sliding window for each dimension of `input`. padding: The type of padding algorithm to use. )doc"); REGISTER_OP("DepthwiseConv2dNativeBackpropInput") .Input("input_sizes: int32") .Input("filter: T") .Input("out_backprop: T") .Output("output: T") .Attr("T: {float, double}") .Attr("strides: list(int)") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the gradients of depthwise convolution with respect to the input. input_sizes: An integer vector representing the shape of `input`, where `input` is a 4-D `[batch, height, width, channels]` tensor. filter: 4-D with shape `[filter_height, filter_width, in_channels, depthwise_multiplier]`. out_backprop: 4-D with shape `[batch, out_height, out_width, out_channels]`. Gradients w.r.t. the output of the convolution. strides: The stride of the sliding window for each dimension of the input of the convolution. padding: The type of padding algorithm to use. output: 4-D with shape `[batch, in_height, in_width, in_channels]`. Gradient w.r.t. the input of the convolution. 
)doc"); REGISTER_OP("DepthwiseConv2dNativeBackpropFilter") .Input("input: T") .Input("filter_sizes: int32") .Input("out_backprop: T") .Output("output: T") .Attr("T: {float, double}") .Attr("strides: list(int)") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the gradients of depthwise convolution with respect to the filter. input: 4-D with shape `[batch, in_height, in_width, in_channels]`. filter_sizes: An integer vector representing the tensor shape of `filter`, where `filter` is a 4-D `[filter_height, filter_width, in_channels, depthwise_multiplier]` tensor. out_backprop: 4-D with shape `[batch, out_height, out_width, out_channels]`. Gradients w.r.t. the output of the convolution. strides: The stride of the sliding window for each dimension of the input of the convolution. padding: The type of padding algorithm to use. output: 4-D with shape `[filter_height, filter_width, in_channels, out_channels]`. Gradient w.r.t. the `filter` input of the convolution. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("Conv3D") .Input("input: T") .Input("filter: T") .Output("output: T") .Attr("T: numbertype") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes a 3-D convolution given 5-D `input` and `filter` tensors. In signal processing, cross-correlation is a measure of similarity of two waveforms as a function of a time-lag applied to one of them. This is also known as a sliding dot product or sliding inner-product. Our Conv3D implements a form of cross-correlation. input: Shape `[batch, in_depth, in_height, in_width, in_channels]`. filter: Shape `[filter_depth, filter_height, filter_width, in_channels, out_channels]`. `in_channels` must match between `input` and `filter`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. 
)doc"); REGISTER_OP("Conv3DBackpropInput") .Input("input: T") .Input("filter: T") .Input("out_backprop: T") .Output("output: T") .Attr("T: numbertype") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Deprecated(10, "Use Conv3DBackpropInputV2") .Doc(R"doc( Computes the gradients of 3-D convolution with respect to the input. input: Shape `[batch, depth, rows, cols, in_channels]`. filter: Shape `[depth, rows, cols, in_channels, out_channels]`. `in_channels` must match between `input` and `filter`. out_backprop: Backprop signal of shape `[batch, out_depth, out_rows, out_cols, out_channels]`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. )doc"); REGISTER_OP("Conv3DBackpropFilter") .Input("input: T") .Input("filter: T") .Input("out_backprop: T") .Output("output: T") .Attr("T: numbertype") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Deprecated(10, "Use Conv3DBackpropFilterV2") .Doc(R"doc( Computes the gradients of 3-D convolution with respect to the filter. input: Shape `[batch, depth, rows, cols, in_channels]`. filter: Shape `[depth, rows, cols, in_channels, out_channels]`. `in_channels` must match between `input` and `filter`. out_backprop: Backprop signal of shape `[batch, out_depth, out_rows, out_cols, out_channels]`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. )doc"); REGISTER_OP("Conv3DBackpropInputV2") .Input("input_sizes: int32") .Input("filter: T") .Input("out_backprop: T") .Output("output: T") .Attr("T: numbertype") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the gradients of 3-D convolution with respect to the input. 
input_sizes: An integer vector representing the tensor shape of `input`, where `input` is a 5-D `[batch, depth, rows, cols, in_channels]` tensor. filter: Shape `[depth, rows, cols, in_channels, out_channels]`. `in_channels` must match between `input` and `filter`. out_backprop: Backprop signal of shape `[batch, out_depth, out_rows, out_cols, out_channels]`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. )doc"); REGISTER_OP("Conv3DBackpropFilterV2") .Input("input: T") .Input("filter_sizes: int32") .Input("out_backprop: T") .Output("output: T") .Attr("T: numbertype") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the gradients of 3-D convolution with respect to the filter. input: Shape `[batch, depth, rows, cols, in_channels]`. filter_sizes: An integer vector representing the tensor shape of `filter`, where `filter` is a 5-D `[filter_depth, filter_height, filter_width, in_channels, out_channels]` tensor. out_backprop: Backprop signal of shape `[batch, out_depth, out_rows, out_cols, out_channels]`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("AvgPool3D") .Input("input: T") .Output("output: T") .Attr("ksize: list(int) >= 5") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Attr("T: numbertype") .Doc(R"doc( Performs 3D average pooling on the input. ksize: 1-D tensor of length 5. The size of the window for each dimension of the input tensor. Must have `ksize[0] = ksize[4] = 1`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. 
padding: The type of padding algorithm to use. input: Shape `[batch, depth, rows, cols, channels]` tensor to pool over. output: The average pooled output tensor. )doc"); REGISTER_OP("AvgPool3DGrad") .Input("orig_input_shape: int32") .Input("grad: T") .Output("output: T") .Attr("ksize: list(int) >= 5") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Attr("T: numbertype") .Doc(R"doc( Computes gradients of average pooling function. ksize: 1-D tensor of length 5. The size of the window for each dimension of the input tensor. Must have `ksize[0] = ksize[4] = 1`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. orig_input_shape: The original input dimensions. grad: Output backprop of shape `[batch, depth, rows, cols, channels]`. output: The backprop for input. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("MaxPool3D") .Input("input: T") .Output("output: T") .Attr("ksize: list(int) >= 5") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Attr("T: numbertype") .Doc(R"doc( Performs 3D max pooling on the input. ksize: 1-D tensor of length 5. The size of the window for each dimension of the input tensor. Must have `ksize[0] = ksize[4] = 1`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. input: Shape `[batch, depth, rows, cols, channels]` tensor to pool over. output: The max pooled output tensor. )doc"); REGISTER_OP("MaxPool3DGrad") .Input("orig_input: float") .Input("orig_output: float") .Input("grad: T") .Output("output: T") .Attr("ksize: list(int) >= 5 ") .Attr("strides: list(int) >= 5") .Attr(GetPaddingAttrString()) .Attr("T: numbertype") .Doc(R"doc( Computes gradients of max pooling function. ksize: 1-D tensor of length 5. 
The size of the window for each dimension of the input tensor. Must have `ksize[0] = ksize[4] = 1`. strides: 1-D tensor of length 5. The stride of the sliding window for each dimension of `input`. Must have `strides[0] = strides[4] = 1`. padding: The type of padding algorithm to use. orig_input: The original input tensor. orig_output: The original output tensor. grad: Output backprop of shape `[batch, depth, rows, cols, channels]`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("L2Loss") .Input("t: T") .Output("output: T") .Attr("T: numbertype") .Doc(R"doc( L2 Loss. Computes half the L2 norm of a tensor without the `sqrt`: output = sum(t ** 2) / 2 t: Typically 2-D, but may have any dimensions. output: 0-D. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("LRN") .Input("input: T") .Output("output: T") .Attr("depth_radius: int = 5") .Attr("bias: float = 1.0") .Attr("alpha: float = 1.0") .Attr("beta: float = 0.5") .Attr("T: {float, half} = DT_FLOAT") .Doc(R"doc( Local Response Normalization. The 4-D `input` tensor is treated as a 3-D array of 1-D vectors (along the last dimension), and each vector is normalized independently. Within a given vector, each component is divided by the weighted, squared sum of inputs within `depth_radius`. In detail, sqr_sum[a, b, c, d] = sum(input[a, b, c, d - depth_radius : d + depth_radius + 1] ** 2) output = input / (bias + alpha * sqr_sum) ** beta For details, see [Krizhevsky et al., ImageNet classification with deep convolutional neural networks (NIPS 2012)] (http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks). input: 4-D. depth_radius: 0-D. Half-width of the 1-D normalization window. bias: An offset (usually positive to avoid dividing by 0). alpha: A scale factor, usually positive. beta: An exponent. 
)doc"); REGISTER_OP("LRNGrad") .Input("input_grads: T") .Input("input_image: T") .Input("output_image: T") .Output("output: T") .Attr("depth_radius: int = 5") .Attr("bias: float = 1.0") .Attr("alpha: float = 1.0") .Attr("beta: float = 0.5") .Attr("T: {float, half} = DT_FLOAT") .Doc(R"doc( Gradients for Local Response Normalization. input_grads: 4-D with shape `[batch, height, width, channels]`. input_image: 4-D with shape `[batch, height, width, channels]`. output_image: 4-D with shape `[batch, height, width, channels]`. depth_radius: A depth radius. bias: An offset (usually > 0 to avoid dividing by 0). alpha: A scale factor, usually positive. beta: An exponent. output: The gradients for LRN. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("MaxPool") .Attr("T: {float, half} = DT_FLOAT") .Attr("ksize: list(int) >= 4") .Attr("strides: list(int) >= 4") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Input("input: T") .Output("output: T") .Doc(R"doc( Performs max pooling on the input. ksize: The size of the window for each dimension of the input tensor. strides: The stride of the sliding window for each dimension of the input tensor. padding: The type of padding algorithm to use. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. input: 4-D input to pool over. output: The max pooled output tensor. )doc"); REGISTER_OP("MaxPoolGrad") .Attr("ksize: list(int) >= 4") .Attr("strides: list(int) >= 4") .Attr(GetPaddingAttrString()) .Attr(GetConvnetDataFormatAttrString()) .Input("orig_input: T") .Input("orig_output: T") .Input("grad: T") .Output("output: T") .Attr("T: {float, half} = DT_FLOAT") .Doc(R"doc( Computes gradients of the maxpooling function. 
ksize: The size of the window for each dimension of the input tensor. strides: The stride of the sliding window for each dimension of the input tensor. padding: The type of padding algorithm to use. data_format: Specify the data format of the input and output data. With the default format "NHWC", the data is stored in the order of: [batch, in_height, in_width, in_channels]. Alternatively, the format could be "NCHW", the data storage order of: [batch, in_channels, in_height, in_width]. orig_input: The original input tensor. orig_output: The original output tensor. grad: 4-D. Gradients w.r.t. the output of `max_pool`. output: Gradients w.r.t. the input to `max_pool`. )doc"); REGISTER_OP("MaxPoolWithArgmax") .Attr("ksize: list(int) >= 4") .Attr("strides: list(int) >= 4") .Attr("Targmax: {int32, int64} = DT_INT64") .Attr(GetPaddingAttrString()) .Input("input: T") .Output("output: T") .Output("argmax: Targmax") .Attr("T: {float, half} = DT_FLOAT") .Doc(R"doc( Performs max pooling on the input and outputs both max values and indices. The indices in `argmax` are flattened, so that a maximum value at position `[b, y, x, c]` becomes flattened index `((b * height + y) * width + x) * channels + c`. ksize: The size of the window for each dimension of the input tensor. strides: The stride of the sliding window for each dimension of the input tensor. padding: The type of padding algorithm to use. input: 4-D with shape `[batch, height, width, channels]`. Input to pool over. output: The max pooled output tensor. argmax: 4-D. The flattened indices of the max values chosen for each output. )doc"); REGISTER_OP("MaxPoolGradWithArgmax") .Attr("ksize: list(int) >= 4") .Attr("strides: list(int) >= 4") .Attr(GetPaddingAttrString()) .Attr("Targmax: {int32, int64}") .Input("input: T") .Input("grad: T") .Input("argmax: Targmax") .Output("output: T") .Attr("T: {float, half} = DT_FLOAT") .Doc(R"doc( Computes gradients of the maxpooling function. 
ksize: The size of the window for each dimension of the input tensor. strides: The stride of the sliding window for each dimension of the input tensor. padding: The type of padding algorithm to use. input: The original input. grad: 4-D with shape `[batch, height, width, channels]`. Gradients w.r.t. the output of `max_pool`. argmax: The indices of the maximum values chosen for each output of `max_pool`. output: Gradients w.r.t. the input of `max_pool`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("Dilation2D") .Input("input: T") .Input("filter: T") .Output("output: T") .Attr("T: realnumbertype") .Attr("strides: list(int) >= 4") .Attr("rates: list(int) >= 4") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the grayscale dilation of 4-D `input` and 3-D `filter` tensors. The `input` tensor has shape `[batch, in_height, in_width, depth]` and the `filter` tensor has shape `[filter_height, filter_width, depth]`, i.e., each input channel is processed independently of the others with its own structuring function. The `output` tensor has shape `[batch, out_height, out_width, depth]`. The spatial dimensions of the output tensor depend on the `padding` algorithm. We currently only support the default "NHWC" `data_format`. In detail, the grayscale morphological 2-D dilation is the max-sum correlation (for consistency with `conv2d`, we use unmirrored filters): output[b, y, x, c] = max_{dy, dx} input[b, strides[1] * y + rates[1] * dy, strides[2] * x + rates[2] * dx, c] + filter[dy, dx, c] Max-pooling is a special case when the filter has size equal to the pooling kernel size and contains all zeros. Note on duality: The dilation of `input` by the `filter` is equal to the negation of the erosion of `-input` by the reflected `filter`. input: 4-D with shape `[batch, in_height, in_width, depth]`. filter: 3-D with shape `[filter_height, filter_width, depth]`. 
strides: The stride of the sliding window for each dimension of the input tensor. Must be: `[1, stride_height, stride_width, 1]`. rates: The input stride for atrous morphological dilation. Must be: `[1, rate_height, rate_width, 1]`. padding: The type of padding algorithm to use. output: 4-D with shape `[batch, out_height, out_width, depth]`. )doc"); REGISTER_OP("Dilation2DBackpropInput") .Input("input: T") .Input("filter: T") .Input("out_backprop: T") .Output("in_backprop: T") .Attr("T: realnumbertype") .Attr("strides: list(int) >= 4") .Attr("rates: list(int) >= 4") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the gradient of morphological 2-D dilation with respect to the input. input: 4-D with shape `[batch, in_height, in_width, depth]`. filter: 3-D with shape `[filter_height, filter_width, depth]`. out_backprop: 4-D with shape `[batch, out_height, out_width, depth]`. in_backprop: 4-D with shape `[batch, in_height, in_width, depth]`. strides: 1-D of length 4. The stride of the sliding window for each dimension of the input tensor. Must be: `[1, stride_height, stride_width, 1]`. rates: 1-D of length 4. The input stride for atrous morphological dilation. Must be: `[1, rate_height, rate_width, 1]`. padding: The type of padding algorithm to use. )doc"); REGISTER_OP("Dilation2DBackpropFilter") .Input("input: T") .Input("filter: T") .Input("out_backprop: T") .Output("filter_backprop: T") .Attr("T: realnumbertype") .Attr("strides: list(int) >= 4") .Attr("rates: list(int) >= 4") .Attr(GetPaddingAttrString()) .Doc(R"doc( Computes the gradient of morphological 2-D dilation with respect to the filter. input: 4-D with shape `[batch, in_height, in_width, depth]`. filter: 3-D with shape `[filter_height, filter_width, depth]`. out_backprop: 4-D with shape `[batch, out_height, out_width, depth]`. filter_backprop: 3-D with shape `[filter_height, filter_width, depth]`. strides: 1-D of length 4. The stride of the sliding window for each dimension of the input tensor. 
Must be: `[1, stride_height, stride_width, 1]`. rates: 1-D of length 4. The input stride for atrous morphological dilation. Must be: `[1, rate_height, rate_width, 1]`. padding: The type of padding algorithm to use. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("Relu") .Input("features: T") .Output("activations: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes rectified linear: `max(features, 0)`. )doc"); REGISTER_OP("ReluGrad") .Input("gradients: T") .Input("features: T") .Output("backprops: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes rectified linear gradients for a Relu operation. gradients: The backpropagated gradients to the corresponding Relu operation. features: The features passed as input to the corresponding Relu operation, OR the outputs of that operation (both work equivalently). backprops: `gradients * (features > 0)`. )doc"); REGISTER_OP("Relu6") .Input("features: T") .Output("activations: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes rectified linear 6: `min(max(features, 0), 6)`. )doc"); REGISTER_OP("Relu6Grad") .Input("gradients: T") .Input("features: T") .Output("backprops: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes rectified linear 6 gradients for a Relu6 operation. gradients: The backpropagated gradients to the corresponding Relu6 operation. features: The features passed as input to the corresponding Relu6 operation. backprops: The gradients: `gradients * features * (features > 0) * (features < 6)`. )doc"); REGISTER_OP("Elu") .Input("features: T") .Output("activations: T") .Attr("T: {float, double}") .Doc(R"doc( Computes exponential linear: `exp(features) - 1` if < 0, `features` otherwise. 
See [Fast and Accurate Deep Network Learning by Exponential Linear Units (ELUs) ](http://arxiv.org/abs/1511.07289) )doc"); REGISTER_OP("EluGrad") .Input("gradients: T") .Input("outputs: T") .Output("backprops: T") .Attr("T: {float, double}") .Doc(R"doc( Computes gradients for the exponential linear (Elu) operation. gradients: The backpropagated gradients to the corresponding Elu operation. outputs: The outputs of the corresponding Elu operation. backprops: The gradients: `gradients * (outputs + 1)` if outputs < 0, `gradients` otherwise. )doc"); REGISTER_OP("Softplus") .Input("features: T") .Output("activations: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes softplus: `log(exp(features) + 1)`. )doc"); REGISTER_OP("SoftplusGrad") .Input("gradients: T") .Input("features: T") .Output("backprops: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes softplus gradients for a softplus operation. gradients: The backpropagated gradients to the corresponding softplus operation. features: The features passed as input to the corresponding softplus operation. backprops: The gradients: `gradients / (1 + exp(-features))`. )doc"); REGISTER_OP("Softsign") .Input("features: T") .Output("activations: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes softsign: `features / (abs(features) + 1)`. )doc"); REGISTER_OP("SoftsignGrad") .Input("gradients: T") .Input("features: T") .Output("backprops: T") .Attr("T: realnumbertype") .Doc(R"doc( Computes softsign gradients for a softsign operation. gradients: The backpropagated gradients to the corresponding softsign operation. features: The features passed as input to the corresponding softsign operation. backprops: The gradients: `gradients / (1 + abs(-features)) ** 2`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("Softmax") .Input("logits: T") .Output("softmax: T") .Attr("T: {half, float, double}") .Doc(R"doc( Computes softmax activations. 
For each batch `i` and class `j` we have softmax[i, j] = exp(logits[i, j]) / sum_j(exp(logits[i, j])) logits: 2-D with shape `[batch_size, num_classes]`. softmax: Same shape as `logits`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("LogSoftmax") .Input("logits: T") .Output("logsoftmax: T") .Attr("T: {half, float, double}") .Doc(R"doc( Computes log softmax activations. For each batch `i` and class `j` we have logsoftmax[i, j] = logits[i, j] - log(sum(exp(logits[i]))) logits: 2-D with shape `[batch_size, num_classes]`. logsoftmax: Same shape as `logits`. )doc"); // -------------------------------------------------------------------------- REGISTER_OP("SoftmaxCrossEntropyWithLogits") .Input("features: T") .Input("labels: T") .Output("loss: T") .Output("backprop: T") .Attr("T: {half, float, double}") .Doc(R"doc( Computes softmax cross entropy cost and gradients to backpropagate. Inputs are the logits, not probabilities. features: batch_size x num_classes matrix labels: batch_size x num_classes matrix The caller must ensure that each batch of labels represents a valid probability distribution. loss: Per example loss (batch_size vector). backprop: backpropagated gradients (batch_size x num_classes matrix). )doc"); REGISTER_OP("SparseSoftmaxCrossEntropyWithLogits") .Input("features: T") .Input("labels: Tlabels") .Output("loss: T") .Output("backprop: T") .Attr("T: {half, float, double}") .Attr("Tlabels: {int32, int64} = DT_INT64") .Doc(R"doc( Computes softmax cross entropy cost and gradients to backpropagate. Unlike `SoftmaxCrossEntropyWithLogits`, this operation does not accept a matrix of label probabilities, but rather a single label per row of features. This label is considered to have probability 1.0 for the given row. Inputs are the logits, not probabilities. features: batch_size x num_classes matrix labels: batch_size vector with values in [0, num_classes). This is the label for the given minibatch entry. 
loss: Per example loss (batch_size vector). backprop: backpropagated gradients (batch_size x num_classes matrix). )doc"); // -------------------------------------------------------------------------- REGISTER_OP("InTopK") .Input("predictions: float") .Input("targets: T") .Output("precision: bool") .Attr("k: int") .Attr("T: {int32, int64} = DT_INT32") .Doc(R"doc( Says whether the targets are in the top `K` predictions. This outputs a `batch_size` bool array, an entry `out[i]` is `true` if the prediction for the target class is among the top `k` predictions among all predictions for example `i`. Note that the behavior of `InTopK` differs from the `TopK` op in its handling of ties; if multiple classes have the same prediction value and straddle the top-`k` boundary, all of those classes are considered to be in the top `k`. More formally, let \\(predictions_i\\) be the predictions for all classes for example `i`, \\(targets_i\\) be the target class for example `i`, \\(out_i\\) be the output for example `i`, $$out_i = predictions_{i, targets_i} \in TopKIncludingTies(predictions_i)$$ predictions: A `batch_size` x `classes` tensor. targets: A `batch_size` vector of class ids. k: Number of top elements to look at for computing precision. precision: Computed Precision at `k` as a `bool Tensor`. )doc"); namespace { Status TopKShapeFn(InferenceContext* c) { const Shape* input; TF_RETURN_IF_ERROR(c->WithRankAtLeast(c->input(0), 1, &input)); // Get the k value, either from input tensor or attribute. 
const Dimension* k_dim; if (c->num_inputs() >= 2) { TF_RETURN_IF_ERROR(c->MakeDimForScalarInput(1, &k_dim)); } else { int32 k; TF_RETURN_IF_ERROR(c->GetAttr("k", &k)); if (k < 0) { return errors::InvalidArgument("Need k >= 0, got ", k); } k_dim = c->MakeDim(k); } const Dimension* last_dim = c->Dim(input, -1); if (c->ValueKnown(last_dim) && c->ValueKnown(k_dim) && c->Value(last_dim) < c->Value(k_dim)) { return errors::InvalidArgument("input must have last dimension >= k = ", c->Value(k_dim), " but is ", c->Value(last_dim)); } // Replace last_dim with k_dim. const Shape* s; TF_RETURN_IF_ERROR(c->Subshape(input, 0, -1, &s)); TF_RETURN_IF_ERROR(c->Concatenate(s, c->MakeShape({k_dim}), &s)); c->set_output(0, s); c->set_output(1, s); return Status::OK(); } } // namespace REGISTER_OP("TopK") .Input("input: T") .Output("values: T") .Output("indices: int32") .Attr("k: int >= 0") .Attr("sorted: bool = true") .Attr("T: realnumbertype") .Deprecated(7, "Use TopKV2 instead") .SetShapeFn(OpShapeInferenceFn(TopKShapeFn)) .Doc(R"doc( Finds values and indices of the `k` largest elements for the last dimension. If the input is a vector (rank-1), finds the `k` largest entries in the vector and outputs their values and indices as vectors. Thus `values[j]` is the `j`-th largest entry in `input`, and its index is `indices[j]`. For matrices (resp. higher rank input), computes the top `k` entries in each row (resp. vector along the last dimension). Thus, values.shape = indices.shape = input.shape[:-1] + [k] If two elements are equal, the lower-index element appears first. If `k` varies dynamically, use `TopKV2` below. input: 1-D or higher with last dimension at least `k`. k: Number of top elements to look for along the last dimension (along each row for matrices). sorted: If true the resulting `k` elements will be sorted by the values in descending order. values: The `k` largest elements along each last dimensional slice. 
indices: The indices of `values` within the last dimension of `input`. )doc"); REGISTER_OP("TopKV2") .Input("input: T") .Input("k: int32") .Output("values: T") .Output("indices: int32") .Attr("sorted: bool = true") .Attr("T: realnumbertype") .SetShapeFn(OpShapeInferenceFn(TopKShapeFn)) .Doc(R"doc( Finds values and indices of the `k` largest elements for the last dimension. If the input is a vector (rank-1), finds the `k` largest entries in the vector and outputs their values and indices as vectors. Thus `values[j]` is the `j`-th largest entry in `input`, and its index is `indices[j]`. For matrices (resp. higher rank input), computes the top `k` entries in each row (resp. vector along the last dimension). Thus, values.shape = indices.shape = input.shape[:-1] + [k] If two elements are equal, the lower-index element appears first. This is the same as `TopK`, but takes `k` as in input rather than an attr. input: 1-D or higher with last dimension at least `k`. k: 0-D. Number of top elements to look for along the last dimension (along each row for matrices). sorted: If true the resulting `k` elements will be sorted by the values in descending order. values: The `k` largest elements along each last dimensional slice. indices: The indices of `values` within the last dimension of `input`. )doc"); } // namespace tensorflow
HaebinShin/tensorflow
tensorflow/core/ops/nn_ops.cc
C++
apache-2.0
45,533
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lookoutmetrics.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.lookoutmetrics.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import static com.fasterxml.jackson.core.JsonToken.*; /** * DeactivateAnomalyDetectorResult JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DeactivateAnomalyDetectorResultJsonUnmarshaller implements Unmarshaller<DeactivateAnomalyDetectorResult, JsonUnmarshallerContext> { public DeactivateAnomalyDetectorResult unmarshall(JsonUnmarshallerContext context) throws Exception { DeactivateAnomalyDetectorResult deactivateAnomalyDetectorResult = new DeactivateAnomalyDetectorResult(); return deactivateAnomalyDetectorResult; } private static DeactivateAnomalyDetectorResultJsonUnmarshaller instance; public static DeactivateAnomalyDetectorResultJsonUnmarshaller getInstance() { if (instance == null) instance = new DeactivateAnomalyDetectorResultJsonUnmarshaller(); return instance; } }
aws/aws-sdk-java
aws-java-sdk-lookoutmetrics/src/main/java/com/amazonaws/services/lookoutmetrics/model/transform/DeactivateAnomalyDetectorResultJsonUnmarshaller.java
Java
apache-2.0
1,747
---
layout: post
title: 使用PDF等矢量图适配
date: 2015-01-14 12:03:02
tags: 矢量图适配
excerpt: "用矢量图来适配."
comments: true
---

# 使用PDF等矢量图适配

#### 使用方法

在你的项目中有个文件夹叫 `Images.xcassets`

把pdf图放在这个文件夹里面

单击图片 右边工具栏可设置图片types

将其设置为`vectors`

给个详细链接吧 感谢此博主<http://blog.csdn.net/cuibo1123/article/details/39486197>
Onery/onery.github.io
_posts/2015-01-14-使用PDF等矢量图适配.md
Markdown
apache-2.0
461
package at.spenger.git.start;

import java.time.Instant;
import java.time.LocalDate;
import java.time.Period;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

/**
 * Small java.time demo: prints a few greetings, today's date formatted in UTC, and the elapsed
 * years/months/days since a fixed birth date.
 */
public class Haupt {

    public static void main(String[] args) {
        System.out.println("Hallo Welt!");
        System.out.println("Ciao Mondo!");
        System.out.println("Hello World!");

        // ISO-style date pattern used below. The original code first assigned "yyyy.MM.dd" and
        // immediately overwrote it (dead store), and also computed an unused `timeStamp` local
        // from Instant.now().toEpochMilli(); both were removed — printed output is unchanged.
        String pattern = "yyyy-MM-dd";

        // Coordinated Universal Time (UTC)
        // based on International Atomic Time (TAI), introduced 1972
        // With leap seconds --> 2012 TAI has been 35 s ahead of UTC
        DateTimeFormatter dtf = DateTimeFormatter.ofPattern(pattern).withZone(ZoneId.of("UTC"));
        System.out.println(dtf.format(Instant.now()));

        // Age since 1968-09-27, broken down into calendar units.
        Period p = Period.between(LocalDate.parse("1968-09-27"), LocalDate.now());
        System.out.printf("Jahre: %d ", p.getYears());
        System.out.printf("Monate: %d Tage: %d", p.getMonths(), p.getDays());
    }
}
htlspenger2013/startgit
git-start/src/at/spenger/git/start/Haupt.java
Java
apache-2.0
1,052