id
int32
0
165k
repo
stringlengths
7
58
path
stringlengths
12
218
func_name
stringlengths
3
140
original_string
stringlengths
73
34.1k
language
stringclasses
1 value
code
stringlengths
73
34.1k
code_tokens
list
docstring
stringlengths
3
16k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
105
339
25,200
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.activate
/**
 * Sets the status of the named topology to active.
 *
 * @param topologyName name of the topology to activate
 * @throws NotAliveException if no alive topology has that name
 * @throws TException if the state transition fails for any other reason
 */
@Override
public void activate(String topologyName) throws TException {
    try {
        NimbusUtils.transitionName(data, topologyName, true, StatusType.activate);
        notifyTopologyActionListener(topologyName, "activate");
    } catch (NotAliveException notAlive) {
        String msg = "Activate Error, topology " + topologyName + " is not alive!";
        LOG.error(msg, notAlive);
        throw new NotAliveException(msg);
    } catch (Exception ex) {
        String msg = "Failed to activate topology " + topologyName;
        LOG.error(msg, ex);
        throw new TException(msg);
    }
}
java
@Override public void activate(String topologyName) throws TException { try { NimbusUtils.transitionName(data, topologyName, true, StatusType.activate); notifyTopologyActionListener(topologyName, "activate"); } catch (NotAliveException e) { String errMsg = "Activate Error, topology " + topologyName + " is not alive!"; LOG.error(errMsg, e); throw new NotAliveException(errMsg); } catch (Exception e) { String errMsg = "Failed to activate topology " + topologyName; LOG.error(errMsg, e); throw new TException(errMsg); } }
[ "@", "Override", "public", "void", "activate", "(", "String", "topologyName", ")", "throws", "TException", "{", "try", "{", "NimbusUtils", ".", "transitionName", "(", "data", ",", "topologyName", ",", "true", ",", "StatusType", ".", "activate", ")", ";", "notifyTopologyActionListener", "(", "topologyName", ",", "\"activate\"", ")", ";", "}", "catch", "(", "NotAliveException", "e", ")", "{", "String", "errMsg", "=", "\"Activate Error, topology \"", "+", "topologyName", "+", "\" is not alive!\"", ";", "LOG", ".", "error", "(", "errMsg", ",", "e", ")", ";", "throw", "new", "NotAliveException", "(", "errMsg", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "String", "errMsg", "=", "\"Failed to activate topology \"", "+", "topologyName", ";", "LOG", ".", "error", "(", "errMsg", ",", "e", ")", ";", "throw", "new", "TException", "(", "errMsg", ")", ";", "}", "}" ]
set topology status as active
[ "set", "topology", "status", "as", "active" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L422-L436
25,201
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.rebalance
/**
 * Rebalances a running topology, optionally after a wait period and with a
 * new worker/component JSON configuration.
 *
 * @param topologyName topology name
 * @param options      optional wait seconds, reassign flag and conf overrides
 * @throws NotAliveException if the topology is not alive
 * @throws TException        if the rebalance transition fails
 */
@Override
public void rebalance(String topologyName, RebalanceOptions options) throws TException {
    try {
        checkTopologyActive(data, topologyName, true);
        Integer wait_amt = null;
        String jsonConf = null;
        Boolean reassign = false;
        if (options != null) {
            if (options.is_set_wait_secs())
                wait_amt = options.get_wait_secs();
            if (options.is_set_reassign())
                reassign = options.is_reassign();
            if (options.is_set_conf())
                jsonConf = options.get_conf();
        }
        // fixed: a separator was missing between the topology name and
        // "wait_time", yielding log lines like "myTopowait_time:30"
        LOG.info("Begin to rebalance " + topologyName + ", wait_time:" + wait_amt + ", reassign: " + reassign
                + ", new worker/bolt configuration:" + jsonConf);
        Map<Object, Object> conf = (Map<Object, Object>) JStormUtils.from_json(jsonConf);
        NimbusUtils.transitionName(data, topologyName, true, StatusType.rebalance, wait_amt, reassign, conf);
        notifyTopologyActionListener(topologyName, "rebalance");
    } catch (NotAliveException e) {
        String errMsg = "Rebalance error, topology " + topologyName + " is not alive!";
        LOG.error(errMsg, e);
        throw new NotAliveException(errMsg);
    } catch (Exception e) {
        String errMsg = "Failed to rebalance topology " + topologyName;
        LOG.error(errMsg, e);
        throw new TException(errMsg);
    }
}
java
@Override public void rebalance(String topologyName, RebalanceOptions options) throws TException { try { checkTopologyActive(data, topologyName, true); Integer wait_amt = null; String jsonConf = null; Boolean reassign = false; if (options != null) { if (options.is_set_wait_secs()) wait_amt = options.get_wait_secs(); if (options.is_set_reassign()) reassign = options.is_reassign(); if (options.is_set_conf()) jsonConf = options.get_conf(); } LOG.info("Begin to rebalance " + topologyName + "wait_time:" + wait_amt + ", reassign: " + reassign + ", new worker/bolt configuration:" + jsonConf); Map<Object, Object> conf = (Map<Object, Object>) JStormUtils.from_json(jsonConf); NimbusUtils.transitionName(data, topologyName, true, StatusType.rebalance, wait_amt, reassign, conf); notifyTopologyActionListener(topologyName, "rebalance"); } catch (NotAliveException e) { String errMsg = "Rebalance error, topology " + topologyName + " is not alive!"; LOG.error(errMsg, e); throw new NotAliveException(errMsg); } catch (Exception e) { String errMsg = "Failed to rebalance topology " + topologyName; LOG.error(errMsg, e); throw new TException(errMsg); } }
[ "@", "Override", "public", "void", "rebalance", "(", "String", "topologyName", ",", "RebalanceOptions", "options", ")", "throws", "TException", "{", "try", "{", "checkTopologyActive", "(", "data", ",", "topologyName", ",", "true", ")", ";", "Integer", "wait_amt", "=", "null", ";", "String", "jsonConf", "=", "null", ";", "Boolean", "reassign", "=", "false", ";", "if", "(", "options", "!=", "null", ")", "{", "if", "(", "options", ".", "is_set_wait_secs", "(", ")", ")", "wait_amt", "=", "options", ".", "get_wait_secs", "(", ")", ";", "if", "(", "options", ".", "is_set_reassign", "(", ")", ")", "reassign", "=", "options", ".", "is_reassign", "(", ")", ";", "if", "(", "options", ".", "is_set_conf", "(", ")", ")", "jsonConf", "=", "options", ".", "get_conf", "(", ")", ";", "}", "LOG", ".", "info", "(", "\"Begin to rebalance \"", "+", "topologyName", "+", "\"wait_time:\"", "+", "wait_amt", "+", "\", reassign: \"", "+", "reassign", "+", "\", new worker/bolt configuration:\"", "+", "jsonConf", ")", ";", "Map", "<", "Object", ",", "Object", ">", "conf", "=", "(", "Map", "<", "Object", ",", "Object", ">", ")", "JStormUtils", ".", "from_json", "(", "jsonConf", ")", ";", "NimbusUtils", ".", "transitionName", "(", "data", ",", "topologyName", ",", "true", ",", "StatusType", ".", "rebalance", ",", "wait_amt", ",", "reassign", ",", "conf", ")", ";", "notifyTopologyActionListener", "(", "topologyName", ",", "\"rebalance\"", ")", ";", "}", "catch", "(", "NotAliveException", "e", ")", "{", "String", "errMsg", "=", "\"Rebalance error, topology \"", "+", "topologyName", "+", "\" is not alive!\"", ";", "LOG", ".", "error", "(", "errMsg", ",", "e", ")", ";", "throw", "new", "NotAliveException", "(", "errMsg", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "String", "errMsg", "=", "\"Failed to rebalance topology \"", "+", "topologyName", ";", "LOG", ".", "error", "(", "errMsg", ",", "e", ")", ";", "throw", "new", "TException", "(", "errMsg", ")", ";", "}", "}" ]
rebalance a topology @param topologyName topology name @param options RebalanceOptions
[ "rebalance", "a", "topology" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L463-L497
25,202
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.beginFileUpload
/**
 * Prepares an upload slot for a topology jar: creates a fresh directory under
 * the nimbus master inbox, registers a writable channel for the target jar
 * file in the uploaders cache, and returns the directory path for the client.
 *
 * @return path of the newly created inbox directory
 * @throws TException if the directory or jar file cannot be created
 */
@Override
public String beginFileUpload() throws TException {
    String jarLocation = null;
    try {
        String uploadKey = UUID.randomUUID().toString();
        String inboxDir = StormConfig.masterInbox(conf) + "/" + uploadKey;
        File dir = new File(inboxDir);
        FileUtils.forceMkdir(dir);
        FileUtils.cleanDirectory(dir);
        jarLocation = inboxDir + "/stormjar-" + uploadKey + ".jar";
        data.getUploaders().put(jarLocation, Channels.newChannel(new FileOutputStream(jarLocation)));
        LOG.info("Begin upload file from client to " + jarLocation);
        return inboxDir;
    } catch (FileNotFoundException e) {
        LOG.error("File not found: " + jarLocation, e);
        throw new TException(e);
    } catch (IOException e) {
        LOG.error("Upload file error: " + jarLocation, e);
        throw new TException(e);
    }
}
java
@Override public String beginFileUpload() throws TException { String fileLoc = null; try { String path; String key = UUID.randomUUID().toString(); path = StormConfig.masterInbox(conf) + "/" + key; FileUtils.forceMkdir(new File(path)); FileUtils.cleanDirectory(new File(path)); fileLoc = path + "/stormjar-" + key + ".jar"; data.getUploaders().put(fileLoc, Channels.newChannel(new FileOutputStream(fileLoc))); LOG.info("Begin upload file from client to " + fileLoc); return path; } catch (FileNotFoundException e) { LOG.error("File not found: " + fileLoc, e); throw new TException(e); } catch (IOException e) { LOG.error("Upload file error: " + fileLoc, e); throw new TException(e); } }
[ "@", "Override", "public", "String", "beginFileUpload", "(", ")", "throws", "TException", "{", "String", "fileLoc", "=", "null", ";", "try", "{", "String", "path", ";", "String", "key", "=", "UUID", ".", "randomUUID", "(", ")", ".", "toString", "(", ")", ";", "path", "=", "StormConfig", ".", "masterInbox", "(", "conf", ")", "+", "\"/\"", "+", "key", ";", "FileUtils", ".", "forceMkdir", "(", "new", "File", "(", "path", ")", ")", ";", "FileUtils", ".", "cleanDirectory", "(", "new", "File", "(", "path", ")", ")", ";", "fileLoc", "=", "path", "+", "\"/stormjar-\"", "+", "key", "+", "\".jar\"", ";", "data", ".", "getUploaders", "(", ")", ".", "put", "(", "fileLoc", ",", "Channels", ".", "newChannel", "(", "new", "FileOutputStream", "(", "fileLoc", ")", ")", ")", ";", "LOG", ".", "info", "(", "\"Begin upload file from client to \"", "+", "fileLoc", ")", ";", "return", "path", ";", "}", "catch", "(", "FileNotFoundException", "e", ")", "{", "LOG", ".", "error", "(", "\"File not found: \"", "+", "fileLoc", ",", "e", ")", ";", "throw", "new", "TException", "(", "e", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "LOG", ".", "error", "(", "\"Upload file error: \"", "+", "fileLoc", ",", "e", ")", ";", "throw", "new", "TException", "(", "e", ")", ";", "}", "}" ]
prepare to upload topology jar, return the file location
[ "prepare", "to", "upload", "topology", "jar", "return", "the", "file", "location" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L625-L646
25,203
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.uploadChunk
/**
 * Appends one chunk of an uploaded topology jar to the channel previously
 * registered by {@code beginFileUpload}.
 *
 * @param location upload location key returned to the client
 * @param chunk    bytes to append
 * @throws TException if the upload session is unknown/expired, the cached
 *                    object is not a channel, or the write fails
 */
@Override
public void uploadChunk(String location, ByteBuffer chunk) throws TException {
    TimeCacheMap<Object, Object> uploaders = data.getUploaders();
    Object obj = uploaders.get(location);
    if (obj == null) {
        throw new TException("File for that location does not exist (or timed out) " + location);
    }
    try {
        if (obj instanceof WritableByteChannel) {
            WritableByteChannel channel = (WritableByteChannel) obj;
            channel.write(chunk);
            // put back after each chunk — presumably refreshes the time-cache
            // entry so an in-progress upload is not evicted; confirm against
            // TimeCacheMap semantics
            uploaders.put(location, channel);
        } else {
            throw new TException("Object isn't WritableByteChannel for " + location);
        }
    } catch (IOException e) {
        // fixed: include the exception in the log (it was dropped) and the
        // "filed" -> "failed" typo in the message
        String errMsg = " WritableByteChannel write failed when uploadChunk " + location;
        LOG.error(errMsg, e);
        throw new TException(e);
    }
}
java
@Override public void uploadChunk(String location, ByteBuffer chunk) throws TException { TimeCacheMap<Object, Object> uploaders = data.getUploaders(); Object obj = uploaders.get(location); if (obj == null) { throw new TException("File for that location does not exist (or timed out) " + location); } try { if (obj instanceof WritableByteChannel) { WritableByteChannel channel = (WritableByteChannel) obj; channel.write(chunk); uploaders.put(location, channel); } else { throw new TException("Object isn't WritableByteChannel for " + location); } } catch (IOException e) { String errMsg = " WritableByteChannel write filed when uploadChunk " + location; LOG.error(errMsg); throw new TException(e); } }
[ "@", "Override", "public", "void", "uploadChunk", "(", "String", "location", ",", "ByteBuffer", "chunk", ")", "throws", "TException", "{", "TimeCacheMap", "<", "Object", ",", "Object", ">", "uploaders", "=", "data", ".", "getUploaders", "(", ")", ";", "Object", "obj", "=", "uploaders", ".", "get", "(", "location", ")", ";", "if", "(", "obj", "==", "null", ")", "{", "throw", "new", "TException", "(", "\"File for that location does not exist (or timed out) \"", "+", "location", ")", ";", "}", "try", "{", "if", "(", "obj", "instanceof", "WritableByteChannel", ")", "{", "WritableByteChannel", "channel", "=", "(", "WritableByteChannel", ")", "obj", ";", "channel", ".", "write", "(", "chunk", ")", ";", "uploaders", ".", "put", "(", "location", ",", "channel", ")", ";", "}", "else", "{", "throw", "new", "TException", "(", "\"Object isn't WritableByteChannel for \"", "+", "location", ")", ";", "}", "}", "catch", "(", "IOException", "e", ")", "{", "String", "errMsg", "=", "\" WritableByteChannel write filed when uploadChunk \"", "+", "location", ";", "LOG", ".", "error", "(", "errMsg", ")", ";", "throw", "new", "TException", "(", "e", ")", ";", "}", "}" ]
upload topology jar data
[ "upload", "topology", "jar", "data" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L651-L672
25,204
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.getClusterInfo
@Override public ClusterSummary getClusterInfo() throws TException { long start = System.nanoTime(); try { StormClusterState stormClusterState = data.getStormClusterState(); Map<String, Assignment> assignments = new HashMap<>(); // get TopologySummary List<TopologySummary> topologySummaries = NimbusUtils.getTopologySummary(stormClusterState, assignments); // all supervisors Map<String, SupervisorInfo> supervisorInfos = Cluster.get_all_SupervisorInfo(stormClusterState, null); // generate SupervisorSummaries List<SupervisorSummary> supervisorSummaries = NimbusUtils.mkSupervisorSummaries(supervisorInfos, assignments); NimbusSummary nimbusSummary = NimbusUtils.getNimbusSummary(stormClusterState, supervisorSummaries, data); return new ClusterSummary(nimbusSummary, supervisorSummaries, topologySummaries); } catch (TException e) { LOG.info("Failed to get ClusterSummary ", e); throw e; } catch (Exception e) { LOG.info("Failed to get ClusterSummary ", e); throw new TException(e); } finally { long end = System.nanoTime(); SimpleJStormMetric.updateNimbusHistogram("getClusterInfo", (end - start) / TimeUtils.NS_PER_US); } }
java
@Override public ClusterSummary getClusterInfo() throws TException { long start = System.nanoTime(); try { StormClusterState stormClusterState = data.getStormClusterState(); Map<String, Assignment> assignments = new HashMap<>(); // get TopologySummary List<TopologySummary> topologySummaries = NimbusUtils.getTopologySummary(stormClusterState, assignments); // all supervisors Map<String, SupervisorInfo> supervisorInfos = Cluster.get_all_SupervisorInfo(stormClusterState, null); // generate SupervisorSummaries List<SupervisorSummary> supervisorSummaries = NimbusUtils.mkSupervisorSummaries(supervisorInfos, assignments); NimbusSummary nimbusSummary = NimbusUtils.getNimbusSummary(stormClusterState, supervisorSummaries, data); return new ClusterSummary(nimbusSummary, supervisorSummaries, topologySummaries); } catch (TException e) { LOG.info("Failed to get ClusterSummary ", e); throw e; } catch (Exception e) { LOG.info("Failed to get ClusterSummary ", e); throw new TException(e); } finally { long end = System.nanoTime(); SimpleJStormMetric.updateNimbusHistogram("getClusterInfo", (end - start) / TimeUtils.NS_PER_US); } }
[ "@", "Override", "public", "ClusterSummary", "getClusterInfo", "(", ")", "throws", "TException", "{", "long", "start", "=", "System", ".", "nanoTime", "(", ")", ";", "try", "{", "StormClusterState", "stormClusterState", "=", "data", ".", "getStormClusterState", "(", ")", ";", "Map", "<", "String", ",", "Assignment", ">", "assignments", "=", "new", "HashMap", "<>", "(", ")", ";", "// get TopologySummary", "List", "<", "TopologySummary", ">", "topologySummaries", "=", "NimbusUtils", ".", "getTopologySummary", "(", "stormClusterState", ",", "assignments", ")", ";", "// all supervisors", "Map", "<", "String", ",", "SupervisorInfo", ">", "supervisorInfos", "=", "Cluster", ".", "get_all_SupervisorInfo", "(", "stormClusterState", ",", "null", ")", ";", "// generate SupervisorSummaries", "List", "<", "SupervisorSummary", ">", "supervisorSummaries", "=", "NimbusUtils", ".", "mkSupervisorSummaries", "(", "supervisorInfos", ",", "assignments", ")", ";", "NimbusSummary", "nimbusSummary", "=", "NimbusUtils", ".", "getNimbusSummary", "(", "stormClusterState", ",", "supervisorSummaries", ",", "data", ")", ";", "return", "new", "ClusterSummary", "(", "nimbusSummary", ",", "supervisorSummaries", ",", "topologySummaries", ")", ";", "}", "catch", "(", "TException", "e", ")", "{", "LOG", ".", "info", "(", "\"Failed to get ClusterSummary \"", ",", "e", ")", ";", "throw", "e", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "info", "(", "\"Failed to get ClusterSummary \"", ",", "e", ")", ";", "throw", "new", "TException", "(", "e", ")", ";", "}", "finally", "{", "long", "end", "=", "System", ".", "nanoTime", "(", ")", ";", "SimpleJStormMetric", ".", "updateNimbusHistogram", "(", "\"getClusterInfo\"", ",", "(", "end", "-", "start", ")", "/", "TimeUtils", ".", "NS_PER_US", ")", ";", "}", "}" ]
get cluster's summary, it will contain SupervisorSummary and TopologySummary @return ClusterSummary
[ "get", "cluster", "s", "summary", "it", "will", "contain", "SupervisorSummary", "and", "TopologySummary" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L951-L981
25,205
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.getTopologyConf
/**
 * Gets a topology's configuration as a JSON string.
 *
 * @param id topology id
 * @return JSON-serialized topology configuration
 * @throws TException if the stored configuration cannot be read
 */
@Override
public String getTopologyConf(String id) throws TException {
    String rtn;
    try {
        Map<Object, Object> topologyConf = StormConfig.read_nimbus_topology_conf(id, data.getBlobStore());
        rtn = JStormUtils.to_json(topologyConf);
    } catch (IOException e) {
        // fixed: a read failure was logged at INFO level; it is an error
        LOG.error("Failed to get configuration of " + id, e);
        throw new TException(e);
    }
    return rtn;
}
java
@Override public String getTopologyConf(String id) throws TException { String rtn; try { Map<Object, Object> topologyConf = StormConfig.read_nimbus_topology_conf(id, data.getBlobStore()); rtn = JStormUtils.to_json(topologyConf); } catch (IOException e) { LOG.info("Failed to get configuration of " + id, e); throw new TException(e); } return rtn; }
[ "@", "Override", "public", "String", "getTopologyConf", "(", "String", "id", ")", "throws", "TException", "{", "String", "rtn", ";", "try", "{", "Map", "<", "Object", ",", "Object", ">", "topologyConf", "=", "StormConfig", ".", "read_nimbus_topology_conf", "(", "id", ",", "data", ".", "getBlobStore", "(", ")", ")", ";", "rtn", "=", "JStormUtils", ".", "to_json", "(", "topologyConf", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "LOG", ".", "info", "(", "\"Failed to get configuration of \"", "+", "id", ",", "e", ")", ";", "throw", "new", "TException", "(", "e", ")", ";", "}", "return", "rtn", ";", "}" ]
get topology configuration @param id String: topology id @return String
[ "get", "topology", "configuration" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L1399-L1410
25,206
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.getTopology
/**
 * Gets the full (system) StormTopology by deserializing the stored topology
 * code and combining it with the stored topology configuration.
 *
 * @param id topology id
 * @return the system topology
 * @throws TException if the topology is missing or cannot be built
 */
@Override
public StormTopology getTopology(String id) throws TException {
    StormTopology topology;
    try {
        StormTopology stormtopology = StormConfig.read_nimbus_topology_code(id, data.getBlobStore());
        if (stormtopology == null) {
            throw new NotAliveException("No topology of " + id);
        }
        Map<Object, Object> topologyConf =
                (Map<Object, Object>) StormConfig.read_nimbus_topology_conf(id, data.getBlobStore());
        topology = Common.system_topology(topologyConf, stormtopology);
    } catch (Exception e) {
        LOG.error("Failed to get topology " + id + ",", e);
        // fixed: preserve the original cause instead of discarding it
        throw new TException("Failed to get system_topology", e);
    }
    return topology;
}
java
@Override public StormTopology getTopology(String id) throws TException { StormTopology topology; try { StormTopology stormtopology = StormConfig.read_nimbus_topology_code(id, data.getBlobStore()); if (stormtopology == null) { throw new NotAliveException("No topology of " + id); } Map<Object, Object> topologyConf = (Map<Object, Object>) StormConfig.read_nimbus_topology_conf(id, data.getBlobStore()); topology = Common.system_topology(topologyConf, stormtopology); } catch (Exception e) { LOG.error("Failed to get topology " + id + ",", e); throw new TException("Failed to get system_topology"); } return topology; }
[ "@", "Override", "public", "StormTopology", "getTopology", "(", "String", "id", ")", "throws", "TException", "{", "StormTopology", "topology", ";", "try", "{", "StormTopology", "stormtopology", "=", "StormConfig", ".", "read_nimbus_topology_code", "(", "id", ",", "data", ".", "getBlobStore", "(", ")", ")", ";", "if", "(", "stormtopology", "==", "null", ")", "{", "throw", "new", "NotAliveException", "(", "\"No topology of \"", "+", "id", ")", ";", "}", "Map", "<", "Object", ",", "Object", ">", "topologyConf", "=", "(", "Map", "<", "Object", ",", "Object", ">", ")", "StormConfig", ".", "read_nimbus_topology_conf", "(", "id", ",", "data", ".", "getBlobStore", "(", ")", ")", ";", "topology", "=", "Common", ".", "system_topology", "(", "topologyConf", ",", "stormtopology", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "\"Failed to get topology \"", "+", "id", "+", "\",\"", ",", "e", ")", ";", "throw", "new", "TException", "(", "\"Failed to get system_topology\"", ")", ";", "}", "return", "topology", ";", "}" ]
get StormTopology throw deserialize local files @param id String: topology id @return StormTopology
[ "get", "StormTopology", "throw", "deserialize", "local", "files" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L1438-L1454
25,207
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.checkTopologyActive
/**
 * Verifies that the topology's liveness matches the expected state.
 *
 * @param nimbus       nimbus data holder
 * @param topologyName topology name
 * @param bActive      expected liveness
 * @throws NotAliveException     if expected active but the topology is not alive
 * @throws AlreadyAliveException if expected inactive but the topology is alive
 */
public void checkTopologyActive(NimbusData nimbus, String topologyName, boolean bActive) throws Exception {
    boolean actuallyActive = isTopologyActive(nimbus.getStormClusterState(), topologyName);
    if (actuallyActive == bActive) {
        return;
    }
    if (bActive) {
        throw new NotAliveException(topologyName + " is not alive");
    }
    throw new AlreadyAliveException(topologyName + " is already alive");
}
java
public void checkTopologyActive(NimbusData nimbus, String topologyName, boolean bActive) throws Exception { if (isTopologyActive(nimbus.getStormClusterState(), topologyName) != bActive) { if (bActive) { throw new NotAliveException(topologyName + " is not alive"); } else { throw new AlreadyAliveException(topologyName + " is already alive"); } } }
[ "public", "void", "checkTopologyActive", "(", "NimbusData", "nimbus", ",", "String", "topologyName", ",", "boolean", "bActive", ")", "throws", "Exception", "{", "if", "(", "isTopologyActive", "(", "nimbus", ".", "getStormClusterState", "(", ")", ",", "topologyName", ")", "!=", "bActive", ")", "{", "if", "(", "bActive", ")", "{", "throw", "new", "NotAliveException", "(", "topologyName", "+", "\" is not alive\"", ")", ";", "}", "else", "{", "throw", "new", "AlreadyAliveException", "(", "topologyName", "+", "\" is already alive\"", ")", ";", "}", "}", "}" ]
check whether the topology is bActive? @throws Exception
[ "check", "whether", "the", "topology", "is", "bActive?" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L1476-L1484
25,208
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java
ServiceHandler.isTopologyActive
/**
 * Reports whether a topology with the given name is active, i.e. whether a
 * topology id is registered for that name in cluster state.
 *
 * @param stormClusterState cluster state accessor
 * @param topologyName      topology name
 * @return true if the topology is active, false otherwise
 * @throws Exception if cluster state cannot be read
 */
public boolean isTopologyActive(StormClusterState stormClusterState, String topologyName) throws Exception {
    // idiom: return the comparison directly instead of a mutable flag
    return Cluster.get_topology_id(stormClusterState, topologyName) != null;
}
java
public boolean isTopologyActive(StormClusterState stormClusterState, String topologyName) throws Exception { boolean rtn = false; if (Cluster.get_topology_id(stormClusterState, topologyName) != null) { rtn = true; } return rtn; }
[ "public", "boolean", "isTopologyActive", "(", "StormClusterState", "stormClusterState", ",", "String", "topologyName", ")", "throws", "Exception", "{", "boolean", "rtn", "=", "false", ";", "if", "(", "Cluster", ".", "get_topology_id", "(", "stormClusterState", ",", "topologyName", ")", "!=", "null", ")", "{", "rtn", "=", "true", ";", "}", "return", "rtn", ";", "}" ]
whether the topology is active by topology name @param stormClusterState see Cluster_clj @param topologyName topology name @return boolean if the storm is active, return true, otherwise return false
[ "whether", "the", "topology", "is", "active", "by", "topology", "name" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/nimbus/ServiceHandler.java#L1493-L1499
25,209
alibaba/jstorm
jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/common/HdfsUtils.java
HdfsUtils.listFilesByModificationTime
/**
 * Lists files in {@code directory} (non-recursive), sorted by modification
 * time. When {@code olderThan > 0}, only files not modified since
 * {@code olderThan} are included; otherwise no filtering is applied.
 *
 * @param fs        file system to list from
 * @param directory directory to scan
 * @param olderThan modification-time cutoff; <= 0 disables filtering
 * @return paths of matching files, oldest first per ModifTimeComparator
 * @throws IOException if listing fails
 */
public static ArrayList<Path> listFilesByModificationTime(FileSystem fs, Path directory, long olderThan)
        throws IOException {
    ArrayList<LocatedFileStatus> statuses = new ArrayList<>();
    RemoteIterator<LocatedFileStatus> files = fs.listFiles(directory, false);
    while (files.hasNext()) {
        LocatedFileStatus status = files.next();
        boolean keep = olderThan <= 0 || status.getModificationTime() <= olderThan;
        if (keep) {
            statuses.add(status);
        }
    }
    Collections.sort(statuses, new ModifTimeComparator());
    ArrayList<Path> paths = new ArrayList<>(statuses.size());
    for (LocatedFileStatus status : statuses) {
        paths.add(status.getPath());
    }
    return paths;
}
java
public static ArrayList<Path> listFilesByModificationTime(FileSystem fs, Path directory, long olderThan) throws IOException { ArrayList<LocatedFileStatus> fstats = new ArrayList<>(); RemoteIterator<LocatedFileStatus> itr = fs.listFiles(directory, false); while( itr.hasNext() ) { LocatedFileStatus fileStatus = itr.next(); if(olderThan>0) { if( fileStatus.getModificationTime()<=olderThan ) fstats.add(fileStatus); } else { fstats.add(fileStatus); } } Collections.sort(fstats, new ModifTimeComparator() ); ArrayList<Path> result = new ArrayList<>(fstats.size()); for (LocatedFileStatus fstat : fstats) { result.add(fstat.getPath()); } return result; }
[ "public", "static", "ArrayList", "<", "Path", ">", "listFilesByModificationTime", "(", "FileSystem", "fs", ",", "Path", "directory", ",", "long", "olderThan", ")", "throws", "IOException", "{", "ArrayList", "<", "LocatedFileStatus", ">", "fstats", "=", "new", "ArrayList", "<>", "(", ")", ";", "RemoteIterator", "<", "LocatedFileStatus", ">", "itr", "=", "fs", ".", "listFiles", "(", "directory", ",", "false", ")", ";", "while", "(", "itr", ".", "hasNext", "(", ")", ")", "{", "LocatedFileStatus", "fileStatus", "=", "itr", ".", "next", "(", ")", ";", "if", "(", "olderThan", ">", "0", ")", "{", "if", "(", "fileStatus", ".", "getModificationTime", "(", ")", "<=", "olderThan", ")", "fstats", ".", "add", "(", "fileStatus", ")", ";", "}", "else", "{", "fstats", ".", "add", "(", "fileStatus", ")", ";", "}", "}", "Collections", ".", "sort", "(", "fstats", ",", "new", "ModifTimeComparator", "(", ")", ")", ";", "ArrayList", "<", "Path", ">", "result", "=", "new", "ArrayList", "<>", "(", "fstats", ".", "size", "(", ")", ")", ";", "for", "(", "LocatedFileStatus", "fstat", ":", "fstats", ")", "{", "result", ".", "add", "(", "fstat", ".", "getPath", "(", ")", ")", ";", "}", "return", "result", ";", "}" ]
list files sorted by modification time that have not been modified since 'olderThan'. if 'olderThan' is <= 0 then the filtering is disabled
[ "list", "files", "sorted", "by", "modification", "time", "that", "have", "not", "been", "modified", "since", "olderThan", ".", "if", "olderThan", "is", "<", "=", "0", "then", "the", "filtering", "is", "disabled" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/common/HdfsUtils.java#L38-L60
25,210
alibaba/jstorm
jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/common/HdfsUtils.java
HdfsUtils.tryCreateFile
public static FSDataOutputStream tryCreateFile(FileSystem fs, Path file) throws IOException { try { FSDataOutputStream os = fs.create(file, false); return os; } catch (FileAlreadyExistsException e) { return null; } catch (RemoteException e) { if( e.unwrapRemoteException() instanceof AlreadyBeingCreatedException ) { return null; } else { // unexpected error throw e; } } }
java
public static FSDataOutputStream tryCreateFile(FileSystem fs, Path file) throws IOException { try { FSDataOutputStream os = fs.create(file, false); return os; } catch (FileAlreadyExistsException e) { return null; } catch (RemoteException e) { if( e.unwrapRemoteException() instanceof AlreadyBeingCreatedException ) { return null; } else { // unexpected error throw e; } } }
[ "public", "static", "FSDataOutputStream", "tryCreateFile", "(", "FileSystem", "fs", ",", "Path", "file", ")", "throws", "IOException", "{", "try", "{", "FSDataOutputStream", "os", "=", "fs", ".", "create", "(", "file", ",", "false", ")", ";", "return", "os", ";", "}", "catch", "(", "FileAlreadyExistsException", "e", ")", "{", "return", "null", ";", "}", "catch", "(", "RemoteException", "e", ")", "{", "if", "(", "e", ".", "unwrapRemoteException", "(", ")", "instanceof", "AlreadyBeingCreatedException", ")", "{", "return", "null", ";", "}", "else", "{", "// unexpected error", "throw", "e", ";", "}", "}", "}" ]
Returns null if file already exists. throws if there was unexpected problem
[ "Returns", "null", "if", "file", "already", "exists", ".", "throws", "if", "there", "was", "unexpected", "problem" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/common/HdfsUtils.java#L65-L78
25,211
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/transactional/TransactionTopologyBuilder.java
TransactionTopologyBuilder.setSpoutWithAck
/**
 * Registers a spout wrapped for compatibility with Storm's ack mechanism.
 *
 * @param id              spout id
 * @param spout           the raw spout to wrap
 * @param parallelismHint executor parallelism
 * @return declarer for further spout configuration
 */
public SpoutDeclarer setSpoutWithAck(String id, IRichSpout spout, Number parallelismHint) {
    AckTransactionSpout ackingSpout = new AckTransactionSpout(spout);
    return setSpout(id, ackingSpout, parallelismHint);
}
java
public SpoutDeclarer setSpoutWithAck(String id, IRichSpout spout, Number parallelismHint) { return setSpout(id, new AckTransactionSpout(spout), parallelismHint); }
[ "public", "SpoutDeclarer", "setSpoutWithAck", "(", "String", "id", ",", "IRichSpout", "spout", ",", "Number", "parallelismHint", ")", "{", "return", "setSpout", "(", "id", ",", "new", "AckTransactionSpout", "(", "spout", ")", ",", "parallelismHint", ")", ";", "}" ]
Build spout to provide the compatibility with Storm's ack mechanism @param id spout Id @param spout @return
[ "Build", "spout", "to", "provide", "the", "compatibility", "with", "Storm", "s", "ack", "mechanism" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/transactional/TransactionTopologyBuilder.java#L117-L119
25,212
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/transactional/TransactionTopologyBuilder.java
TransactionTopologyBuilder.setBoltWithAck
public BoltDeclarer setBoltWithAck(String id, IRichBolt bolt, Number parallelismHint) { return setBolt(id, new AckTransactionBolt(bolt), parallelismHint); }
java
public BoltDeclarer setBoltWithAck(String id, IRichBolt bolt, Number parallelismHint) { return setBolt(id, new AckTransactionBolt(bolt), parallelismHint); }
[ "public", "BoltDeclarer", "setBoltWithAck", "(", "String", "id", ",", "IRichBolt", "bolt", ",", "Number", "parallelismHint", ")", "{", "return", "setBolt", "(", "id", ",", "new", "AckTransactionBolt", "(", "bolt", ")", ",", "parallelismHint", ")", ";", "}" ]
Build bolt to provide the compatibility with Storm's ack mechanism @param id bolt Id @param bolt @return
[ "Build", "bolt", "to", "provide", "the", "compatibility", "with", "Storm", "s", "ack", "mechanism" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/transactional/TransactionTopologyBuilder.java#L181-L183
25,213
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/message/netty/NettyContext.java
NettyContext.prepare
@SuppressWarnings("rawtypes") public void prepare(Map stormConf) { this.stormConf = stormConf; int maxWorkers = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS)); ThreadFactory bossFactory = new NettyRenameThreadFactory(MetricDef.NETTY_CLI + "boss"); ThreadFactory workerFactory = new NettyRenameThreadFactory(MetricDef.NETTY_CLI + "worker"); if (maxWorkers > 0) { clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory), Executors.newCachedThreadPool(workerFactory), maxWorkers); } else { clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory), Executors.newCachedThreadPool(workerFactory)); } reconnector = new ReconnectRunnable(); new AsyncLoopThread(reconnector, true, Thread.MIN_PRIORITY, true); }
java
@SuppressWarnings("rawtypes") public void prepare(Map stormConf) { this.stormConf = stormConf; int maxWorkers = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS)); ThreadFactory bossFactory = new NettyRenameThreadFactory(MetricDef.NETTY_CLI + "boss"); ThreadFactory workerFactory = new NettyRenameThreadFactory(MetricDef.NETTY_CLI + "worker"); if (maxWorkers > 0) { clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory), Executors.newCachedThreadPool(workerFactory), maxWorkers); } else { clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory), Executors.newCachedThreadPool(workerFactory)); } reconnector = new ReconnectRunnable(); new AsyncLoopThread(reconnector, true, Thread.MIN_PRIORITY, true); }
[ "@", "SuppressWarnings", "(", "\"rawtypes\"", ")", "public", "void", "prepare", "(", "Map", "stormConf", ")", "{", "this", ".", "stormConf", "=", "stormConf", ";", "int", "maxWorkers", "=", "Utils", ".", "getInt", "(", "stormConf", ".", "get", "(", "Config", ".", "STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS", ")", ")", ";", "ThreadFactory", "bossFactory", "=", "new", "NettyRenameThreadFactory", "(", "MetricDef", ".", "NETTY_CLI", "+", "\"boss\"", ")", ";", "ThreadFactory", "workerFactory", "=", "new", "NettyRenameThreadFactory", "(", "MetricDef", ".", "NETTY_CLI", "+", "\"worker\"", ")", ";", "if", "(", "maxWorkers", ">", "0", ")", "{", "clientChannelFactory", "=", "new", "NioClientSocketChannelFactory", "(", "Executors", ".", "newCachedThreadPool", "(", "bossFactory", ")", ",", "Executors", ".", "newCachedThreadPool", "(", "workerFactory", ")", ",", "maxWorkers", ")", ";", "}", "else", "{", "clientChannelFactory", "=", "new", "NioClientSocketChannelFactory", "(", "Executors", ".", "newCachedThreadPool", "(", "bossFactory", ")", ",", "Executors", ".", "newCachedThreadPool", "(", "workerFactory", ")", ")", ";", "}", "reconnector", "=", "new", "ReconnectRunnable", "(", ")", ";", "new", "AsyncLoopThread", "(", "reconnector", ",", "true", ",", "Thread", ".", "MIN_PRIORITY", ",", "true", ")", ";", "}" ]
initialization per Storm configuration
[ "initialization", "per", "Storm", "configuration" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/message/netty/NettyContext.java#L55-L73
25,214
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java
SchedulerAssignmentImpl.assign
public void assign(WorkerSlot slot, Collection<ExecutorDetails> executors) { for (ExecutorDetails executor : executors) { this.executorToSlot.put(executor, slot); } }
java
public void assign(WorkerSlot slot, Collection<ExecutorDetails> executors) { for (ExecutorDetails executor : executors) { this.executorToSlot.put(executor, slot); } }
[ "public", "void", "assign", "(", "WorkerSlot", "slot", ",", "Collection", "<", "ExecutorDetails", ">", "executors", ")", "{", "for", "(", "ExecutorDetails", "executor", ":", "executors", ")", "{", "this", ".", "executorToSlot", ".", "put", "(", "executor", ",", "slot", ")", ";", "}", "}" ]
Assign the slot to executors.
[ "Assign", "the", "slot", "to", "executors", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java#L55-L59
25,215
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java
SchedulerAssignmentImpl.unassignBySlot
public void unassignBySlot(WorkerSlot slot) { List<ExecutorDetails> executors = new ArrayList<>(); for (ExecutorDetails executor : this.executorToSlot.keySet()) { WorkerSlot ws = this.executorToSlot.get(executor); if (ws.equals(slot)) { executors.add(executor); } } // remove for (ExecutorDetails executor : executors) { this.executorToSlot.remove(executor); } }
java
public void unassignBySlot(WorkerSlot slot) { List<ExecutorDetails> executors = new ArrayList<>(); for (ExecutorDetails executor : this.executorToSlot.keySet()) { WorkerSlot ws = this.executorToSlot.get(executor); if (ws.equals(slot)) { executors.add(executor); } } // remove for (ExecutorDetails executor : executors) { this.executorToSlot.remove(executor); } }
[ "public", "void", "unassignBySlot", "(", "WorkerSlot", "slot", ")", "{", "List", "<", "ExecutorDetails", ">", "executors", "=", "new", "ArrayList", "<>", "(", ")", ";", "for", "(", "ExecutorDetails", "executor", ":", "this", ".", "executorToSlot", ".", "keySet", "(", ")", ")", "{", "WorkerSlot", "ws", "=", "this", ".", "executorToSlot", ".", "get", "(", "executor", ")", ";", "if", "(", "ws", ".", "equals", "(", "slot", ")", ")", "{", "executors", ".", "add", "(", "executor", ")", ";", "}", "}", "// remove", "for", "(", "ExecutorDetails", "executor", ":", "executors", ")", "{", "this", ".", "executorToSlot", ".", "remove", "(", "executor", ")", ";", "}", "}" ]
Release the slot occupied by this assignment.
[ "Release", "the", "slot", "occupied", "by", "this", "assignment", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java#L64-L77
25,216
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/windowing/WatermarkCountTriggerPolicy.java
WatermarkCountTriggerPolicy.handleWaterMarkEvent
private void handleWaterMarkEvent(Event<T> waterMarkEvent) { long watermarkTs = waterMarkEvent.getTimestamp(); List<Long> eventTs = windowManager.getSlidingCountTimestamps(lastProcessedTs, watermarkTs, count); for (long ts : eventTs) { evictionPolicy.setContext(ts); handler.onTrigger(); lastProcessedTs = ts; } }
java
private void handleWaterMarkEvent(Event<T> waterMarkEvent) { long watermarkTs = waterMarkEvent.getTimestamp(); List<Long> eventTs = windowManager.getSlidingCountTimestamps(lastProcessedTs, watermarkTs, count); for (long ts : eventTs) { evictionPolicy.setContext(ts); handler.onTrigger(); lastProcessedTs = ts; } }
[ "private", "void", "handleWaterMarkEvent", "(", "Event", "<", "T", ">", "waterMarkEvent", ")", "{", "long", "watermarkTs", "=", "waterMarkEvent", ".", "getTimestamp", "(", ")", ";", "List", "<", "Long", ">", "eventTs", "=", "windowManager", ".", "getSlidingCountTimestamps", "(", "lastProcessedTs", ",", "watermarkTs", ",", "count", ")", ";", "for", "(", "long", "ts", ":", "eventTs", ")", "{", "evictionPolicy", ".", "setContext", "(", "ts", ")", ";", "handler", ".", "onTrigger", "(", ")", ";", "lastProcessedTs", "=", "ts", ";", "}", "}" ]
Triggers all the pending windows up to the waterMarkEvent timestamp based on the sliding interval count. @param waterMarkEvent the watermark event
[ "Triggers", "all", "the", "pending", "windows", "up", "to", "the", "waterMarkEvent", "timestamp", "based", "on", "the", "sliding", "interval", "count", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/windowing/WatermarkCountTriggerPolicy.java#L73-L81
25,217
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/SimpleTransportPlugin.java
SimpleTransportPlugin.connect
@Override public TTransport connect(TTransport transport, String serverHost, String asUser) throws TTransportException { int maxBufferSize = type.getMaxBufferSize(storm_conf); // create a framed transport TTransport conn = new TFramedTransport(transport, maxBufferSize); // connect conn.open(); LOG.debug("Simple client transport has been established"); return conn; }
java
@Override public TTransport connect(TTransport transport, String serverHost, String asUser) throws TTransportException { int maxBufferSize = type.getMaxBufferSize(storm_conf); // create a framed transport TTransport conn = new TFramedTransport(transport, maxBufferSize); // connect conn.open(); LOG.debug("Simple client transport has been established"); return conn; }
[ "@", "Override", "public", "TTransport", "connect", "(", "TTransport", "transport", ",", "String", "serverHost", ",", "String", "asUser", ")", "throws", "TTransportException", "{", "int", "maxBufferSize", "=", "type", ".", "getMaxBufferSize", "(", "storm_conf", ")", ";", "// create a framed transport", "TTransport", "conn", "=", "new", "TFramedTransport", "(", "transport", ",", "maxBufferSize", ")", ";", "// connect", "conn", ".", "open", "(", ")", ";", "LOG", ".", "debug", "(", "\"Simple client transport has been established\"", ")", ";", "return", "conn", ";", "}" ]
Connect to the specified server via framed transport @param transport The underlying Thrift transport. @param serverHost unused. @param asUser unused.
[ "Connect", "to", "the", "specified", "server", "via", "framed", "transport" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/SimpleTransportPlugin.java#L95-L106
25,218
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/utils/JStormSignalHandler.java
JStormSignalHandler.registerSignal
public synchronized void registerSignal(int signalNumber, Runnable callback, boolean replace) { String signalName = signalMap.get(signalNumber); if (signalName == null) { LOG.warn("Invalid signalNumber " + signalNumber); return; } LOG.info("Begin to register signal of {}", signalName); try { SignalHandler oldHandler = Signal.handle(new Signal(signalName), this); LOG.info("Successfully register {} handler", signalName); Runnable old = signalHandlers.put(signalNumber, callback); if (old != null) { if (!replace) { oldSignalHandlers.put(signalNumber, oldHandler); } else { LOG.info("Successfully old {} handler will be replaced", signalName); } } LOG.info("Successfully register signal of {}", signalName); } catch (Exception e) { LOG.error("Failed to register " + signalName + ":" + signalNumber + ", Signal already used by VM or OS: SIGILL"); } }
java
public synchronized void registerSignal(int signalNumber, Runnable callback, boolean replace) { String signalName = signalMap.get(signalNumber); if (signalName == null) { LOG.warn("Invalid signalNumber " + signalNumber); return; } LOG.info("Begin to register signal of {}", signalName); try { SignalHandler oldHandler = Signal.handle(new Signal(signalName), this); LOG.info("Successfully register {} handler", signalName); Runnable old = signalHandlers.put(signalNumber, callback); if (old != null) { if (!replace) { oldSignalHandlers.put(signalNumber, oldHandler); } else { LOG.info("Successfully old {} handler will be replaced", signalName); } } LOG.info("Successfully register signal of {}", signalName); } catch (Exception e) { LOG.error("Failed to register " + signalName + ":" + signalNumber + ", Signal already used by VM or OS: SIGILL"); } }
[ "public", "synchronized", "void", "registerSignal", "(", "int", "signalNumber", ",", "Runnable", "callback", ",", "boolean", "replace", ")", "{", "String", "signalName", "=", "signalMap", ".", "get", "(", "signalNumber", ")", ";", "if", "(", "signalName", "==", "null", ")", "{", "LOG", ".", "warn", "(", "\"Invalid signalNumber \"", "+", "signalNumber", ")", ";", "return", ";", "}", "LOG", ".", "info", "(", "\"Begin to register signal of {}\"", ",", "signalName", ")", ";", "try", "{", "SignalHandler", "oldHandler", "=", "Signal", ".", "handle", "(", "new", "Signal", "(", "signalName", ")", ",", "this", ")", ";", "LOG", ".", "info", "(", "\"Successfully register {} handler\"", ",", "signalName", ")", ";", "Runnable", "old", "=", "signalHandlers", ".", "put", "(", "signalNumber", ",", "callback", ")", ";", "if", "(", "old", "!=", "null", ")", "{", "if", "(", "!", "replace", ")", "{", "oldSignalHandlers", ".", "put", "(", "signalNumber", ",", "oldHandler", ")", ";", "}", "else", "{", "LOG", ".", "info", "(", "\"Successfully old {} handler will be replaced\"", ",", "signalName", ")", ";", "}", "}", "LOG", ".", "info", "(", "\"Successfully register signal of {}\"", ",", "signalName", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "\"Failed to register \"", "+", "signalName", "+", "\":\"", "+", "signalNumber", "+", "\", Signal already used by VM or OS: SIGILL\"", ")", ";", "}", "}" ]
Register signal to system if callback is null, then the current process will ignore this signal
[ "Register", "signal", "to", "system", "if", "callback", "is", "null", "then", "the", "current", "process", "will", "ignore", "this", "signal" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/utils/JStormSignalHandler.java#L111-L138
25,219
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/task/comm/TaskSendTargets.java
TaskSendTargets.get
public List<Integer> get(Integer out_task_id, String stream, List<Object> tuple, Collection<Tuple> anchors, Object root_id) { // in order to improve acker's performance, skip checking // String target_component = // topologyContext.getComponentId(out_task_id); // Map<String, MkGrouper> component_prouping = streamComponentGrouper // .get(stream); // MkGrouper grouping = component_prouping.get(target_component); // if (grouping != null && // !GrouperType.direct.equals(grouping.gettype())) { // throw new IllegalArgumentException( // "Cannot emitDirect to a task expecting a regular grouping"); // } if (isDebug(anchors, root_id)) { LOG.info(debugIdStr + stream + " to " + out_task_id + ":" + tuple); } taskStats.send_tuple(stream, 1); List<Integer> out_tasks = new ArrayList<>(); out_tasks.add(out_task_id); return out_tasks; }
java
public List<Integer> get(Integer out_task_id, String stream, List<Object> tuple, Collection<Tuple> anchors, Object root_id) { // in order to improve acker's performance, skip checking // String target_component = // topologyContext.getComponentId(out_task_id); // Map<String, MkGrouper> component_prouping = streamComponentGrouper // .get(stream); // MkGrouper grouping = component_prouping.get(target_component); // if (grouping != null && // !GrouperType.direct.equals(grouping.gettype())) { // throw new IllegalArgumentException( // "Cannot emitDirect to a task expecting a regular grouping"); // } if (isDebug(anchors, root_id)) { LOG.info(debugIdStr + stream + " to " + out_task_id + ":" + tuple); } taskStats.send_tuple(stream, 1); List<Integer> out_tasks = new ArrayList<>(); out_tasks.add(out_task_id); return out_tasks; }
[ "public", "List", "<", "Integer", ">", "get", "(", "Integer", "out_task_id", ",", "String", "stream", ",", "List", "<", "Object", ">", "tuple", ",", "Collection", "<", "Tuple", ">", "anchors", ",", "Object", "root_id", ")", "{", "// in order to improve acker's performance, skip checking", "// String target_component =", "// topologyContext.getComponentId(out_task_id);", "// Map<String, MkGrouper> component_prouping = streamComponentGrouper", "// .get(stream);", "// MkGrouper grouping = component_prouping.get(target_component);", "// if (grouping != null &&", "// !GrouperType.direct.equals(grouping.gettype())) {", "// throw new IllegalArgumentException(", "// \"Cannot emitDirect to a task expecting a regular grouping\");", "// }", "if", "(", "isDebug", "(", "anchors", ",", "root_id", ")", ")", "{", "LOG", ".", "info", "(", "debugIdStr", "+", "stream", "+", "\" to \"", "+", "out_task_id", "+", "\":\"", "+", "tuple", ")", ";", "}", "taskStats", ".", "send_tuple", "(", "stream", ",", "1", ")", ";", "List", "<", "Integer", ">", "out_tasks", "=", "new", "ArrayList", "<>", "(", ")", ";", "out_tasks", ".", "add", "(", "out_task_id", ")", ";", "return", "out_tasks", ";", "}" ]
direct send tuple to special task
[ "direct", "send", "tuple", "to", "special", "task" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/task/comm/TaskSendTargets.java#L71-L95
25,220
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/task/comm/TaskSendTargets.java
TaskSendTargets.get
public List<Integer> get(String stream, List<Object> tuple, Collection<Tuple> anchors, Object rootId) { List<Integer> outTasks = new ArrayList<>(); // get grouper, then get which task should tuple be sent to. Map<String, MkGrouper> componentCrouping = streamComponentGrouper.get(stream); if (componentCrouping == null) { // if the target component's parallelism is 0, don't need send to // them LOG.debug("Failed to get Grouper of " + stream + " when " + debugIdStr); return outTasks; } for (Entry<String, MkGrouper> ee : componentCrouping.entrySet()) { String targetComponent = ee.getKey(); MkGrouper g = ee.getValue(); if (GrouperType.direct.equals(g.gettype())) { throw new IllegalArgumentException("Cannot do regular emit to direct stream"); } outTasks.addAll(g.grouper(tuple)); } if (isDebug(anchors, rootId)) { LOG.info(debugIdStr + stream + " to " + outTasks + ":" + tuple.toString()); } int num_out_tasks = outTasks.size(); taskStats.send_tuple(stream, num_out_tasks); return outTasks; }
java
public List<Integer> get(String stream, List<Object> tuple, Collection<Tuple> anchors, Object rootId) { List<Integer> outTasks = new ArrayList<>(); // get grouper, then get which task should tuple be sent to. Map<String, MkGrouper> componentCrouping = streamComponentGrouper.get(stream); if (componentCrouping == null) { // if the target component's parallelism is 0, don't need send to // them LOG.debug("Failed to get Grouper of " + stream + " when " + debugIdStr); return outTasks; } for (Entry<String, MkGrouper> ee : componentCrouping.entrySet()) { String targetComponent = ee.getKey(); MkGrouper g = ee.getValue(); if (GrouperType.direct.equals(g.gettype())) { throw new IllegalArgumentException("Cannot do regular emit to direct stream"); } outTasks.addAll(g.grouper(tuple)); } if (isDebug(anchors, rootId)) { LOG.info(debugIdStr + stream + " to " + outTasks + ":" + tuple.toString()); } int num_out_tasks = outTasks.size(); taskStats.send_tuple(stream, num_out_tasks); return outTasks; }
[ "public", "List", "<", "Integer", ">", "get", "(", "String", "stream", ",", "List", "<", "Object", ">", "tuple", ",", "Collection", "<", "Tuple", ">", "anchors", ",", "Object", "rootId", ")", "{", "List", "<", "Integer", ">", "outTasks", "=", "new", "ArrayList", "<>", "(", ")", ";", "// get grouper, then get which task should tuple be sent to.", "Map", "<", "String", ",", "MkGrouper", ">", "componentCrouping", "=", "streamComponentGrouper", ".", "get", "(", "stream", ")", ";", "if", "(", "componentCrouping", "==", "null", ")", "{", "// if the target component's parallelism is 0, don't need send to", "// them", "LOG", ".", "debug", "(", "\"Failed to get Grouper of \"", "+", "stream", "+", "\" when \"", "+", "debugIdStr", ")", ";", "return", "outTasks", ";", "}", "for", "(", "Entry", "<", "String", ",", "MkGrouper", ">", "ee", ":", "componentCrouping", ".", "entrySet", "(", ")", ")", "{", "String", "targetComponent", "=", "ee", ".", "getKey", "(", ")", ";", "MkGrouper", "g", "=", "ee", ".", "getValue", "(", ")", ";", "if", "(", "GrouperType", ".", "direct", ".", "equals", "(", "g", ".", "gettype", "(", ")", ")", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\"Cannot do regular emit to direct stream\"", ")", ";", "}", "outTasks", ".", "addAll", "(", "g", ".", "grouper", "(", "tuple", ")", ")", ";", "}", "if", "(", "isDebug", "(", "anchors", ",", "rootId", ")", ")", "{", "LOG", ".", "info", "(", "debugIdStr", "+", "stream", "+", "\" to \"", "+", "outTasks", "+", "\":\"", "+", "tuple", ".", "toString", "(", ")", ")", ";", "}", "int", "num_out_tasks", "=", "outTasks", ".", "size", "(", ")", ";", "taskStats", ".", "send_tuple", "(", "stream", ",", "num_out_tasks", ")", ";", "return", "outTasks", ";", "}" ]
send tuple according to grouping
[ "send", "tuple", "according", "to", "grouping" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/task/comm/TaskSendTargets.java#L98-L128
25,221
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java
JStormMetrics.find
public static AsmMetric find(String name) { for (AsmMetricRegistry registry : allRegistries) { AsmMetric metric = registry.getMetric(name); if (metric != null) { return metric; } } return null; }
java
public static AsmMetric find(String name) { for (AsmMetricRegistry registry : allRegistries) { AsmMetric metric = registry.getMetric(name); if (metric != null) { return metric; } } return null; }
[ "public", "static", "AsmMetric", "find", "(", "String", "name", ")", "{", "for", "(", "AsmMetricRegistry", "registry", ":", "allRegistries", ")", "{", "AsmMetric", "metric", "=", "registry", ".", "getMetric", "(", "name", ")", ";", "if", "(", "metric", "!=", "null", ")", "{", "return", "metric", ";", "}", "}", "return", "null", ";", "}" ]
reserve for debug purposes
[ "reserve", "for", "debug", "purposes" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java#L169-L177
25,222
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java
JStormMetrics.registerWorkerHistogram
public static AsmHistogram registerWorkerHistogram(String topologyId, String name, AsmHistogram histogram) { return (AsmHistogram) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.HISTOGRAM), histogram); }
java
public static AsmHistogram registerWorkerHistogram(String topologyId, String name, AsmHistogram histogram) { return (AsmHistogram) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.HISTOGRAM), histogram); }
[ "public", "static", "AsmHistogram", "registerWorkerHistogram", "(", "String", "topologyId", ",", "String", "name", ",", "AsmHistogram", "histogram", ")", "{", "return", "(", "AsmHistogram", ")", "registerWorkerMetric", "(", "MetricUtils", ".", "workerMetricName", "(", "topologyId", ",", "host", ",", "0", ",", "name", ",", "MetricType", ".", "HISTOGRAM", ")", ",", "histogram", ")", ";", "}" ]
simplified helper method to register a worker histogram @param topologyId topology id @param name metric name, NOTE it's not a full-qualified name. @param histogram histogram @return registered histogram
[ "simplified", "helper", "method", "to", "register", "a", "worker", "histogram" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java#L309-L312
25,223
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java
JStormMetrics.registerWorkerGauge
public static AsmGauge registerWorkerGauge(String topologyId, String name, AsmGauge gauge) { return (AsmGauge) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.GAUGE), gauge); }
java
public static AsmGauge registerWorkerGauge(String topologyId, String name, AsmGauge gauge) { return (AsmGauge) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.GAUGE), gauge); }
[ "public", "static", "AsmGauge", "registerWorkerGauge", "(", "String", "topologyId", ",", "String", "name", ",", "AsmGauge", "gauge", ")", "{", "return", "(", "AsmGauge", ")", "registerWorkerMetric", "(", "MetricUtils", ".", "workerMetricName", "(", "topologyId", ",", "host", ",", "0", ",", "name", ",", "MetricType", ".", "GAUGE", ")", ",", "gauge", ")", ";", "}" ]
simplified helper method to register a worker gauge
[ "simplified", "helper", "method", "to", "register", "a", "worker", "gauge" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java#L317-L320
25,224
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java
JStormMetrics.registerWorkerMeter
public static AsmMeter registerWorkerMeter(String topologyId, String name, AsmMeter meter) { return (AsmMeter) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.METER), meter); }
java
public static AsmMeter registerWorkerMeter(String topologyId, String name, AsmMeter meter) { return (AsmMeter) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.METER), meter); }
[ "public", "static", "AsmMeter", "registerWorkerMeter", "(", "String", "topologyId", ",", "String", "name", ",", "AsmMeter", "meter", ")", "{", "return", "(", "AsmMeter", ")", "registerWorkerMetric", "(", "MetricUtils", ".", "workerMetricName", "(", "topologyId", ",", "host", ",", "0", ",", "name", ",", "MetricType", ".", "METER", ")", ",", "meter", ")", ";", "}" ]
simplified helper method to register a worker meter
[ "simplified", "helper", "method", "to", "register", "a", "worker", "meter" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java#L325-L328
25,225
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java
JStormMetrics.registerWorkerCounter
public static AsmCounter registerWorkerCounter(String topologyId, String name, AsmCounter counter) { return (AsmCounter) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.COUNTER), counter); }
java
public static AsmCounter registerWorkerCounter(String topologyId, String name, AsmCounter counter) { return (AsmCounter) registerWorkerMetric( MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.COUNTER), counter); }
[ "public", "static", "AsmCounter", "registerWorkerCounter", "(", "String", "topologyId", ",", "String", "name", ",", "AsmCounter", "counter", ")", "{", "return", "(", "AsmCounter", ")", "registerWorkerMetric", "(", "MetricUtils", ".", "workerMetricName", "(", "topologyId", ",", "host", ",", "0", ",", "name", ",", "MetricType", ".", "COUNTER", ")", ",", "counter", ")", ";", "}" ]
simplified helper method to register a worker counter
[ "simplified", "helper", "method", "to", "register", "a", "worker", "counter" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/JStormMetrics.java#L333-L336
25,226
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/worker/ProcessSimulator.java
ProcessSimulator.killProcess
public static void killProcess(String pid) { synchronized (lock) { LOG.info("Begin to kill process " + pid); WorkerShutdown shutdownHandle = getProcessHandle(pid); if (shutdownHandle != null) { shutdownHandle.shutdown(); } processMap.remove(pid); LOG.info("Successfully killed process " + pid); } }
java
public static void killProcess(String pid) { synchronized (lock) { LOG.info("Begin to kill process " + pid); WorkerShutdown shutdownHandle = getProcessHandle(pid); if (shutdownHandle != null) { shutdownHandle.shutdown(); } processMap.remove(pid); LOG.info("Successfully killed process " + pid); } }
[ "public", "static", "void", "killProcess", "(", "String", "pid", ")", "{", "synchronized", "(", "lock", ")", "{", "LOG", ".", "info", "(", "\"Begin to kill process \"", "+", "pid", ")", ";", "WorkerShutdown", "shutdownHandle", "=", "getProcessHandle", "(", "pid", ")", ";", "if", "(", "shutdownHandle", "!=", "null", ")", "{", "shutdownHandle", ".", "shutdown", "(", ")", ";", "}", "processMap", ".", "remove", "(", "pid", ")", ";", "LOG", ".", "info", "(", "\"Successfully killed process \"", "+", "pid", ")", ";", "}", "}" ]
Kill a process @param pid process id
[ "Kill", "a", "process" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/worker/ProcessSimulator.java#L68-L82
25,227
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/kerberos/AutoTGT.java
AutoTGT.loginHadoopUser
private void loginHadoopUser(Subject subject) { Class<?> ugi = null; try { ugi = Class.forName("org.apache.hadoop.security.UserGroupInformation"); } catch (ClassNotFoundException e) { LOG.info("Hadoop was not found on the class path"); return; } try { Method isSecEnabled = ugi.getMethod("isSecurityEnabled"); if (!((Boolean) isSecEnabled.invoke(null))) { LOG.warn("Hadoop is on the classpath but not configured for " + "security, if you want security you need to be sure that " + "hadoop.security.authentication=kerberos in core-site.xml " + "in your jar"); return; } try { Method login = ugi.getMethod("loginUserFromSubject", Subject.class); login.invoke(null, subject); } catch (NoSuchMethodException me) { // The version of Hadoop does not have the needed client changes. // So don't look now, but do something really ugly to work around it. // This is because we are reaching into the hidden bits of Hadoop security, and it works for now, but may stop at any point in time. // We are just trying to do the following // Configuration conf = new Configuration(); // HadoopKerberosName.setConfiguration(conf); // subject.getPrincipals().add(new User(tgt.getClient().toString(), AuthenticationMethod.KERBEROS, null)); String name = getTGT(subject).getClient().toString(); LOG.warn("The Hadoop client does not have loginUserFromSubject, Trying to hack around it. 
This may not work..."); Class<?> confClass = Class.forName("org.apache.hadoop.conf.Configuration"); Constructor confCons = confClass.getConstructor(); Object conf = confCons.newInstance(); Class<?> hknClass = Class.forName("org.apache.hadoop.security.HadoopKerberosName"); Method hknSetConf = hknClass.getMethod("setConfiguration", confClass); hknSetConf.invoke(null, conf); Class<?> authMethodClass = Class.forName("org.apache.hadoop.security.UserGroupInformation$AuthenticationMethod"); Object kerbAuthMethod = null; for (Object authMethod : authMethodClass.getEnumConstants()) { if ("KERBEROS".equals(authMethod.toString())) { kerbAuthMethod = authMethod; break; } } Class<?> userClass = Class.forName("org.apache.hadoop.security.User"); Constructor userCons = userClass.getConstructor(String.class, authMethodClass, LoginContext.class); userCons.setAccessible(true); Object user = userCons.newInstance(name, kerbAuthMethod, null); subject.getPrincipals().add((Principal) user); } } catch (Exception e) { LOG.warn("Something went wrong while trying to initialize Hadoop through reflection. This version of hadoop may not be compatible.", e); } }
java
private void loginHadoopUser(Subject subject) { Class<?> ugi = null; try { ugi = Class.forName("org.apache.hadoop.security.UserGroupInformation"); } catch (ClassNotFoundException e) { LOG.info("Hadoop was not found on the class path"); return; } try { Method isSecEnabled = ugi.getMethod("isSecurityEnabled"); if (!((Boolean) isSecEnabled.invoke(null))) { LOG.warn("Hadoop is on the classpath but not configured for " + "security, if you want security you need to be sure that " + "hadoop.security.authentication=kerberos in core-site.xml " + "in your jar"); return; } try { Method login = ugi.getMethod("loginUserFromSubject", Subject.class); login.invoke(null, subject); } catch (NoSuchMethodException me) { // The version of Hadoop does not have the needed client changes. // So don't look now, but do something really ugly to work around it. // This is because we are reaching into the hidden bits of Hadoop security, and it works for now, but may stop at any point in time. // We are just trying to do the following // Configuration conf = new Configuration(); // HadoopKerberosName.setConfiguration(conf); // subject.getPrincipals().add(new User(tgt.getClient().toString(), AuthenticationMethod.KERBEROS, null)); String name = getTGT(subject).getClient().toString(); LOG.warn("The Hadoop client does not have loginUserFromSubject, Trying to hack around it. 
This may not work..."); Class<?> confClass = Class.forName("org.apache.hadoop.conf.Configuration"); Constructor confCons = confClass.getConstructor(); Object conf = confCons.newInstance(); Class<?> hknClass = Class.forName("org.apache.hadoop.security.HadoopKerberosName"); Method hknSetConf = hknClass.getMethod("setConfiguration", confClass); hknSetConf.invoke(null, conf); Class<?> authMethodClass = Class.forName("org.apache.hadoop.security.UserGroupInformation$AuthenticationMethod"); Object kerbAuthMethod = null; for (Object authMethod : authMethodClass.getEnumConstants()) { if ("KERBEROS".equals(authMethod.toString())) { kerbAuthMethod = authMethod; break; } } Class<?> userClass = Class.forName("org.apache.hadoop.security.User"); Constructor userCons = userClass.getConstructor(String.class, authMethodClass, LoginContext.class); userCons.setAccessible(true); Object user = userCons.newInstance(name, kerbAuthMethod, null); subject.getPrincipals().add((Principal) user); } } catch (Exception e) { LOG.warn("Something went wrong while trying to initialize Hadoop through reflection. This version of hadoop may not be compatible.", e); } }
[ "private", "void", "loginHadoopUser", "(", "Subject", "subject", ")", "{", "Class", "<", "?", ">", "ugi", "=", "null", ";", "try", "{", "ugi", "=", "Class", ".", "forName", "(", "\"org.apache.hadoop.security.UserGroupInformation\"", ")", ";", "}", "catch", "(", "ClassNotFoundException", "e", ")", "{", "LOG", ".", "info", "(", "\"Hadoop was not found on the class path\"", ")", ";", "return", ";", "}", "try", "{", "Method", "isSecEnabled", "=", "ugi", ".", "getMethod", "(", "\"isSecurityEnabled\"", ")", ";", "if", "(", "!", "(", "(", "Boolean", ")", "isSecEnabled", ".", "invoke", "(", "null", ")", ")", ")", "{", "LOG", ".", "warn", "(", "\"Hadoop is on the classpath but not configured for \"", "+", "\"security, if you want security you need to be sure that \"", "+", "\"hadoop.security.authentication=kerberos in core-site.xml \"", "+", "\"in your jar\"", ")", ";", "return", ";", "}", "try", "{", "Method", "login", "=", "ugi", ".", "getMethod", "(", "\"loginUserFromSubject\"", ",", "Subject", ".", "class", ")", ";", "login", ".", "invoke", "(", "null", ",", "subject", ")", ";", "}", "catch", "(", "NoSuchMethodException", "me", ")", "{", "// The version of Hadoop does not have the needed client changes.", "// So don't look now, but do something really ugly to work around it.", "// This is because we are reaching into the hidden bits of Hadoop security, and it works for now, but may stop at any point in time.", "// We are just trying to do the following", "// Configuration conf = new Configuration();", "// HadoopKerberosName.setConfiguration(conf);", "// subject.getPrincipals().add(new User(tgt.getClient().toString(), AuthenticationMethod.KERBEROS, null));", "String", "name", "=", "getTGT", "(", "subject", ")", ".", "getClient", "(", ")", ".", "toString", "(", ")", ";", "LOG", ".", "warn", "(", "\"The Hadoop client does not have loginUserFromSubject, Trying to hack around it. 
This may not work...\"", ")", ";", "Class", "<", "?", ">", "confClass", "=", "Class", ".", "forName", "(", "\"org.apache.hadoop.conf.Configuration\"", ")", ";", "Constructor", "confCons", "=", "confClass", ".", "getConstructor", "(", ")", ";", "Object", "conf", "=", "confCons", ".", "newInstance", "(", ")", ";", "Class", "<", "?", ">", "hknClass", "=", "Class", ".", "forName", "(", "\"org.apache.hadoop.security.HadoopKerberosName\"", ")", ";", "Method", "hknSetConf", "=", "hknClass", ".", "getMethod", "(", "\"setConfiguration\"", ",", "confClass", ")", ";", "hknSetConf", ".", "invoke", "(", "null", ",", "conf", ")", ";", "Class", "<", "?", ">", "authMethodClass", "=", "Class", ".", "forName", "(", "\"org.apache.hadoop.security.UserGroupInformation$AuthenticationMethod\"", ")", ";", "Object", "kerbAuthMethod", "=", "null", ";", "for", "(", "Object", "authMethod", ":", "authMethodClass", ".", "getEnumConstants", "(", ")", ")", "{", "if", "(", "\"KERBEROS\"", ".", "equals", "(", "authMethod", ".", "toString", "(", ")", ")", ")", "{", "kerbAuthMethod", "=", "authMethod", ";", "break", ";", "}", "}", "Class", "<", "?", ">", "userClass", "=", "Class", ".", "forName", "(", "\"org.apache.hadoop.security.User\"", ")", ";", "Constructor", "userCons", "=", "userClass", ".", "getConstructor", "(", "String", ".", "class", ",", "authMethodClass", ",", "LoginContext", ".", "class", ")", ";", "userCons", ".", "setAccessible", "(", "true", ")", ";", "Object", "user", "=", "userCons", ".", "newInstance", "(", "name", ",", "kerbAuthMethod", ",", "null", ")", ";", "subject", ".", "getPrincipals", "(", ")", ".", "add", "(", "(", "Principal", ")", "user", ")", ";", "}", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "warn", "(", "\"Something went wrong while trying to initialize Hadoop through reflection. This version of hadoop may not be compatible.\"", ",", "e", ")", ";", "}", "}" ]
Hadoop does not just go off of a TGT, it needs a bit more. This should fill in the rest. @param subject the subject that should have a TGT in it.
[ "Hadoop", "does", "not", "just", "go", "off", "of", "a", "TGT", "it", "needs", "a", "bit", "more", ".", "This", "should", "fill", "in", "the", "rest", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/kerberos/AutoTGT.java#L186-L242
25,228
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/worker/UpdateListener.java
UpdateListener.update
public void update(Map conf) { for (IUpdater updater : updaters) { try { updater.update(conf); } catch (Exception e) { LOG.error(e.getMessage(), e); } } }
java
public void update(Map conf) { for (IUpdater updater : updaters) { try { updater.update(conf); } catch (Exception e) { LOG.error(e.getMessage(), e); } } }
[ "public", "void", "update", "(", "Map", "conf", ")", "{", "for", "(", "IUpdater", "updater", ":", "updaters", ")", "{", "try", "{", "updater", ".", "update", "(", "conf", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "}", "}" ]
trigger all updaters' update action @param conf the new worker conf
[ "trigger", "all", "updaters", "update", "action" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/worker/UpdateListener.java#L56-L64
25,229
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java
MetricUtils.stream2taskName
public static String stream2taskName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.TASK.getV() + parts[0].charAt(1); parts[parts.length - 3] = EMPTY; String metricName = getMergeMetricName(parts[parts.length - 1]); parts[parts.length - 1] = metricName; } return concat(parts); }
java
public static String stream2taskName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.TASK.getV() + parts[0].charAt(1); parts[parts.length - 3] = EMPTY; String metricName = getMergeMetricName(parts[parts.length - 1]); parts[parts.length - 1] = metricName; } return concat(parts); }
[ "public", "static", "String", "stream2taskName", "(", "String", "old", ")", "{", "String", "[", "]", "parts", "=", "old", ".", "split", "(", "DELIM", ")", ";", "if", "(", "parts", ".", "length", ">=", "7", ")", "{", "parts", "[", "0", "]", "=", "MetaType", ".", "TASK", ".", "getV", "(", ")", "+", "parts", "[", "0", "]", ".", "charAt", "(", "1", ")", ";", "parts", "[", "parts", ".", "length", "-", "3", "]", "=", "EMPTY", ";", "String", "metricName", "=", "getMergeMetricName", "(", "parts", "[", "parts", ".", "length", "-", "1", "]", ")", ";", "parts", "[", "parts", ".", "length", "-", "1", "]", "=", "metricName", ";", "}", "return", "concat", "(", "parts", ")", ";", "}" ]
make streamId empty, remain other parts the same
[ "make", "streamId", "empty", "remain", "other", "parts", "the", "same" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java#L152-L162
25,230
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java
MetricUtils.task2compName
public static String task2compName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.COMPONENT.getV() + parts[0].charAt(1); parts[parts.length - 3] = EMPTY; parts[parts.length - 4] = "0"; } return concat(parts); }
java
public static String task2compName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.COMPONENT.getV() + parts[0].charAt(1); parts[parts.length - 3] = EMPTY; parts[parts.length - 4] = "0"; } return concat(parts); }
[ "public", "static", "String", "task2compName", "(", "String", "old", ")", "{", "String", "[", "]", "parts", "=", "old", ".", "split", "(", "DELIM", ")", ";", "if", "(", "parts", ".", "length", ">=", "7", ")", "{", "parts", "[", "0", "]", "=", "MetaType", ".", "COMPONENT", ".", "getV", "(", ")", "+", "parts", "[", "0", "]", ".", "charAt", "(", "1", ")", ";", "parts", "[", "parts", ".", "length", "-", "3", "]", "=", "EMPTY", ";", "parts", "[", "parts", ".", "length", "-", "4", "]", "=", "\"0\"", ";", "}", "return", "concat", "(", "parts", ")", ";", "}" ]
make taskId=0 and streamId empty.
[ "make", "taskId", "=", "0", "and", "streamId", "empty", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java#L167-L175
25,231
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java
MetricUtils.stream2compStreamName
public static String stream2compStreamName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.COMPONENT_STREAM.getV() + parts[0].charAt(1); //parts[parts.length - 3] = EMPTY; retain stream name parts[parts.length - 4] = "0"; // task parts[parts.length - 1] = getMergeMetricName(parts[parts.length - 1]); } return concat(parts); }
java
public static String stream2compStreamName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.COMPONENT_STREAM.getV() + parts[0].charAt(1); //parts[parts.length - 3] = EMPTY; retain stream name parts[parts.length - 4] = "0"; // task parts[parts.length - 1] = getMergeMetricName(parts[parts.length - 1]); } return concat(parts); }
[ "public", "static", "String", "stream2compStreamName", "(", "String", "old", ")", "{", "String", "[", "]", "parts", "=", "old", ".", "split", "(", "DELIM", ")", ";", "if", "(", "parts", ".", "length", ">=", "7", ")", "{", "parts", "[", "0", "]", "=", "MetaType", ".", "COMPONENT_STREAM", ".", "getV", "(", ")", "+", "parts", "[", "0", "]", ".", "charAt", "(", "1", ")", ";", "//parts[parts.length - 3] = EMPTY; retain stream name", "parts", "[", "parts", ".", "length", "-", "4", "]", "=", "\"0\"", ";", "// task", "parts", "[", "parts", ".", "length", "-", "1", "]", "=", "getMergeMetricName", "(", "parts", "[", "parts", ".", "length", "-", "1", "]", ")", ";", "}", "return", "concat", "(", "parts", ")", ";", "}" ]
converts a task metric name to a component-level stream metric name
[ "converts", "a", "task", "metric", "name", "to", "a", "component", "-", "level", "stream", "metric", "name" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java#L180-L189
25,232
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java
MetricUtils.task2MergeCompName
public static String task2MergeCompName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.COMPONENT.getV() + parts[0].charAt(1); parts[parts.length - 3] = EMPTY; parts[parts.length - 4] = "0"; String metricName = getMergeMetricName(parts[parts.length - 1]); parts[parts.length - 1] = metricName; } return concat(parts); }
java
public static String task2MergeCompName(String old) { String[] parts = old.split(DELIM); if (parts.length >= 7) { parts[0] = MetaType.COMPONENT.getV() + parts[0].charAt(1); parts[parts.length - 3] = EMPTY; parts[parts.length - 4] = "0"; String metricName = getMergeMetricName(parts[parts.length - 1]); parts[parts.length - 1] = metricName; } return concat(parts); }
[ "public", "static", "String", "task2MergeCompName", "(", "String", "old", ")", "{", "String", "[", "]", "parts", "=", "old", ".", "split", "(", "DELIM", ")", ";", "if", "(", "parts", ".", "length", ">=", "7", ")", "{", "parts", "[", "0", "]", "=", "MetaType", ".", "COMPONENT", ".", "getV", "(", ")", "+", "parts", "[", "0", "]", ".", "charAt", "(", "1", ")", ";", "parts", "[", "parts", ".", "length", "-", "3", "]", "=", "EMPTY", ";", "parts", "[", "parts", ".", "length", "-", "4", "]", "=", "\"0\"", ";", "String", "metricName", "=", "getMergeMetricName", "(", "parts", "[", "parts", ".", "length", "-", "1", "]", ")", ";", "parts", "[", "parts", ".", "length", "-", "1", "]", "=", "metricName", ";", "}", "return", "concat", "(", "parts", ")", ";", "}" ]
make taskId=0 and streamId empty and metricName remain the string after `.`.
[ "make", "taskId", "=", "0", "and", "streamId", "empty", "and", "metricName", "remain", "the", "string", "after", ".", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java#L194-L205
25,233
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java
MetricUtils.comp2topologyName
public static String comp2topologyName(String old) { String[] parts = old.split(DELIM); parts[0] = MetaType.TOPOLOGY.getV() + parts[0].charAt(1); // type + topologyId + comp + task + group + name return concat(parts[0], parts[1], EMPTY, "0", parts[5], parts[6]); }
java
public static String comp2topologyName(String old) { String[] parts = old.split(DELIM); parts[0] = MetaType.TOPOLOGY.getV() + parts[0].charAt(1); // type + topologyId + comp + task + group + name return concat(parts[0], parts[1], EMPTY, "0", parts[5], parts[6]); }
[ "public", "static", "String", "comp2topologyName", "(", "String", "old", ")", "{", "String", "[", "]", "parts", "=", "old", ".", "split", "(", "DELIM", ")", ";", "parts", "[", "0", "]", "=", "MetaType", ".", "TOPOLOGY", ".", "getV", "(", ")", "+", "parts", "[", "0", "]", ".", "charAt", "(", "1", ")", ";", "// type + topologyId + comp + task + group + name", "return", "concat", "(", "parts", "[", "0", "]", ",", "parts", "[", "1", "]", ",", "EMPTY", ",", "\"0\"", ",", "parts", "[", "5", "]", ",", "parts", "[", "6", "]", ")", ";", "}" ]
change component metric name to topology metric name
[ "change", "component", "metric", "name", "to", "topology", "metric", "name" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/metric/MetricUtils.java#L218-L223
25,234
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetConfiguration
public static Configuration GetConfiguration(Map storm_conf) { Configuration login_conf = null; // find login file configuration from Storm configuration String loginConfigurationFile = (String) storm_conf.get("java.security.auth.login.config"); if ((loginConfigurationFile != null) && (loginConfigurationFile.length() > 0)) { File config_file = new File(loginConfigurationFile); if (!config_file.canRead()) { throw new RuntimeException("File " + loginConfigurationFile + " cannot be read."); } try { URI config_uri = config_file.toURI(); login_conf = Configuration.getInstance("JavaLoginConfig", new URIParameter(config_uri)); } catch (Exception ex) { throw new RuntimeException(ex); } } return login_conf; }
java
public static Configuration GetConfiguration(Map storm_conf) { Configuration login_conf = null; // find login file configuration from Storm configuration String loginConfigurationFile = (String) storm_conf.get("java.security.auth.login.config"); if ((loginConfigurationFile != null) && (loginConfigurationFile.length() > 0)) { File config_file = new File(loginConfigurationFile); if (!config_file.canRead()) { throw new RuntimeException("File " + loginConfigurationFile + " cannot be read."); } try { URI config_uri = config_file.toURI(); login_conf = Configuration.getInstance("JavaLoginConfig", new URIParameter(config_uri)); } catch (Exception ex) { throw new RuntimeException(ex); } } return login_conf; }
[ "public", "static", "Configuration", "GetConfiguration", "(", "Map", "storm_conf", ")", "{", "Configuration", "login_conf", "=", "null", ";", "// find login file configuration from Storm configuration", "String", "loginConfigurationFile", "=", "(", "String", ")", "storm_conf", ".", "get", "(", "\"java.security.auth.login.config\"", ")", ";", "if", "(", "(", "loginConfigurationFile", "!=", "null", ")", "&&", "(", "loginConfigurationFile", ".", "length", "(", ")", ">", "0", ")", ")", "{", "File", "config_file", "=", "new", "File", "(", "loginConfigurationFile", ")", ";", "if", "(", "!", "config_file", ".", "canRead", "(", ")", ")", "{", "throw", "new", "RuntimeException", "(", "\"File \"", "+", "loginConfigurationFile", "+", "\" cannot be read.\"", ")", ";", "}", "try", "{", "URI", "config_uri", "=", "config_file", ".", "toURI", "(", ")", ";", "login_conf", "=", "Configuration", ".", "getInstance", "(", "\"JavaLoginConfig\"", ",", "new", "URIParameter", "(", "config_uri", ")", ")", ";", "}", "catch", "(", "Exception", "ex", ")", "{", "throw", "new", "RuntimeException", "(", "ex", ")", ";", "}", "}", "return", "login_conf", ";", "}" ]
Construct a JAAS configuration object per storm configuration file @param storm_conf Storm configuration @return JAAS configuration object
[ "Construct", "a", "JAAS", "configuration", "object", "per", "storm", "configuration", "file" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L52-L71
25,235
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetPrincipalToLocalPlugin
public static IPrincipalToLocal GetPrincipalToLocalPlugin(Map storm_conf) { IPrincipalToLocal ptol = null; try { String ptol_klassName = (String) storm_conf.get(Config.STORM_PRINCIPAL_TO_LOCAL_PLUGIN); Class klass = Class.forName(ptol_klassName); ptol = (IPrincipalToLocal) klass.newInstance(); ptol.prepare(storm_conf); } catch (Exception e) { throw new RuntimeException(e); } return ptol; }
java
public static IPrincipalToLocal GetPrincipalToLocalPlugin(Map storm_conf) { IPrincipalToLocal ptol = null; try { String ptol_klassName = (String) storm_conf.get(Config.STORM_PRINCIPAL_TO_LOCAL_PLUGIN); Class klass = Class.forName(ptol_klassName); ptol = (IPrincipalToLocal) klass.newInstance(); ptol.prepare(storm_conf); } catch (Exception e) { throw new RuntimeException(e); } return ptol; }
[ "public", "static", "IPrincipalToLocal", "GetPrincipalToLocalPlugin", "(", "Map", "storm_conf", ")", "{", "IPrincipalToLocal", "ptol", "=", "null", ";", "try", "{", "String", "ptol_klassName", "=", "(", "String", ")", "storm_conf", ".", "get", "(", "Config", ".", "STORM_PRINCIPAL_TO_LOCAL_PLUGIN", ")", ";", "Class", "klass", "=", "Class", ".", "forName", "(", "ptol_klassName", ")", ";", "ptol", "=", "(", "IPrincipalToLocal", ")", "klass", ".", "newInstance", "(", ")", ";", "ptol", ".", "prepare", "(", "storm_conf", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "return", "ptol", ";", "}" ]
Construct a principal to local plugin @param conf storm configuration @return the plugin
[ "Construct", "a", "principal", "to", "local", "plugin" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L79-L90
25,236
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetGroupMappingServiceProviderPlugin
public static IGroupMappingServiceProvider GetGroupMappingServiceProviderPlugin(Map storm_conf) { IGroupMappingServiceProvider gmsp = null; try { String gmsp_klassName = (String) storm_conf.get(Config.STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN); Class klass = Class.forName(gmsp_klassName); gmsp = (IGroupMappingServiceProvider) klass.newInstance(); gmsp.prepare(storm_conf); } catch (Exception e) { throw new RuntimeException(e); } return gmsp; }
java
public static IGroupMappingServiceProvider GetGroupMappingServiceProviderPlugin(Map storm_conf) { IGroupMappingServiceProvider gmsp = null; try { String gmsp_klassName = (String) storm_conf.get(Config.STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN); Class klass = Class.forName(gmsp_klassName); gmsp = (IGroupMappingServiceProvider) klass.newInstance(); gmsp.prepare(storm_conf); } catch (Exception e) { throw new RuntimeException(e); } return gmsp; }
[ "public", "static", "IGroupMappingServiceProvider", "GetGroupMappingServiceProviderPlugin", "(", "Map", "storm_conf", ")", "{", "IGroupMappingServiceProvider", "gmsp", "=", "null", ";", "try", "{", "String", "gmsp_klassName", "=", "(", "String", ")", "storm_conf", ".", "get", "(", "Config", ".", "STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN", ")", ";", "Class", "klass", "=", "Class", ".", "forName", "(", "gmsp_klassName", ")", ";", "gmsp", "=", "(", "IGroupMappingServiceProvider", ")", "klass", ".", "newInstance", "(", ")", ";", "gmsp", ".", "prepare", "(", "storm_conf", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "return", "gmsp", ";", "}" ]
Construct a group mapping service provider plugin @param conf storm configuration @return the plugin
[ "Construct", "a", "group", "mapping", "service", "provider", "plugin" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L98-L109
25,237
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetCredentialRenewers
public static Collection<ICredentialsRenewer> GetCredentialRenewers(Map conf) { try { Set<ICredentialsRenewer> ret = new HashSet<ICredentialsRenewer>(); Collection<String> clazzes = (Collection<String>) conf.get(Config.NIMBUS_CREDENTIAL_RENEWERS); if (clazzes != null) { for (String clazz : clazzes) { ICredentialsRenewer inst = (ICredentialsRenewer) Class.forName(clazz).newInstance(); inst.prepare(conf); ret.add(inst); } } return ret; } catch (Exception e) { throw new RuntimeException(e); } }
java
public static Collection<ICredentialsRenewer> GetCredentialRenewers(Map conf) { try { Set<ICredentialsRenewer> ret = new HashSet<ICredentialsRenewer>(); Collection<String> clazzes = (Collection<String>) conf.get(Config.NIMBUS_CREDENTIAL_RENEWERS); if (clazzes != null) { for (String clazz : clazzes) { ICredentialsRenewer inst = (ICredentialsRenewer) Class.forName(clazz).newInstance(); inst.prepare(conf); ret.add(inst); } } return ret; } catch (Exception e) { throw new RuntimeException(e); } }
[ "public", "static", "Collection", "<", "ICredentialsRenewer", ">", "GetCredentialRenewers", "(", "Map", "conf", ")", "{", "try", "{", "Set", "<", "ICredentialsRenewer", ">", "ret", "=", "new", "HashSet", "<", "ICredentialsRenewer", ">", "(", ")", ";", "Collection", "<", "String", ">", "clazzes", "=", "(", "Collection", "<", "String", ">", ")", "conf", ".", "get", "(", "Config", ".", "NIMBUS_CREDENTIAL_RENEWERS", ")", ";", "if", "(", "clazzes", "!=", "null", ")", "{", "for", "(", "String", "clazz", ":", "clazzes", ")", "{", "ICredentialsRenewer", "inst", "=", "(", "ICredentialsRenewer", ")", "Class", ".", "forName", "(", "clazz", ")", ".", "newInstance", "(", ")", ";", "inst", ".", "prepare", "(", "conf", ")", ";", "ret", ".", "add", "(", "inst", ")", ";", "}", "}", "return", "ret", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "}" ]
Get all of the configured Credential Renwer Plugins. @param storm_conf the storm configuration to use. @return the configured credential renewers.
[ "Get", "all", "of", "the", "configured", "Credential", "Renwer", "Plugins", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L117-L132
25,238
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.getNimbusAutoCredPlugins
public static Collection<INimbusCredentialPlugin> getNimbusAutoCredPlugins(Map conf) { try { Set<INimbusCredentialPlugin> ret = new HashSet<INimbusCredentialPlugin>(); Collection<String> clazzes = (Collection<String>) conf.get(Config.NIMBUS_AUTO_CRED_PLUGINS); if (clazzes != null) { for (String clazz : clazzes) { INimbusCredentialPlugin inst = (INimbusCredentialPlugin) Class.forName(clazz).newInstance(); inst.prepare(conf); ret.add(inst); } } return ret; } catch (Exception e) { throw new RuntimeException(e); } }
java
public static Collection<INimbusCredentialPlugin> getNimbusAutoCredPlugins(Map conf) { try { Set<INimbusCredentialPlugin> ret = new HashSet<INimbusCredentialPlugin>(); Collection<String> clazzes = (Collection<String>) conf.get(Config.NIMBUS_AUTO_CRED_PLUGINS); if (clazzes != null) { for (String clazz : clazzes) { INimbusCredentialPlugin inst = (INimbusCredentialPlugin) Class.forName(clazz).newInstance(); inst.prepare(conf); ret.add(inst); } } return ret; } catch (Exception e) { throw new RuntimeException(e); } }
[ "public", "static", "Collection", "<", "INimbusCredentialPlugin", ">", "getNimbusAutoCredPlugins", "(", "Map", "conf", ")", "{", "try", "{", "Set", "<", "INimbusCredentialPlugin", ">", "ret", "=", "new", "HashSet", "<", "INimbusCredentialPlugin", ">", "(", ")", ";", "Collection", "<", "String", ">", "clazzes", "=", "(", "Collection", "<", "String", ">", ")", "conf", ".", "get", "(", "Config", ".", "NIMBUS_AUTO_CRED_PLUGINS", ")", ";", "if", "(", "clazzes", "!=", "null", ")", "{", "for", "(", "String", "clazz", ":", "clazzes", ")", "{", "INimbusCredentialPlugin", "inst", "=", "(", "INimbusCredentialPlugin", ")", "Class", ".", "forName", "(", "clazz", ")", ".", "newInstance", "(", ")", ";", "inst", ".", "prepare", "(", "conf", ")", ";", "ret", ".", "add", "(", "inst", ")", ";", "}", "}", "return", "ret", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "}" ]
Get all the Nimbus Auto cred plugins. @param conf nimbus configuration to use. @return nimbus auto credential plugins.
[ "Get", "all", "the", "Nimbus", "Auto", "cred", "plugins", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L140-L155
25,239
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetAutoCredentials
public static Collection<IAutoCredentials> GetAutoCredentials(Map storm_conf) { try { Set<IAutoCredentials> autos = new HashSet<IAutoCredentials>(); Collection<String> clazzes = (Collection<String>) storm_conf.get(Config.TOPOLOGY_AUTO_CREDENTIALS); if (clazzes != null) { for (String clazz : clazzes) { IAutoCredentials a = (IAutoCredentials) Class.forName(clazz).newInstance(); a.prepare(storm_conf); autos.add(a); } } LOG.info("Got AutoCreds " + autos); return autos; } catch (Exception e) { throw new RuntimeException(e); } }
java
public static Collection<IAutoCredentials> GetAutoCredentials(Map storm_conf) { try { Set<IAutoCredentials> autos = new HashSet<IAutoCredentials>(); Collection<String> clazzes = (Collection<String>) storm_conf.get(Config.TOPOLOGY_AUTO_CREDENTIALS); if (clazzes != null) { for (String clazz : clazzes) { IAutoCredentials a = (IAutoCredentials) Class.forName(clazz).newInstance(); a.prepare(storm_conf); autos.add(a); } } LOG.info("Got AutoCreds " + autos); return autos; } catch (Exception e) { throw new RuntimeException(e); } }
[ "public", "static", "Collection", "<", "IAutoCredentials", ">", "GetAutoCredentials", "(", "Map", "storm_conf", ")", "{", "try", "{", "Set", "<", "IAutoCredentials", ">", "autos", "=", "new", "HashSet", "<", "IAutoCredentials", ">", "(", ")", ";", "Collection", "<", "String", ">", "clazzes", "=", "(", "Collection", "<", "String", ">", ")", "storm_conf", ".", "get", "(", "Config", ".", "TOPOLOGY_AUTO_CREDENTIALS", ")", ";", "if", "(", "clazzes", "!=", "null", ")", "{", "for", "(", "String", "clazz", ":", "clazzes", ")", "{", "IAutoCredentials", "a", "=", "(", "IAutoCredentials", ")", "Class", ".", "forName", "(", "clazz", ")", ".", "newInstance", "(", ")", ";", "a", ".", "prepare", "(", "storm_conf", ")", ";", "autos", ".", "add", "(", "a", ")", ";", "}", "}", "LOG", ".", "info", "(", "\"Got AutoCreds \"", "+", "autos", ")", ";", "return", "autos", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "}" ]
Get all of the configured AutoCredential Plugins. @param storm_conf the storm configuration to use. @return the configured auto credentials.
[ "Get", "all", "of", "the", "configured", "AutoCredential", "Plugins", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L163-L179
25,240
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.populateSubject
public static Subject populateSubject(Subject subject, Collection<IAutoCredentials> autos, Map<String, String> credentials) { try { if (subject == null) { subject = new Subject(); } for (IAutoCredentials autoCred : autos) { autoCred.populateSubject(subject, credentials); } return subject; } catch (Exception e) { throw new RuntimeException(e); } }
java
public static Subject populateSubject(Subject subject, Collection<IAutoCredentials> autos, Map<String, String> credentials) { try { if (subject == null) { subject = new Subject(); } for (IAutoCredentials autoCred : autos) { autoCred.populateSubject(subject, credentials); } return subject; } catch (Exception e) { throw new RuntimeException(e); } }
[ "public", "static", "Subject", "populateSubject", "(", "Subject", "subject", ",", "Collection", "<", "IAutoCredentials", ">", "autos", ",", "Map", "<", "String", ",", "String", ">", "credentials", ")", "{", "try", "{", "if", "(", "subject", "==", "null", ")", "{", "subject", "=", "new", "Subject", "(", ")", ";", "}", "for", "(", "IAutoCredentials", "autoCred", ":", "autos", ")", "{", "autoCred", ".", "populateSubject", "(", "subject", ",", "credentials", ")", ";", "}", "return", "subject", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "}" ]
Populate a subject from credentials using the IAutoCredentials. @param subject the subject to populate or null if a new Subject should be created. @param autos the IAutoCredentials to call to populate the subject. @param credentials the credentials to pull from @return the populated subject.
[ "Populate", "a", "subject", "from", "credentials", "using", "the", "IAutoCredentials", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L189-L201
25,241
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetTransportPlugin
public static ITransportPlugin GetTransportPlugin(ThriftConnectionType type, Map storm_conf, Configuration login_conf) { ITransportPlugin transportPlugin = null; try { String transport_plugin_klassName = type.getTransportPlugin(storm_conf); Class klass = Class.forName(transport_plugin_klassName); transportPlugin = (ITransportPlugin) klass.newInstance(); transportPlugin.prepare(type, storm_conf, login_conf); } catch (Exception e) { throw new RuntimeException(e); } return transportPlugin; }
java
public static ITransportPlugin GetTransportPlugin(ThriftConnectionType type, Map storm_conf, Configuration login_conf) { ITransportPlugin transportPlugin = null; try { String transport_plugin_klassName = type.getTransportPlugin(storm_conf); Class klass = Class.forName(transport_plugin_klassName); transportPlugin = (ITransportPlugin) klass.newInstance(); transportPlugin.prepare(type, storm_conf, login_conf); } catch (Exception e) { throw new RuntimeException(e); } return transportPlugin; }
[ "public", "static", "ITransportPlugin", "GetTransportPlugin", "(", "ThriftConnectionType", "type", ",", "Map", "storm_conf", ",", "Configuration", "login_conf", ")", "{", "ITransportPlugin", "transportPlugin", "=", "null", ";", "try", "{", "String", "transport_plugin_klassName", "=", "type", ".", "getTransportPlugin", "(", "storm_conf", ")", ";", "Class", "klass", "=", "Class", ".", "forName", "(", "transport_plugin_klassName", ")", ";", "transportPlugin", "=", "(", "ITransportPlugin", ")", "klass", ".", "newInstance", "(", ")", ";", "transportPlugin", ".", "prepare", "(", "type", ",", "storm_conf", ",", "login_conf", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "return", "transportPlugin", ";", "}" ]
Construct a transport plugin per storm configuration @param conf storm configuration @return
[ "Construct", "a", "transport", "plugin", "per", "storm", "configuration" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L230-L241
25,242
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetUiHttpCredentialsPlugin
public static IHttpCredentialsPlugin GetUiHttpCredentialsPlugin(Map conf) { String klassName = (String) conf.get(Config.UI_HTTP_CREDS_PLUGIN); return AuthUtils.GetHttpCredentialsPlugin(conf, klassName); }
java
public static IHttpCredentialsPlugin GetUiHttpCredentialsPlugin(Map conf) { String klassName = (String) conf.get(Config.UI_HTTP_CREDS_PLUGIN); return AuthUtils.GetHttpCredentialsPlugin(conf, klassName); }
[ "public", "static", "IHttpCredentialsPlugin", "GetUiHttpCredentialsPlugin", "(", "Map", "conf", ")", "{", "String", "klassName", "=", "(", "String", ")", "conf", ".", "get", "(", "Config", ".", "UI_HTTP_CREDS_PLUGIN", ")", ";", "return", "AuthUtils", ".", "GetHttpCredentialsPlugin", "(", "conf", ",", "klassName", ")", ";", "}" ]
Construct an HttpServletRequest credential plugin specified by the UI storm configuration @param conf storm configuration @return the plugin
[ "Construct", "an", "HttpServletRequest", "credential", "plugin", "specified", "by", "the", "UI", "storm", "configuration" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L261-L264
25,243
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java
AuthUtils.GetDrpcHttpCredentialsPlugin
public static IHttpCredentialsPlugin GetDrpcHttpCredentialsPlugin(Map conf) { String klassName = (String) conf.get(Config.DRPC_HTTP_CREDS_PLUGIN); return AuthUtils.GetHttpCredentialsPlugin(conf, klassName); }
java
public static IHttpCredentialsPlugin GetDrpcHttpCredentialsPlugin(Map conf) { String klassName = (String) conf.get(Config.DRPC_HTTP_CREDS_PLUGIN); return AuthUtils.GetHttpCredentialsPlugin(conf, klassName); }
[ "public", "static", "IHttpCredentialsPlugin", "GetDrpcHttpCredentialsPlugin", "(", "Map", "conf", ")", "{", "String", "klassName", "=", "(", "String", ")", "conf", ".", "get", "(", "Config", ".", "DRPC_HTTP_CREDS_PLUGIN", ")", ";", "return", "AuthUtils", ".", "GetHttpCredentialsPlugin", "(", "conf", ",", "klassName", ")", ";", "}" ]
Construct an HttpServletRequest credential plugin specified by the DRPC storm configuration @param conf storm configuration @return the plugin
[ "Construct", "an", "HttpServletRequest", "credential", "plugin", "specified", "by", "the", "DRPC", "storm", "configuration" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/AuthUtils.java#L272-L275
25,244
alibaba/jstorm
jstorm-core/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java
ChainedAggregatorImpl.increment
private boolean increment(TridentCollector[] lengths, int[] indices, int j) { if (j == -1) return false; indices[j]++; CaptureCollector capturer = (CaptureCollector) lengths[j]; if (indices[j] >= capturer.captured.size()) { indices[j] = 0; return increment(lengths, indices, j - 1); } return true; }
java
private boolean increment(TridentCollector[] lengths, int[] indices, int j) { if (j == -1) return false; indices[j]++; CaptureCollector capturer = (CaptureCollector) lengths[j]; if (indices[j] >= capturer.captured.size()) { indices[j] = 0; return increment(lengths, indices, j - 1); } return true; }
[ "private", "boolean", "increment", "(", "TridentCollector", "[", "]", "lengths", ",", "int", "[", "]", "indices", ",", "int", "j", ")", "{", "if", "(", "j", "==", "-", "1", ")", "return", "false", ";", "indices", "[", "j", "]", "++", ";", "CaptureCollector", "capturer", "=", "(", "CaptureCollector", ")", "lengths", "[", "j", "]", ";", "if", "(", "indices", "[", "j", "]", ">=", "capturer", ".", "captured", ".", "size", "(", ")", ")", "{", "indices", "[", "j", "]", "=", "0", ";", "return", "increment", "(", "lengths", ",", "indices", ",", "j", "-", "1", ")", ";", "}", "return", "true", ";", "}" ]
return false if can't increment anymore
[ "return", "false", "if", "can", "t", "increment", "anymore" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java#L95-L105
25,245
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/message/netty/NettyClient.java
NettyClient.doReconnect
public void doReconnect() { if (channelRef.get() != null) { // if (channelRef.get().isWritable()) { // LOG.info("already exist a writable channel, give up reconnect, {}", // channelRef.get()); // return; // } return; } if (isClosed()) { return; } if (isConnecting.getAndSet(true)) { LOG.info("Connect twice {}", name()); return; } long sleepMs = getSleepTimeMs(); LOG.info("Reconnect ... [{}], {}, sleep {}ms", retries.get(), name, sleepMs); ChannelFuture future = bootstrap.connect(remoteAddr); future.addListener(new ChannelFutureListener() { public void operationComplete(ChannelFuture future) throws Exception { isConnecting.set(false); Channel channel = future.getChannel(); if (future.isSuccess()) { // do something else LOG.info("Connection established, channel = :{}", channel); setChannel(channel); // handleResponse(); BATCH_THRESHOLD_WARN = ConfigExtension.getNettyBufferThresholdSize(stormConf); // Check if any pending message /*synchronized (writeLock) { if (channel != null && messageBuffer.size() > 0) { MessageBatch messageBatch = messageBuffer.drain(); flushRequest(channel, messageBatch); } else { LOG.warn("Failed to flush pending message after reconnecting, channel={}, messageBuffer.size={}", channel, messageBuffer.size()); } }*/ } else { if (!isClosed()) { LOG.info("Failed to reconnect ... [{}], {}, channel = {}, cause = {}", retries.get(), name, channel, future.getCause()); reconnect(); } } } }); JStormUtils.sleepMs(sleepMs); }
java
public void doReconnect() { if (channelRef.get() != null) { // if (channelRef.get().isWritable()) { // LOG.info("already exist a writable channel, give up reconnect, {}", // channelRef.get()); // return; // } return; } if (isClosed()) { return; } if (isConnecting.getAndSet(true)) { LOG.info("Connect twice {}", name()); return; } long sleepMs = getSleepTimeMs(); LOG.info("Reconnect ... [{}], {}, sleep {}ms", retries.get(), name, sleepMs); ChannelFuture future = bootstrap.connect(remoteAddr); future.addListener(new ChannelFutureListener() { public void operationComplete(ChannelFuture future) throws Exception { isConnecting.set(false); Channel channel = future.getChannel(); if (future.isSuccess()) { // do something else LOG.info("Connection established, channel = :{}", channel); setChannel(channel); // handleResponse(); BATCH_THRESHOLD_WARN = ConfigExtension.getNettyBufferThresholdSize(stormConf); // Check if any pending message /*synchronized (writeLock) { if (channel != null && messageBuffer.size() > 0) { MessageBatch messageBatch = messageBuffer.drain(); flushRequest(channel, messageBatch); } else { LOG.warn("Failed to flush pending message after reconnecting, channel={}, messageBuffer.size={}", channel, messageBuffer.size()); } }*/ } else { if (!isClosed()) { LOG.info("Failed to reconnect ... [{}], {}, channel = {}, cause = {}", retries.get(), name, channel, future.getCause()); reconnect(); } } } }); JStormUtils.sleepMs(sleepMs); }
[ "public", "void", "doReconnect", "(", ")", "{", "if", "(", "channelRef", ".", "get", "(", ")", "!=", "null", ")", "{", "// if (channelRef.get().isWritable()) {", "// LOG.info(\"already exist a writable channel, give up reconnect, {}\",", "// channelRef.get());", "// return;", "// }", "return", ";", "}", "if", "(", "isClosed", "(", ")", ")", "{", "return", ";", "}", "if", "(", "isConnecting", ".", "getAndSet", "(", "true", ")", ")", "{", "LOG", ".", "info", "(", "\"Connect twice {}\"", ",", "name", "(", ")", ")", ";", "return", ";", "}", "long", "sleepMs", "=", "getSleepTimeMs", "(", ")", ";", "LOG", ".", "info", "(", "\"Reconnect ... [{}], {}, sleep {}ms\"", ",", "retries", ".", "get", "(", ")", ",", "name", ",", "sleepMs", ")", ";", "ChannelFuture", "future", "=", "bootstrap", ".", "connect", "(", "remoteAddr", ")", ";", "future", ".", "addListener", "(", "new", "ChannelFutureListener", "(", ")", "{", "public", "void", "operationComplete", "(", "ChannelFuture", "future", ")", "throws", "Exception", "{", "isConnecting", ".", "set", "(", "false", ")", ";", "Channel", "channel", "=", "future", ".", "getChannel", "(", ")", ";", "if", "(", "future", ".", "isSuccess", "(", ")", ")", "{", "// do something else", "LOG", ".", "info", "(", "\"Connection established, channel = :{}\"", ",", "channel", ")", ";", "setChannel", "(", "channel", ")", ";", "// handleResponse();", "BATCH_THRESHOLD_WARN", "=", "ConfigExtension", ".", "getNettyBufferThresholdSize", "(", "stormConf", ")", ";", "// Check if any pending message", "/*synchronized (writeLock) {\n if (channel != null && messageBuffer.size() > 0) {\n MessageBatch messageBatch = messageBuffer.drain();\n flushRequest(channel, messageBatch);\n } else {\n LOG.warn(\"Failed to flush pending message after reconnecting, channel={}, messageBuffer.size={}\",\n channel, messageBuffer.size());\n }\n }*/", "}", "else", "{", "if", "(", "!", "isClosed", "(", ")", ")", "{", "LOG", ".", "info", "(", "\"Failed to reconnect ... 
[{}], {}, channel = {}, cause = {}\"", ",", "retries", ".", "get", "(", ")", ",", "name", ",", "channel", ",", "future", ".", "getCause", "(", ")", ")", ";", "reconnect", "(", ")", ";", "}", "}", "}", "}", ")", ";", "JStormUtils", ".", "sleepMs", "(", "sleepMs", ")", ";", "}" ]
The function can't be synchronized, otherwise it will cause deadlock
[ "The", "function", "can", "t", "be", "synchronized", "otherwise", "it", "will", "cause", "deadlock" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/message/netty/NettyClient.java#L244-L297
25,246
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/message/netty/NettyClient.java
NettyClient.closeChannel
void closeChannel(final Channel channel) { synchronized (channelClosing) { if (closingChannel.contains(channel)) { LOG.info(channel.toString() + " has already been closed"); return; } closingChannel.add(channel); } LOG.debug(channel.toString() + " begin to close"); ChannelFuture closeFuture = channel.close(); closeFuture.addListener(new ChannelFutureListener() { public void operationComplete(ChannelFuture future) throws Exception { synchronized (channelClosing) { closingChannel.remove(channel); } LOG.debug(channel.toString() + " closed."); } }); }
java
void closeChannel(final Channel channel) { synchronized (channelClosing) { if (closingChannel.contains(channel)) { LOG.info(channel.toString() + " has already been closed"); return; } closingChannel.add(channel); } LOG.debug(channel.toString() + " begin to close"); ChannelFuture closeFuture = channel.close(); closeFuture.addListener(new ChannelFutureListener() { public void operationComplete(ChannelFuture future) throws Exception { synchronized (channelClosing) { closingChannel.remove(channel); } LOG.debug(channel.toString() + " closed."); } }); }
[ "void", "closeChannel", "(", "final", "Channel", "channel", ")", "{", "synchronized", "(", "channelClosing", ")", "{", "if", "(", "closingChannel", ".", "contains", "(", "channel", ")", ")", "{", "LOG", ".", "info", "(", "channel", ".", "toString", "(", ")", "+", "\" has already been closed\"", ")", ";", "return", ";", "}", "closingChannel", ".", "add", "(", "channel", ")", ";", "}", "LOG", ".", "debug", "(", "channel", ".", "toString", "(", ")", "+", "\" begin to close\"", ")", ";", "ChannelFuture", "closeFuture", "=", "channel", ".", "close", "(", ")", ";", "closeFuture", ".", "addListener", "(", "new", "ChannelFutureListener", "(", ")", "{", "public", "void", "operationComplete", "(", "ChannelFuture", "future", ")", "throws", "Exception", "{", "synchronized", "(", "channelClosing", ")", "{", "closingChannel", ".", "remove", "(", "channel", ")", ";", "}", "LOG", ".", "debug", "(", "channel", ".", "toString", "(", ")", "+", "\" closed.\"", ")", ";", "}", "}", ")", ";", "}" ]
Avoid channel double close
[ "Avoid", "channel", "double", "close" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/message/netty/NettyClient.java#L454-L475
25,247
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/task/execute/spout/SpoutExecutors.java
SpoutExecutors.onEvent
@Override public void onEvent(Object event, long sequence, boolean endOfBatch) throws Exception { try { if (event == null) { return; } Runnable runnable = null; if (event instanceof Tuple) { if (((TupleExt) event).isBatchTuple()) { List<Object> values = ((Tuple) event).getValues(); for (Object value : values) { Pair<MessageId, List<Object>> val = (Pair<MessageId, List<Object>>) value; TupleImplExt tuple = new TupleImplExt( sysTopologyCtx, val.getSecond(), val.getFirst(), ((TupleImplExt) event)); runnable = processTupleEvent(tuple); if (runnable != null) { runnable.run(); runnable = null; } } } else { runnable = processTupleEvent((Tuple) event); } } else if (event instanceof TimerTrigger.TimerEvent) { processTimerEvent((TimerTrigger.TimerEvent) event); return; } else if (event instanceof IAckMsg) { runnable = (Runnable) event; } else if (event instanceof Runnable) { runnable = (Runnable) event; } else { LOG.warn("Receive one unknown event-" + event.toString() + " " + idStr); return; } if (runnable != null) runnable.run(); } catch (Throwable e) { if (!taskStatus.isShutdown()) { LOG.info("Unknown exception ", e); reportError.report(e); } } }
java
@Override public void onEvent(Object event, long sequence, boolean endOfBatch) throws Exception { try { if (event == null) { return; } Runnable runnable = null; if (event instanceof Tuple) { if (((TupleExt) event).isBatchTuple()) { List<Object> values = ((Tuple) event).getValues(); for (Object value : values) { Pair<MessageId, List<Object>> val = (Pair<MessageId, List<Object>>) value; TupleImplExt tuple = new TupleImplExt( sysTopologyCtx, val.getSecond(), val.getFirst(), ((TupleImplExt) event)); runnable = processTupleEvent(tuple); if (runnable != null) { runnable.run(); runnable = null; } } } else { runnable = processTupleEvent((Tuple) event); } } else if (event instanceof TimerTrigger.TimerEvent) { processTimerEvent((TimerTrigger.TimerEvent) event); return; } else if (event instanceof IAckMsg) { runnable = (Runnable) event; } else if (event instanceof Runnable) { runnable = (Runnable) event; } else { LOG.warn("Receive one unknown event-" + event.toString() + " " + idStr); return; } if (runnable != null) runnable.run(); } catch (Throwable e) { if (!taskStatus.isShutdown()) { LOG.info("Unknown exception ", e); reportError.report(e); } } }
[ "@", "Override", "public", "void", "onEvent", "(", "Object", "event", ",", "long", "sequence", ",", "boolean", "endOfBatch", ")", "throws", "Exception", "{", "try", "{", "if", "(", "event", "==", "null", ")", "{", "return", ";", "}", "Runnable", "runnable", "=", "null", ";", "if", "(", "event", "instanceof", "Tuple", ")", "{", "if", "(", "(", "(", "TupleExt", ")", "event", ")", ".", "isBatchTuple", "(", ")", ")", "{", "List", "<", "Object", ">", "values", "=", "(", "(", "Tuple", ")", "event", ")", ".", "getValues", "(", ")", ";", "for", "(", "Object", "value", ":", "values", ")", "{", "Pair", "<", "MessageId", ",", "List", "<", "Object", ">", ">", "val", "=", "(", "Pair", "<", "MessageId", ",", "List", "<", "Object", ">", ">", ")", "value", ";", "TupleImplExt", "tuple", "=", "new", "TupleImplExt", "(", "sysTopologyCtx", ",", "val", ".", "getSecond", "(", ")", ",", "val", ".", "getFirst", "(", ")", ",", "(", "(", "TupleImplExt", ")", "event", ")", ")", ";", "runnable", "=", "processTupleEvent", "(", "tuple", ")", ";", "if", "(", "runnable", "!=", "null", ")", "{", "runnable", ".", "run", "(", ")", ";", "runnable", "=", "null", ";", "}", "}", "}", "else", "{", "runnable", "=", "processTupleEvent", "(", "(", "Tuple", ")", "event", ")", ";", "}", "}", "else", "if", "(", "event", "instanceof", "TimerTrigger", ".", "TimerEvent", ")", "{", "processTimerEvent", "(", "(", "TimerTrigger", ".", "TimerEvent", ")", "event", ")", ";", "return", ";", "}", "else", "if", "(", "event", "instanceof", "IAckMsg", ")", "{", "runnable", "=", "(", "Runnable", ")", "event", ";", "}", "else", "if", "(", "event", "instanceof", "Runnable", ")", "{", "runnable", "=", "(", "Runnable", ")", "event", ";", "}", "else", "{", "LOG", ".", "warn", "(", "\"Receive one unknown event-\"", "+", "event", ".", "toString", "(", ")", "+", "\" \"", "+", "idStr", ")", ";", "return", ";", "}", "if", "(", "runnable", "!=", "null", ")", "runnable", ".", "run", "(", ")", ";", "}", "catch", "(", "Throwable", "e", 
")", "{", "if", "(", "!", "taskStatus", ".", "isShutdown", "(", ")", ")", "{", "LOG", ".", "info", "(", "\"Unknown exception \"", ",", "e", ")", ";", "reportError", ".", "report", "(", "e", ")", ";", "}", "}", "}" ]
Handle acker message @see com.lmax.disruptor.EventHandler#onEvent(java.lang.Object, long, boolean)
[ "Handle", "acker", "message" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/task/execute/spout/SpoutExecutors.java#L205-L250
25,248
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/spout/CheckpointSpout.java
CheckpointSpout.loadCheckpointState
private KeyValueState<String, CheckPointState> loadCheckpointState(Map conf, TopologyContext ctx) { String namespace = ctx.getThisComponentId() + "-" + ctx.getThisTaskId(); KeyValueState<String, CheckPointState> state = (KeyValueState<String, CheckPointState>) StateFactory.getState(namespace, conf, ctx); if (state.get(TX_STATE_KEY) == null) { CheckPointState txState = new CheckPointState(-1, CheckPointState.State.COMMITTED); state.put(TX_STATE_KEY, txState); state.commit(); LOG.debug("Initialized checkpoint spout state with txState {}", txState); } else { LOG.debug("Got checkpoint spout state {}", state.get(TX_STATE_KEY)); } return state; }
java
private KeyValueState<String, CheckPointState> loadCheckpointState(Map conf, TopologyContext ctx) { String namespace = ctx.getThisComponentId() + "-" + ctx.getThisTaskId(); KeyValueState<String, CheckPointState> state = (KeyValueState<String, CheckPointState>) StateFactory.getState(namespace, conf, ctx); if (state.get(TX_STATE_KEY) == null) { CheckPointState txState = new CheckPointState(-1, CheckPointState.State.COMMITTED); state.put(TX_STATE_KEY, txState); state.commit(); LOG.debug("Initialized checkpoint spout state with txState {}", txState); } else { LOG.debug("Got checkpoint spout state {}", state.get(TX_STATE_KEY)); } return state; }
[ "private", "KeyValueState", "<", "String", ",", "CheckPointState", ">", "loadCheckpointState", "(", "Map", "conf", ",", "TopologyContext", "ctx", ")", "{", "String", "namespace", "=", "ctx", ".", "getThisComponentId", "(", ")", "+", "\"-\"", "+", "ctx", ".", "getThisTaskId", "(", ")", ";", "KeyValueState", "<", "String", ",", "CheckPointState", ">", "state", "=", "(", "KeyValueState", "<", "String", ",", "CheckPointState", ">", ")", "StateFactory", ".", "getState", "(", "namespace", ",", "conf", ",", "ctx", ")", ";", "if", "(", "state", ".", "get", "(", "TX_STATE_KEY", ")", "==", "null", ")", "{", "CheckPointState", "txState", "=", "new", "CheckPointState", "(", "-", "1", ",", "CheckPointState", ".", "State", ".", "COMMITTED", ")", ";", "state", ".", "put", "(", "TX_STATE_KEY", ",", "txState", ")", ";", "state", ".", "commit", "(", ")", ";", "LOG", ".", "debug", "(", "\"Initialized checkpoint spout state with txState {}\"", ",", "txState", ")", ";", "}", "else", "{", "LOG", ".", "debug", "(", "\"Got checkpoint spout state {}\"", ",", "state", ".", "get", "(", "TX_STATE_KEY", ")", ")", ";", "}", "return", "state", ";", "}" ]
Loads the last saved checkpoint state the from persistent storage.
[ "Loads", "the", "last", "saved", "checkpoint", "state", "the", "from", "persistent", "storage", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/spout/CheckpointSpout.java#L134-L147
25,249
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.checkNeedUpdateTopologies
@SuppressWarnings("unchecked") private void checkNeedUpdateTopologies(Map<String, StateHeartbeat> localWorkerStats, Map<Integer, LocalAssignment> localAssignments) throws Exception { Set<String> topologies = new HashSet<>(); for (Map.Entry<Integer, LocalAssignment> entry : localAssignments.entrySet()) { topologies.add(entry.getValue().getTopologyId()); } for (StateHeartbeat stateHb : localWorkerStats.values()) { State state = stateHb.getState(); if (!state.equals(State.notStarted)) { String topologyId = stateHb.getHeartbeat().getTopologyId(); topologies.remove(topologyId); } } long currTime = System.currentTimeMillis(); Set<String> needRemoveTopologies = new HashSet<>(); for (String topologyId : topologies) { try { long lastModifyTime = StormConfig.get_supervisor_topology_Bianrymodify_time(conf, topologyId); if ((currTime - lastModifyTime) / 1000 < (JStormUtils.MIN_1 * 2)) { LOG.debug("less than 2 minute, removing " + topologyId); needRemoveTopologies.add(topologyId); } } catch (Exception e) { LOG.error("Failed to get last modified time for topology" + topologyId, e); needRemoveTopologies.add(topologyId); } } topologies.removeAll(needRemoveTopologies); if (topologies.size() > 0) { LOG.debug("Following topologies are going to re-download jars, " + topologies); } needDownloadTopologies.set(topologies); }
java
@SuppressWarnings("unchecked") private void checkNeedUpdateTopologies(Map<String, StateHeartbeat> localWorkerStats, Map<Integer, LocalAssignment> localAssignments) throws Exception { Set<String> topologies = new HashSet<>(); for (Map.Entry<Integer, LocalAssignment> entry : localAssignments.entrySet()) { topologies.add(entry.getValue().getTopologyId()); } for (StateHeartbeat stateHb : localWorkerStats.values()) { State state = stateHb.getState(); if (!state.equals(State.notStarted)) { String topologyId = stateHb.getHeartbeat().getTopologyId(); topologies.remove(topologyId); } } long currTime = System.currentTimeMillis(); Set<String> needRemoveTopologies = new HashSet<>(); for (String topologyId : topologies) { try { long lastModifyTime = StormConfig.get_supervisor_topology_Bianrymodify_time(conf, topologyId); if ((currTime - lastModifyTime) / 1000 < (JStormUtils.MIN_1 * 2)) { LOG.debug("less than 2 minute, removing " + topologyId); needRemoveTopologies.add(topologyId); } } catch (Exception e) { LOG.error("Failed to get last modified time for topology" + topologyId, e); needRemoveTopologies.add(topologyId); } } topologies.removeAll(needRemoveTopologies); if (topologies.size() > 0) { LOG.debug("Following topologies are going to re-download jars, " + topologies); } needDownloadTopologies.set(topologies); }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "private", "void", "checkNeedUpdateTopologies", "(", "Map", "<", "String", ",", "StateHeartbeat", ">", "localWorkerStats", ",", "Map", "<", "Integer", ",", "LocalAssignment", ">", "localAssignments", ")", "throws", "Exception", "{", "Set", "<", "String", ">", "topologies", "=", "new", "HashSet", "<>", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "Integer", ",", "LocalAssignment", ">", "entry", ":", "localAssignments", ".", "entrySet", "(", ")", ")", "{", "topologies", ".", "add", "(", "entry", ".", "getValue", "(", ")", ".", "getTopologyId", "(", ")", ")", ";", "}", "for", "(", "StateHeartbeat", "stateHb", ":", "localWorkerStats", ".", "values", "(", ")", ")", "{", "State", "state", "=", "stateHb", ".", "getState", "(", ")", ";", "if", "(", "!", "state", ".", "equals", "(", "State", ".", "notStarted", ")", ")", "{", "String", "topologyId", "=", "stateHb", ".", "getHeartbeat", "(", ")", ".", "getTopologyId", "(", ")", ";", "topologies", ".", "remove", "(", "topologyId", ")", ";", "}", "}", "long", "currTime", "=", "System", ".", "currentTimeMillis", "(", ")", ";", "Set", "<", "String", ">", "needRemoveTopologies", "=", "new", "HashSet", "<>", "(", ")", ";", "for", "(", "String", "topologyId", ":", "topologies", ")", "{", "try", "{", "long", "lastModifyTime", "=", "StormConfig", ".", "get_supervisor_topology_Bianrymodify_time", "(", "conf", ",", "topologyId", ")", ";", "if", "(", "(", "currTime", "-", "lastModifyTime", ")", "/", "1000", "<", "(", "JStormUtils", ".", "MIN_1", "*", "2", ")", ")", "{", "LOG", ".", "debug", "(", "\"less than 2 minute, removing \"", "+", "topologyId", ")", ";", "needRemoveTopologies", ".", "add", "(", "topologyId", ")", ";", "}", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "\"Failed to get last modified time for topology\"", "+", "topologyId", ",", "e", ")", ";", "needRemoveTopologies", ".", "add", "(", "topologyId", ")", ";", "}", "}", 
"topologies", ".", "removeAll", "(", "needRemoveTopologies", ")", ";", "if", "(", "topologies", ".", "size", "(", ")", ">", "0", ")", "{", "LOG", ".", "debug", "(", "\"Following topologies are going to re-download jars, \"", "+", "topologies", ")", ";", "}", "needDownloadTopologies", ".", "set", "(", "topologies", ")", ";", "}" ]
check all workers is failed or not
[ "check", "all", "workers", "is", "failed", "or", "not" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L267-L303
25,250
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.markAllNewWorkers
public void markAllNewWorkers(Map<Integer, String> workerIds) { int startTime = TimeUtils.current_time_secs(); for (Entry<Integer, String> entry : workerIds.entrySet()) { String oldWorkerIds = portToWorkerId.get(entry.getKey()); if (oldWorkerIds != null) { workerIdToStartTimeAndPort.remove(oldWorkerIds); // update portToWorkerId LOG.info("A port is still occupied by an old worker, remove useless " + oldWorkerIds + " from workerIdToStartTimeAndPort"); } portToWorkerId.put(entry.getKey(), entry.getValue()); workerIdToStartTimeAndPort.put(entry.getValue(), new Pair<>(startTime, entry.getKey())); } }
java
public void markAllNewWorkers(Map<Integer, String> workerIds) { int startTime = TimeUtils.current_time_secs(); for (Entry<Integer, String> entry : workerIds.entrySet()) { String oldWorkerIds = portToWorkerId.get(entry.getKey()); if (oldWorkerIds != null) { workerIdToStartTimeAndPort.remove(oldWorkerIds); // update portToWorkerId LOG.info("A port is still occupied by an old worker, remove useless " + oldWorkerIds + " from workerIdToStartTimeAndPort"); } portToWorkerId.put(entry.getKey(), entry.getValue()); workerIdToStartTimeAndPort.put(entry.getValue(), new Pair<>(startTime, entry.getKey())); } }
[ "public", "void", "markAllNewWorkers", "(", "Map", "<", "Integer", ",", "String", ">", "workerIds", ")", "{", "int", "startTime", "=", "TimeUtils", ".", "current_time_secs", "(", ")", ";", "for", "(", "Entry", "<", "Integer", ",", "String", ">", "entry", ":", "workerIds", ".", "entrySet", "(", ")", ")", "{", "String", "oldWorkerIds", "=", "portToWorkerId", ".", "get", "(", "entry", ".", "getKey", "(", ")", ")", ";", "if", "(", "oldWorkerIds", "!=", "null", ")", "{", "workerIdToStartTimeAndPort", ".", "remove", "(", "oldWorkerIds", ")", ";", "// update portToWorkerId", "LOG", ".", "info", "(", "\"A port is still occupied by an old worker, remove useless \"", "+", "oldWorkerIds", "+", "\" from workerIdToStartTimeAndPort\"", ")", ";", "}", "portToWorkerId", ".", "put", "(", "entry", ".", "getKey", "(", ")", ",", "entry", ".", "getValue", "(", ")", ")", ";", "workerIdToStartTimeAndPort", ".", "put", "(", "entry", ".", "getValue", "(", ")", ",", "new", "Pair", "<>", "(", "startTime", ",", "entry", ".", "getKey", "(", ")", ")", ")", ";", "}", "}" ]
mark all new Workers like 52b11418-7474-446d-bff5-0ecd68f4954f
[ "mark", "all", "new", "Workers", "like", "52b11418", "-", "7474", "-", "446d", "-", "bff5", "-", "0ecd68f4954f" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L308-L322
25,251
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.checkNewWorkers
public void checkNewWorkers(Map conf) throws IOException, InterruptedException { Set<String> workers = new HashSet<>(); for (Entry<String, Pair<Integer, Integer>> entry : workerIdToStartTimeAndPort.entrySet()) { String workerId = entry.getKey(); int startTime = entry.getValue().getFirst(); LocalState ls = StormConfig.worker_state(conf, workerId); WorkerHeartbeat whb = (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT); if (whb == null) { if ((TimeUtils.current_time_secs() - startTime) < JStormUtils.parseInt(conf.get(Config.SUPERVISOR_WORKER_START_TIMEOUT_SECS))) { LOG.info(workerId + " still hasn't started"); } else { LOG.error("Failed to start Worker " + workerId); workers.add(workerId); } } else { LOG.info("Successfully started worker " + workerId); workers.add(workerId); } } for (String workerId : workers) { Integer port = this.workerIdToStartTimeAndPort.get(workerId).getSecond(); this.workerIdToStartTimeAndPort.remove(workerId); this.portToWorkerId.remove(port); } }
java
public void checkNewWorkers(Map conf) throws IOException, InterruptedException { Set<String> workers = new HashSet<>(); for (Entry<String, Pair<Integer, Integer>> entry : workerIdToStartTimeAndPort.entrySet()) { String workerId = entry.getKey(); int startTime = entry.getValue().getFirst(); LocalState ls = StormConfig.worker_state(conf, workerId); WorkerHeartbeat whb = (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT); if (whb == null) { if ((TimeUtils.current_time_secs() - startTime) < JStormUtils.parseInt(conf.get(Config.SUPERVISOR_WORKER_START_TIMEOUT_SECS))) { LOG.info(workerId + " still hasn't started"); } else { LOG.error("Failed to start Worker " + workerId); workers.add(workerId); } } else { LOG.info("Successfully started worker " + workerId); workers.add(workerId); } } for (String workerId : workers) { Integer port = this.workerIdToStartTimeAndPort.get(workerId).getSecond(); this.workerIdToStartTimeAndPort.remove(workerId); this.portToWorkerId.remove(port); } }
[ "public", "void", "checkNewWorkers", "(", "Map", "conf", ")", "throws", "IOException", ",", "InterruptedException", "{", "Set", "<", "String", ">", "workers", "=", "new", "HashSet", "<>", "(", ")", ";", "for", "(", "Entry", "<", "String", ",", "Pair", "<", "Integer", ",", "Integer", ">", ">", "entry", ":", "workerIdToStartTimeAndPort", ".", "entrySet", "(", ")", ")", "{", "String", "workerId", "=", "entry", ".", "getKey", "(", ")", ";", "int", "startTime", "=", "entry", ".", "getValue", "(", ")", ".", "getFirst", "(", ")", ";", "LocalState", "ls", "=", "StormConfig", ".", "worker_state", "(", "conf", ",", "workerId", ")", ";", "WorkerHeartbeat", "whb", "=", "(", "WorkerHeartbeat", ")", "ls", ".", "get", "(", "Common", ".", "LS_WORKER_HEARTBEAT", ")", ";", "if", "(", "whb", "==", "null", ")", "{", "if", "(", "(", "TimeUtils", ".", "current_time_secs", "(", ")", "-", "startTime", ")", "<", "JStormUtils", ".", "parseInt", "(", "conf", ".", "get", "(", "Config", ".", "SUPERVISOR_WORKER_START_TIMEOUT_SECS", ")", ")", ")", "{", "LOG", ".", "info", "(", "workerId", "+", "\" still hasn't started\"", ")", ";", "}", "else", "{", "LOG", ".", "error", "(", "\"Failed to start Worker \"", "+", "workerId", ")", ";", "workers", ".", "add", "(", "workerId", ")", ";", "}", "}", "else", "{", "LOG", ".", "info", "(", "\"Successfully started worker \"", "+", "workerId", ")", ";", "workers", ".", "add", "(", "workerId", ")", ";", "}", "}", "for", "(", "String", "workerId", ":", "workers", ")", "{", "Integer", "port", "=", "this", ".", "workerIdToStartTimeAndPort", ".", "get", "(", "workerId", ")", ".", "getSecond", "(", ")", ";", "this", ".", "workerIdToStartTimeAndPort", ".", "remove", "(", "workerId", ")", ";", "this", ".", "portToWorkerId", ".", "remove", "(", "port", ")", ";", "}", "}" ]
check whether timestamp of new workers is not > SUPERVISOR_WORKER_START_TIMEOUT_SECS, otherwise mark as failed
[ "check", "whether", "timestamp", "of", "new", "workers", "is", "not", ">", "SUPERVISOR_WORKER_START_TIMEOUT_SECS", "otherwise", "mark", "as", "failed" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L327-L352
25,252
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.matchesAssignment
public boolean matchesAssignment(WorkerHeartbeat whb, Map<Integer, LocalAssignment> assignedTasks) { boolean isMatch = true; LocalAssignment localAssignment = assignedTasks.get(whb.getPort()); if (localAssignment == null) { LOG.debug("Following worker has been removed, port=" + whb.getPort() + ", assignedTasks=" + assignedTasks); isMatch = false; } else if (!whb.getTopologyId().equals(localAssignment.getTopologyId())) { // topology id not equal LOG.info("topology id not equal whb=" + whb.getTopologyId() + ",localAssignment=" + localAssignment.getTopologyId()); isMatch = false; }/* * else if (!(whb.getTaskIds().equals(localAssignment.getTaskIds()))) { // task-id isn't equal LOG.info("task-id isn't equal whb=" + whb.getTaskIds() + * ",localAssignment=" + localAssignment.getTaskIds()); isMatch = false; } */ return isMatch; }
java
public boolean matchesAssignment(WorkerHeartbeat whb, Map<Integer, LocalAssignment> assignedTasks) { boolean isMatch = true; LocalAssignment localAssignment = assignedTasks.get(whb.getPort()); if (localAssignment == null) { LOG.debug("Following worker has been removed, port=" + whb.getPort() + ", assignedTasks=" + assignedTasks); isMatch = false; } else if (!whb.getTopologyId().equals(localAssignment.getTopologyId())) { // topology id not equal LOG.info("topology id not equal whb=" + whb.getTopologyId() + ",localAssignment=" + localAssignment.getTopologyId()); isMatch = false; }/* * else if (!(whb.getTaskIds().equals(localAssignment.getTaskIds()))) { // task-id isn't equal LOG.info("task-id isn't equal whb=" + whb.getTaskIds() + * ",localAssignment=" + localAssignment.getTaskIds()); isMatch = false; } */ return isMatch; }
[ "public", "boolean", "matchesAssignment", "(", "WorkerHeartbeat", "whb", ",", "Map", "<", "Integer", ",", "LocalAssignment", ">", "assignedTasks", ")", "{", "boolean", "isMatch", "=", "true", ";", "LocalAssignment", "localAssignment", "=", "assignedTasks", ".", "get", "(", "whb", ".", "getPort", "(", ")", ")", ";", "if", "(", "localAssignment", "==", "null", ")", "{", "LOG", ".", "debug", "(", "\"Following worker has been removed, port=\"", "+", "whb", ".", "getPort", "(", ")", "+", "\", assignedTasks=\"", "+", "assignedTasks", ")", ";", "isMatch", "=", "false", ";", "}", "else", "if", "(", "!", "whb", ".", "getTopologyId", "(", ")", ".", "equals", "(", "localAssignment", ".", "getTopologyId", "(", ")", ")", ")", "{", "// topology id not equal", "LOG", ".", "info", "(", "\"topology id not equal whb=\"", "+", "whb", ".", "getTopologyId", "(", ")", "+", "\",localAssignment=\"", "+", "localAssignment", ".", "getTopologyId", "(", ")", ")", ";", "isMatch", "=", "false", ";", "}", "/*\n * else if (!(whb.getTaskIds().equals(localAssignment.getTaskIds()))) { // task-id isn't equal LOG.info(\"task-id isn't equal whb=\" + whb.getTaskIds() +\n * \",localAssignment=\" + localAssignment.getTaskIds()); isMatch = false; }\n */", "return", "isMatch", ";", "}" ]
check whether the worker heartbeat is allowed in the assignedTasks @param whb WorkerHeartbeat @param assignedTasks assigned tasks @return if true, the assignments(LS-LOCAL-ASSIGNMENTS) match with worker heartbeat, false otherwise
[ "check", "whether", "the", "worker", "heartbeat", "is", "allowed", "in", "the", "assignedTasks" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L437-L453
25,253
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.readWorkerHeartbeats
public Map<String, WorkerHeartbeat> readWorkerHeartbeats(Map conf) throws Exception { Map<String, WorkerHeartbeat> workerHeartbeats = new HashMap<>(); // get the path: STORM-LOCAL-DIR/workers String path = StormConfig.worker_root(conf); List<String> workerIds = PathUtils.read_dir_contents(path); if (workerIds == null) { LOG.info("No worker dir under " + path); return workerHeartbeats; } for (String workerId : workerIds) { WorkerHeartbeat whb = readWorkerHeartbeat(conf, workerId); // ATTENTION: whb can be null workerHeartbeats.put(workerId, whb); } return workerHeartbeats; }
java
public Map<String, WorkerHeartbeat> readWorkerHeartbeats(Map conf) throws Exception { Map<String, WorkerHeartbeat> workerHeartbeats = new HashMap<>(); // get the path: STORM-LOCAL-DIR/workers String path = StormConfig.worker_root(conf); List<String> workerIds = PathUtils.read_dir_contents(path); if (workerIds == null) { LOG.info("No worker dir under " + path); return workerHeartbeats; } for (String workerId : workerIds) { WorkerHeartbeat whb = readWorkerHeartbeat(conf, workerId); // ATTENTION: whb can be null workerHeartbeats.put(workerId, whb); } return workerHeartbeats; }
[ "public", "Map", "<", "String", ",", "WorkerHeartbeat", ">", "readWorkerHeartbeats", "(", "Map", "conf", ")", "throws", "Exception", "{", "Map", "<", "String", ",", "WorkerHeartbeat", ">", "workerHeartbeats", "=", "new", "HashMap", "<>", "(", ")", ";", "// get the path: STORM-LOCAL-DIR/workers", "String", "path", "=", "StormConfig", ".", "worker_root", "(", "conf", ")", ";", "List", "<", "String", ">", "workerIds", "=", "PathUtils", ".", "read_dir_contents", "(", "path", ")", ";", "if", "(", "workerIds", "==", "null", ")", "{", "LOG", ".", "info", "(", "\"No worker dir under \"", "+", "path", ")", ";", "return", "workerHeartbeats", ";", "}", "for", "(", "String", "workerId", ":", "workerIds", ")", "{", "WorkerHeartbeat", "whb", "=", "readWorkerHeartbeat", "(", "conf", ",", "workerId", ")", ";", "// ATTENTION: whb can be null", "workerHeartbeats", ".", "put", "(", "workerId", ",", "whb", ")", ";", "}", "return", "workerHeartbeats", ";", "}" ]
get all workers heartbeats of the supervisor @param conf conf @throws IOException
[ "get", "all", "workers", "heartbeats", "of", "the", "supervisor" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L461-L480
25,254
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.readWorkerHeartbeat
public WorkerHeartbeat readWorkerHeartbeat(Map conf, String workerId) throws Exception { try { LocalState ls = StormConfig.worker_state(conf, workerId); return (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT); } catch (Exception e) { LOG.error("Failed to get heartbeat for worker:{}", workerId, e); return null; } }
java
public WorkerHeartbeat readWorkerHeartbeat(Map conf, String workerId) throws Exception { try { LocalState ls = StormConfig.worker_state(conf, workerId); return (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT); } catch (Exception e) { LOG.error("Failed to get heartbeat for worker:{}", workerId, e); return null; } }
[ "public", "WorkerHeartbeat", "readWorkerHeartbeat", "(", "Map", "conf", ",", "String", "workerId", ")", "throws", "Exception", "{", "try", "{", "LocalState", "ls", "=", "StormConfig", ".", "worker_state", "(", "conf", ",", "workerId", ")", ";", "return", "(", "WorkerHeartbeat", ")", "ls", ".", "get", "(", "Common", ".", "LS_WORKER_HEARTBEAT", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "\"Failed to get heartbeat for worker:{}\"", ",", "workerId", ",", "e", ")", ";", "return", "null", ";", "}", "}" ]
get worker heartbeat by workerId @param conf conf @param workerId worker id @return WorkerHeartbeat
[ "get", "worker", "heartbeat", "by", "workerId" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L489-L497
25,255
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.launchWorker
public void launchWorker(Map conf, IContext sharedContext, String topologyId, String supervisorId, Integer port, String workerId, ConcurrentHashMap<String, String> workerThreadPidsAtom) throws Exception { String pid = UUID.randomUUID().toString(); WorkerShutdown worker = Worker.mk_worker(conf, sharedContext, topologyId, supervisorId, port, workerId, null); ProcessSimulator.registerProcess(pid, worker); workerThreadPidsAtom.put(workerId, pid); }
java
public void launchWorker(Map conf, IContext sharedContext, String topologyId, String supervisorId, Integer port, String workerId, ConcurrentHashMap<String, String> workerThreadPidsAtom) throws Exception { String pid = UUID.randomUUID().toString(); WorkerShutdown worker = Worker.mk_worker(conf, sharedContext, topologyId, supervisorId, port, workerId, null); ProcessSimulator.registerProcess(pid, worker); workerThreadPidsAtom.put(workerId, pid); }
[ "public", "void", "launchWorker", "(", "Map", "conf", ",", "IContext", "sharedContext", ",", "String", "topologyId", ",", "String", "supervisorId", ",", "Integer", "port", ",", "String", "workerId", ",", "ConcurrentHashMap", "<", "String", ",", "String", ">", "workerThreadPidsAtom", ")", "throws", "Exception", "{", "String", "pid", "=", "UUID", ".", "randomUUID", "(", ")", ".", "toString", "(", ")", ";", "WorkerShutdown", "worker", "=", "Worker", ".", "mk_worker", "(", "conf", ",", "sharedContext", ",", "topologyId", ",", "supervisorId", ",", "port", ",", "workerId", ",", "null", ")", ";", "ProcessSimulator", ".", "registerProcess", "(", "pid", ",", "worker", ")", ";", "workerThreadPidsAtom", ".", "put", "(", "workerId", ",", "pid", ")", ";", "}" ]
launch a worker in local mode
[ "launch", "a", "worker", "in", "local", "mode" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L502-L509
25,256
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.setFilterJars
private Set<String> setFilterJars(Map totalConf) { Set<String> filterJars = new HashSet<>(); boolean enableClassLoader = ConfigExtension.isEnableTopologyClassLoader(totalConf); if (!enableClassLoader) { // avoid logback vs log4j conflict boolean enableLog4j = false; String userDefLog4jConf = ConfigExtension.getUserDefinedLog4jConf(totalConf); if (!StringUtils.isBlank(userDefLog4jConf)) { enableLog4j = true; } if (enableLog4j) { filterJars.add("log4j-over-slf4j"); filterJars.add("logback-core"); filterJars.add("logback-classic"); } else { filterJars.add("slf4j-log4j"); filterJars.add("log4j"); } } String excludeJars = (String) totalConf.get("exclude.jars"); if (!StringUtils.isBlank(excludeJars)) { String[] jars = excludeJars.split(","); for (String jar : jars) { filterJars.add(jar); } } LOG.info("Remove jars " + filterJars); return filterJars; }
java
private Set<String> setFilterJars(Map totalConf) { Set<String> filterJars = new HashSet<>(); boolean enableClassLoader = ConfigExtension.isEnableTopologyClassLoader(totalConf); if (!enableClassLoader) { // avoid logback vs log4j conflict boolean enableLog4j = false; String userDefLog4jConf = ConfigExtension.getUserDefinedLog4jConf(totalConf); if (!StringUtils.isBlank(userDefLog4jConf)) { enableLog4j = true; } if (enableLog4j) { filterJars.add("log4j-over-slf4j"); filterJars.add("logback-core"); filterJars.add("logback-classic"); } else { filterJars.add("slf4j-log4j"); filterJars.add("log4j"); } } String excludeJars = (String) totalConf.get("exclude.jars"); if (!StringUtils.isBlank(excludeJars)) { String[] jars = excludeJars.split(","); for (String jar : jars) { filterJars.add(jar); } } LOG.info("Remove jars " + filterJars); return filterJars; }
[ "private", "Set", "<", "String", ">", "setFilterJars", "(", "Map", "totalConf", ")", "{", "Set", "<", "String", ">", "filterJars", "=", "new", "HashSet", "<>", "(", ")", ";", "boolean", "enableClassLoader", "=", "ConfigExtension", ".", "isEnableTopologyClassLoader", "(", "totalConf", ")", ";", "if", "(", "!", "enableClassLoader", ")", "{", "// avoid logback vs log4j conflict", "boolean", "enableLog4j", "=", "false", ";", "String", "userDefLog4jConf", "=", "ConfigExtension", ".", "getUserDefinedLog4jConf", "(", "totalConf", ")", ";", "if", "(", "!", "StringUtils", ".", "isBlank", "(", "userDefLog4jConf", ")", ")", "{", "enableLog4j", "=", "true", ";", "}", "if", "(", "enableLog4j", ")", "{", "filterJars", ".", "add", "(", "\"log4j-over-slf4j\"", ")", ";", "filterJars", ".", "add", "(", "\"logback-core\"", ")", ";", "filterJars", ".", "add", "(", "\"logback-classic\"", ")", ";", "}", "else", "{", "filterJars", ".", "add", "(", "\"slf4j-log4j\"", ")", ";", "filterJars", ".", "add", "(", "\"log4j\"", ")", ";", "}", "}", "String", "excludeJars", "=", "(", "String", ")", "totalConf", ".", "get", "(", "\"exclude.jars\"", ")", ";", "if", "(", "!", "StringUtils", ".", "isBlank", "(", "excludeJars", ")", ")", "{", "String", "[", "]", "jars", "=", "excludeJars", ".", "split", "(", "\",\"", ")", ";", "for", "(", "String", "jar", ":", "jars", ")", "{", "filterJars", ".", "add", "(", "jar", ")", ";", "}", "}", "LOG", ".", "info", "(", "\"Remove jars \"", "+", "filterJars", ")", ";", "return", "filterJars", ";", "}" ]
filter conflict jar
[ "filter", "conflict", "jar" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L512-L546
25,257
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.getWorkerMemParameter
public String getWorkerMemParameter(LocalAssignment assignment, Map totalConf, String topologyId, Integer port) { long memSize = assignment.getMem(); long memMinSize = ConfigExtension.getMemMinSizePerWorker(totalConf); long memGsize = memSize / JStormUtils.SIZE_1_G; int gcThreadsNum = memGsize > 4 ? (int) (memGsize * 1.5) : 4; String childOpts = getChildOpts(totalConf); childOpts += getGcDumpParam(topologyId, port, totalConf); StringBuilder commandSB = new StringBuilder(); memMinSize = memMinSize < memSize ? memMinSize : memSize; commandSB.append(" -Xms").append(memMinSize).append(" -Xmx").append(memSize).append(" "); if (memMinSize <= (memSize / 2)) { commandSB.append(" -Xmn").append(memMinSize / 2).append(" "); } else { commandSB.append(" -Xmn").append(memSize / 2).append(" "); } // PermSize is deprecated in jdk1.8 if (JDKInfo.isJdk7() || JDKInfo.lessThanJdk7()) { if (memGsize >= 2) { commandSB.append(" -XX:PermSize=").append(memSize / 32); } else { commandSB.append(" -XX:PermSize=").append(memSize / 16); } commandSB.append(" -XX:MaxPermSize=").append(memSize / 16); } else if (JDKInfo.isJdk8()) { if (memGsize >= 2) { commandSB.append(" -XX:MetaspaceSize=").append(memSize / 32); } else { commandSB.append(" -XX:MetaspaceSize=").append(memSize / 16); } commandSB.append(" -XX:MaxMetaspaceSize=").append(memSize / 16); } commandSB.append(" -XX:ParallelGCThreads=").append(gcThreadsNum); commandSB.append(" ").append(childOpts); if (!StringUtils.isBlank(assignment.getJvm())) { commandSB.append(" ").append(assignment.getJvm()); } return commandSB.toString(); }
java
public String getWorkerMemParameter(LocalAssignment assignment, Map totalConf, String topologyId, Integer port) { long memSize = assignment.getMem(); long memMinSize = ConfigExtension.getMemMinSizePerWorker(totalConf); long memGsize = memSize / JStormUtils.SIZE_1_G; int gcThreadsNum = memGsize > 4 ? (int) (memGsize * 1.5) : 4; String childOpts = getChildOpts(totalConf); childOpts += getGcDumpParam(topologyId, port, totalConf); StringBuilder commandSB = new StringBuilder(); memMinSize = memMinSize < memSize ? memMinSize : memSize; commandSB.append(" -Xms").append(memMinSize).append(" -Xmx").append(memSize).append(" "); if (memMinSize <= (memSize / 2)) { commandSB.append(" -Xmn").append(memMinSize / 2).append(" "); } else { commandSB.append(" -Xmn").append(memSize / 2).append(" "); } // PermSize is deprecated in jdk1.8 if (JDKInfo.isJdk7() || JDKInfo.lessThanJdk7()) { if (memGsize >= 2) { commandSB.append(" -XX:PermSize=").append(memSize / 32); } else { commandSB.append(" -XX:PermSize=").append(memSize / 16); } commandSB.append(" -XX:MaxPermSize=").append(memSize / 16); } else if (JDKInfo.isJdk8()) { if (memGsize >= 2) { commandSB.append(" -XX:MetaspaceSize=").append(memSize / 32); } else { commandSB.append(" -XX:MetaspaceSize=").append(memSize / 16); } commandSB.append(" -XX:MaxMetaspaceSize=").append(memSize / 16); } commandSB.append(" -XX:ParallelGCThreads=").append(gcThreadsNum); commandSB.append(" ").append(childOpts); if (!StringUtils.isBlank(assignment.getJvm())) { commandSB.append(" ").append(assignment.getJvm()); } return commandSB.toString(); }
[ "public", "String", "getWorkerMemParameter", "(", "LocalAssignment", "assignment", ",", "Map", "totalConf", ",", "String", "topologyId", ",", "Integer", "port", ")", "{", "long", "memSize", "=", "assignment", ".", "getMem", "(", ")", ";", "long", "memMinSize", "=", "ConfigExtension", ".", "getMemMinSizePerWorker", "(", "totalConf", ")", ";", "long", "memGsize", "=", "memSize", "/", "JStormUtils", ".", "SIZE_1_G", ";", "int", "gcThreadsNum", "=", "memGsize", ">", "4", "?", "(", "int", ")", "(", "memGsize", "*", "1.5", ")", ":", "4", ";", "String", "childOpts", "=", "getChildOpts", "(", "totalConf", ")", ";", "childOpts", "+=", "getGcDumpParam", "(", "topologyId", ",", "port", ",", "totalConf", ")", ";", "StringBuilder", "commandSB", "=", "new", "StringBuilder", "(", ")", ";", "memMinSize", "=", "memMinSize", "<", "memSize", "?", "memMinSize", ":", "memSize", ";", "commandSB", ".", "append", "(", "\" -Xms\"", ")", ".", "append", "(", "memMinSize", ")", ".", "append", "(", "\" -Xmx\"", ")", ".", "append", "(", "memSize", ")", ".", "append", "(", "\" \"", ")", ";", "if", "(", "memMinSize", "<=", "(", "memSize", "/", "2", ")", ")", "{", "commandSB", ".", "append", "(", "\" -Xmn\"", ")", ".", "append", "(", "memMinSize", "/", "2", ")", ".", "append", "(", "\" \"", ")", ";", "}", "else", "{", "commandSB", ".", "append", "(", "\" -Xmn\"", ")", ".", "append", "(", "memSize", "/", "2", ")", ".", "append", "(", "\" \"", ")", ";", "}", "// PermSize is deprecated in jdk1.8", "if", "(", "JDKInfo", ".", "isJdk7", "(", ")", "||", "JDKInfo", ".", "lessThanJdk7", "(", ")", ")", "{", "if", "(", "memGsize", ">=", "2", ")", "{", "commandSB", ".", "append", "(", "\" -XX:PermSize=\"", ")", ".", "append", "(", "memSize", "/", "32", ")", ";", "}", "else", "{", "commandSB", ".", "append", "(", "\" -XX:PermSize=\"", ")", ".", "append", "(", "memSize", "/", "16", ")", ";", "}", "commandSB", ".", "append", "(", "\" -XX:MaxPermSize=\"", ")", ".", "append", "(", "memSize", "/", "16", ")", ";", 
"}", "else", "if", "(", "JDKInfo", ".", "isJdk8", "(", ")", ")", "{", "if", "(", "memGsize", ">=", "2", ")", "{", "commandSB", ".", "append", "(", "\" -XX:MetaspaceSize=\"", ")", ".", "append", "(", "memSize", "/", "32", ")", ";", "}", "else", "{", "commandSB", ".", "append", "(", "\" -XX:MetaspaceSize=\"", ")", ".", "append", "(", "memSize", "/", "16", ")", ";", "}", "commandSB", ".", "append", "(", "\" -XX:MaxMetaspaceSize=\"", ")", ".", "append", "(", "memSize", "/", "16", ")", ";", "}", "commandSB", ".", "append", "(", "\" -XX:ParallelGCThreads=\"", ")", ".", "append", "(", "gcThreadsNum", ")", ";", "commandSB", ".", "append", "(", "\" \"", ")", ".", "append", "(", "childOpts", ")", ";", "if", "(", "!", "StringUtils", ".", "isBlank", "(", "assignment", ".", "getJvm", "(", ")", ")", ")", "{", "commandSB", ".", "append", "(", "\" \"", ")", ".", "append", "(", "assignment", ".", "getJvm", "(", ")", ")", ";", "}", "return", "commandSB", ".", "toString", "(", ")", ";", "}" ]
Get worker's JVM memory setting
[ "Get", "worker", "s", "JVM", "memory", "setting" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L729-L773
25,258
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.getWorkerGcParameter
public String getWorkerGcParameter(LocalAssignment assignment, Map totalConf, String topologyId, Integer port) { StringBuilder commandSB = new StringBuilder(); GcStrategy gcStrategy = GcStrategyManager.getGcStrategy(totalConf); return gcStrategy.toString(); }
java
public String getWorkerGcParameter(LocalAssignment assignment, Map totalConf, String topologyId, Integer port) { StringBuilder commandSB = new StringBuilder(); GcStrategy gcStrategy = GcStrategyManager.getGcStrategy(totalConf); return gcStrategy.toString(); }
[ "public", "String", "getWorkerGcParameter", "(", "LocalAssignment", "assignment", ",", "Map", "totalConf", ",", "String", "topologyId", ",", "Integer", "port", ")", "{", "StringBuilder", "commandSB", "=", "new", "StringBuilder", "(", ")", ";", "GcStrategy", "gcStrategy", "=", "GcStrategyManager", ".", "getGcStrategy", "(", "totalConf", ")", ";", "return", "gcStrategy", ".", "toString", "(", ")", ";", "}" ]
Get worker's JVM gc setting
[ "Get", "worker", "s", "JVM", "gc", "setting" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L778-L786
25,259
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java
SyncProcessEvent.launchWorker
public void launchWorker(Map conf, IContext sharedContext, String topologyId, String supervisorId, Integer port, String workerId, LocalAssignment assignment) throws IOException { // get supervisor conf Map stormConf = StormConfig.read_supervisor_topology_conf(conf, topologyId); String stormHome = System.getProperty("jstorm.home"); if (StringUtils.isBlank(stormHome)) { stormHome = "./"; } // get worker conf String topologyRoot = StormConfig.supervisor_stormdist_root(conf, topologyId); Map workerConf = StormConfig.read_topology_conf(topologyRoot, topologyId); Map totalConf = new HashMap(); totalConf.putAll(conf); totalConf.putAll(stormConf); totalConf.putAll(workerConf); /** * don't use environment.putAll(System.getenv); */ Map<String, String> environment = new HashMap<>(); if (ConfigExtension.getWorkerRedirectOutput(totalConf)) { environment.put("REDIRECT", "true"); } else { environment.put("REDIRECT", "false"); } environment.put("LD_LIBRARY_PATH", (String) totalConf.get(Config.JAVA_LIBRARY_PATH)); environment.put("jstorm.home", stormHome); environment.put("jstorm.workerId", workerId); environment.put("jstorm.on.yarn", isJstormOnYarn ? "1" : "0"); String launcherCmd = getLauncherParameter(assignment, totalConf, stormHome, topologyId, port); String workerCmd = getWorkerParameter(assignment, totalConf, stormHome, topologyId, supervisorId, workerId, port); String cmd = launcherCmd + " " + workerCmd; cmd = cmd.replace("%JSTORM_HOME%", stormHome); LOG.info("Launching worker with command: " + cmd); LOG.info("Environment:" + environment.toString()); /** * if run on yarn, set backend false, otherwise set true */ boolean backend = !isJstormOnYarn; LOG.info("backend mode is " + backend); JStormUtils.launchProcess(cmd, environment, backend); }
java
public void launchWorker(Map conf, IContext sharedContext, String topologyId, String supervisorId, Integer port, String workerId, LocalAssignment assignment) throws IOException { // get supervisor conf Map stormConf = StormConfig.read_supervisor_topology_conf(conf, topologyId); String stormHome = System.getProperty("jstorm.home"); if (StringUtils.isBlank(stormHome)) { stormHome = "./"; } // get worker conf String topologyRoot = StormConfig.supervisor_stormdist_root(conf, topologyId); Map workerConf = StormConfig.read_topology_conf(topologyRoot, topologyId); Map totalConf = new HashMap(); totalConf.putAll(conf); totalConf.putAll(stormConf); totalConf.putAll(workerConf); /** * don't use environment.putAll(System.getenv); */ Map<String, String> environment = new HashMap<>(); if (ConfigExtension.getWorkerRedirectOutput(totalConf)) { environment.put("REDIRECT", "true"); } else { environment.put("REDIRECT", "false"); } environment.put("LD_LIBRARY_PATH", (String) totalConf.get(Config.JAVA_LIBRARY_PATH)); environment.put("jstorm.home", stormHome); environment.put("jstorm.workerId", workerId); environment.put("jstorm.on.yarn", isJstormOnYarn ? "1" : "0"); String launcherCmd = getLauncherParameter(assignment, totalConf, stormHome, topologyId, port); String workerCmd = getWorkerParameter(assignment, totalConf, stormHome, topologyId, supervisorId, workerId, port); String cmd = launcherCmd + " " + workerCmd; cmd = cmd.replace("%JSTORM_HOME%", stormHome); LOG.info("Launching worker with command: " + cmd); LOG.info("Environment:" + environment.toString()); /** * if run on yarn, set backend false, otherwise set true */ boolean backend = !isJstormOnYarn; LOG.info("backend mode is " + backend); JStormUtils.launchProcess(cmd, environment, backend); }
[ "public", "void", "launchWorker", "(", "Map", "conf", ",", "IContext", "sharedContext", ",", "String", "topologyId", ",", "String", "supervisorId", ",", "Integer", "port", ",", "String", "workerId", ",", "LocalAssignment", "assignment", ")", "throws", "IOException", "{", "// get supervisor conf", "Map", "stormConf", "=", "StormConfig", ".", "read_supervisor_topology_conf", "(", "conf", ",", "topologyId", ")", ";", "String", "stormHome", "=", "System", ".", "getProperty", "(", "\"jstorm.home\"", ")", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "stormHome", ")", ")", "{", "stormHome", "=", "\"./\"", ";", "}", "// get worker conf", "String", "topologyRoot", "=", "StormConfig", ".", "supervisor_stormdist_root", "(", "conf", ",", "topologyId", ")", ";", "Map", "workerConf", "=", "StormConfig", ".", "read_topology_conf", "(", "topologyRoot", ",", "topologyId", ")", ";", "Map", "totalConf", "=", "new", "HashMap", "(", ")", ";", "totalConf", ".", "putAll", "(", "conf", ")", ";", "totalConf", ".", "putAll", "(", "stormConf", ")", ";", "totalConf", ".", "putAll", "(", "workerConf", ")", ";", "/**\n * don't use environment.putAll(System.getenv);\n */", "Map", "<", "String", ",", "String", ">", "environment", "=", "new", "HashMap", "<>", "(", ")", ";", "if", "(", "ConfigExtension", ".", "getWorkerRedirectOutput", "(", "totalConf", ")", ")", "{", "environment", ".", "put", "(", "\"REDIRECT\"", ",", "\"true\"", ")", ";", "}", "else", "{", "environment", ".", "put", "(", "\"REDIRECT\"", ",", "\"false\"", ")", ";", "}", "environment", ".", "put", "(", "\"LD_LIBRARY_PATH\"", ",", "(", "String", ")", "totalConf", ".", "get", "(", "Config", ".", "JAVA_LIBRARY_PATH", ")", ")", ";", "environment", ".", "put", "(", "\"jstorm.home\"", ",", "stormHome", ")", ";", "environment", ".", "put", "(", "\"jstorm.workerId\"", ",", "workerId", ")", ";", "environment", ".", "put", "(", "\"jstorm.on.yarn\"", ",", "isJstormOnYarn", "?", "\"1\"", ":", "\"0\"", ")", ";", "String", 
"launcherCmd", "=", "getLauncherParameter", "(", "assignment", ",", "totalConf", ",", "stormHome", ",", "topologyId", ",", "port", ")", ";", "String", "workerCmd", "=", "getWorkerParameter", "(", "assignment", ",", "totalConf", ",", "stormHome", ",", "topologyId", ",", "supervisorId", ",", "workerId", ",", "port", ")", ";", "String", "cmd", "=", "launcherCmd", "+", "\" \"", "+", "workerCmd", ";", "cmd", "=", "cmd", ".", "replace", "(", "\"%JSTORM_HOME%\"", ",", "stormHome", ")", ";", "LOG", ".", "info", "(", "\"Launching worker with command: \"", "+", "cmd", ")", ";", "LOG", ".", "info", "(", "\"Environment:\"", "+", "environment", ".", "toString", "(", ")", ")", ";", "/**\n * if run on yarn, set backend false, otherwise set true\n */", "boolean", "backend", "=", "!", "isJstormOnYarn", ";", "LOG", ".", "info", "(", "\"backend mode is \"", "+", "backend", ")", ";", "JStormUtils", ".", "launchProcess", "(", "cmd", ",", "environment", ",", "backend", ")", ";", "}" ]
launch a worker in distributed mode @throws IOException
[ "launch", "a", "worker", "in", "distributed", "mode" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SyncProcessEvent.java#L895-L947
25,260
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.needsSchedulingTopologies
public List<TopologyDetails> needsSchedulingTopologies(Topologies topologies) { List<TopologyDetails> ret = new ArrayList<>(); for (TopologyDetails topology : topologies.getTopologies()) { if (needsScheduling(topology)) { ret.add(topology); } } return ret; }
java
public List<TopologyDetails> needsSchedulingTopologies(Topologies topologies) { List<TopologyDetails> ret = new ArrayList<>(); for (TopologyDetails topology : topologies.getTopologies()) { if (needsScheduling(topology)) { ret.add(topology); } } return ret; }
[ "public", "List", "<", "TopologyDetails", ">", "needsSchedulingTopologies", "(", "Topologies", "topologies", ")", "{", "List", "<", "TopologyDetails", ">", "ret", "=", "new", "ArrayList", "<>", "(", ")", ";", "for", "(", "TopologyDetails", "topology", ":", "topologies", ".", "getTopologies", "(", ")", ")", "{", "if", "(", "needsScheduling", "(", "topology", ")", ")", "{", "ret", ".", "add", "(", "topology", ")", ";", "}", "}", "return", "ret", ";", "}" ]
Gets all the topologies which needs scheduling.
[ "Gets", "all", "the", "topologies", "which", "needs", "scheduling", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L100-L109
25,261
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.needsScheduling
public boolean needsScheduling(TopologyDetails topology) { int desiredNumWorkers = topology.getNumWorkers(); int assignedNumWorkers = this.getAssignedNumWorkers(topology); if (desiredNumWorkers > assignedNumWorkers) { return true; } return this.getUnassignedExecutors(topology).size() > 0; }
java
public boolean needsScheduling(TopologyDetails topology) { int desiredNumWorkers = topology.getNumWorkers(); int assignedNumWorkers = this.getAssignedNumWorkers(topology); if (desiredNumWorkers > assignedNumWorkers) { return true; } return this.getUnassignedExecutors(topology).size() > 0; }
[ "public", "boolean", "needsScheduling", "(", "TopologyDetails", "topology", ")", "{", "int", "desiredNumWorkers", "=", "topology", ".", "getNumWorkers", "(", ")", ";", "int", "assignedNumWorkers", "=", "this", ".", "getAssignedNumWorkers", "(", "topology", ")", ";", "if", "(", "desiredNumWorkers", ">", "assignedNumWorkers", ")", "{", "return", "true", ";", "}", "return", "this", ".", "getUnassignedExecutors", "(", "topology", ")", ".", "size", "(", ")", ">", "0", ";", "}" ]
Does the topology need scheduling? A topology needs scheduling if one of the following conditions holds: <ul> <li>Although the topology is assigned slots, but is squeezed. i.e. the topology is assigned less slots than desired.</li> <li>There are unassigned executors in this topology</li> </ul>
[ "Does", "the", "topology", "need", "scheduling?" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L120-L128
25,262
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getNeedsSchedulingExecutorToComponents
public Map<ExecutorDetails, String> getNeedsSchedulingExecutorToComponents(TopologyDetails topology) { Collection<ExecutorDetails> allExecutors = new HashSet<>(topology.getExecutors()); SchedulerAssignment assignment = this.assignments.get(topology.getId()); if (assignment != null) { Collection<ExecutorDetails> assignedExecutors = assignment.getExecutors(); allExecutors.removeAll(assignedExecutors); } return topology.selectExecutorToComponent(allExecutors); }
java
public Map<ExecutorDetails, String> getNeedsSchedulingExecutorToComponents(TopologyDetails topology) { Collection<ExecutorDetails> allExecutors = new HashSet<>(topology.getExecutors()); SchedulerAssignment assignment = this.assignments.get(topology.getId()); if (assignment != null) { Collection<ExecutorDetails> assignedExecutors = assignment.getExecutors(); allExecutors.removeAll(assignedExecutors); } return topology.selectExecutorToComponent(allExecutors); }
[ "public", "Map", "<", "ExecutorDetails", ",", "String", ">", "getNeedsSchedulingExecutorToComponents", "(", "TopologyDetails", "topology", ")", "{", "Collection", "<", "ExecutorDetails", ">", "allExecutors", "=", "new", "HashSet", "<>", "(", "topology", ".", "getExecutors", "(", ")", ")", ";", "SchedulerAssignment", "assignment", "=", "this", ".", "assignments", ".", "get", "(", "topology", ".", "getId", "(", ")", ")", ";", "if", "(", "assignment", "!=", "null", ")", "{", "Collection", "<", "ExecutorDetails", ">", "assignedExecutors", "=", "assignment", ".", "getExecutors", "(", ")", ";", "allExecutors", ".", "removeAll", "(", "assignedExecutors", ")", ";", "}", "return", "topology", ".", "selectExecutorToComponent", "(", "allExecutors", ")", ";", "}" ]
Gets a executor -> component-id map which needs scheduling in this topology.
[ "Gets", "a", "executor", "-", ">", "component", "-", "id", "map", "which", "needs", "scheduling", "in", "this", "topology", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L133-L143
25,263
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getNeedsSchedulingComponentToExecutors
public Map<String, List<ExecutorDetails>> getNeedsSchedulingComponentToExecutors(TopologyDetails topology) { Map<ExecutorDetails, String> executorToComponents = this.getNeedsSchedulingExecutorToComponents(topology); Map<String, List<ExecutorDetails>> componentToExecutors = new HashMap<>(); for (ExecutorDetails executor : executorToComponents.keySet()) { String component = executorToComponents.get(executor); if (!componentToExecutors.containsKey(component)) { componentToExecutors.put(component, new ArrayList<ExecutorDetails>()); } componentToExecutors.get(component).add(executor); } return componentToExecutors; }
java
public Map<String, List<ExecutorDetails>> getNeedsSchedulingComponentToExecutors(TopologyDetails topology) { Map<ExecutorDetails, String> executorToComponents = this.getNeedsSchedulingExecutorToComponents(topology); Map<String, List<ExecutorDetails>> componentToExecutors = new HashMap<>(); for (ExecutorDetails executor : executorToComponents.keySet()) { String component = executorToComponents.get(executor); if (!componentToExecutors.containsKey(component)) { componentToExecutors.put(component, new ArrayList<ExecutorDetails>()); } componentToExecutors.get(component).add(executor); } return componentToExecutors; }
[ "public", "Map", "<", "String", ",", "List", "<", "ExecutorDetails", ">", ">", "getNeedsSchedulingComponentToExecutors", "(", "TopologyDetails", "topology", ")", "{", "Map", "<", "ExecutorDetails", ",", "String", ">", "executorToComponents", "=", "this", ".", "getNeedsSchedulingExecutorToComponents", "(", "topology", ")", ";", "Map", "<", "String", ",", "List", "<", "ExecutorDetails", ">", ">", "componentToExecutors", "=", "new", "HashMap", "<>", "(", ")", ";", "for", "(", "ExecutorDetails", "executor", ":", "executorToComponents", ".", "keySet", "(", ")", ")", "{", "String", "component", "=", "executorToComponents", ".", "get", "(", "executor", ")", ";", "if", "(", "!", "componentToExecutors", ".", "containsKey", "(", "component", ")", ")", "{", "componentToExecutors", ".", "put", "(", "component", ",", "new", "ArrayList", "<", "ExecutorDetails", ">", "(", ")", ")", ";", "}", "componentToExecutors", ".", "get", "(", "component", ")", ".", "add", "(", "executor", ")", ";", "}", "return", "componentToExecutors", ";", "}" ]
Gets a component-id -> executors map which needs scheduling in this topology.
[ "Gets", "a", "component", "-", "id", "-", ">", "executors", "map", "which", "needs", "scheduling", "in", "this", "topology", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L148-L161
25,264
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getUsedPorts
public Set<Integer> getUsedPorts(SupervisorDetails supervisor) { Map<String, SchedulerAssignment> assignments = this.getAssignments(); Set<Integer> usedPorts = new HashSet<>(); for (SchedulerAssignment assignment : assignments.values()) { for (WorkerSlot slot : assignment.getExecutorToSlot().values()) { if (slot.getNodeId().equals(supervisor.getId())) { usedPorts.add(slot.getPort()); } } } return usedPorts; }
java
public Set<Integer> getUsedPorts(SupervisorDetails supervisor) { Map<String, SchedulerAssignment> assignments = this.getAssignments(); Set<Integer> usedPorts = new HashSet<>(); for (SchedulerAssignment assignment : assignments.values()) { for (WorkerSlot slot : assignment.getExecutorToSlot().values()) { if (slot.getNodeId().equals(supervisor.getId())) { usedPorts.add(slot.getPort()); } } } return usedPorts; }
[ "public", "Set", "<", "Integer", ">", "getUsedPorts", "(", "SupervisorDetails", "supervisor", ")", "{", "Map", "<", "String", ",", "SchedulerAssignment", ">", "assignments", "=", "this", ".", "getAssignments", "(", ")", ";", "Set", "<", "Integer", ">", "usedPorts", "=", "new", "HashSet", "<>", "(", ")", ";", "for", "(", "SchedulerAssignment", "assignment", ":", "assignments", ".", "values", "(", ")", ")", "{", "for", "(", "WorkerSlot", "slot", ":", "assignment", ".", "getExecutorToSlot", "(", ")", ".", "values", "(", ")", ")", "{", "if", "(", "slot", ".", "getNodeId", "(", ")", ".", "equals", "(", "supervisor", ".", "getId", "(", ")", ")", ")", "{", "usedPorts", ".", "add", "(", "slot", ".", "getPort", "(", ")", ")", ";", "}", "}", "}", "return", "usedPorts", ";", "}" ]
Get all the used ports of this supervisor.
[ "Get", "all", "the", "used", "ports", "of", "this", "supervisor", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L166-L179
25,265
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getAvailablePorts
public Set<Integer> getAvailablePorts(SupervisorDetails supervisor) { Set<Integer> usedPorts = this.getUsedPorts(supervisor); Set<Integer> ret = new HashSet<>(); ret.addAll(getAssignablePorts(supervisor)); ret.removeAll(usedPorts); return ret; }
java
public Set<Integer> getAvailablePorts(SupervisorDetails supervisor) { Set<Integer> usedPorts = this.getUsedPorts(supervisor); Set<Integer> ret = new HashSet<>(); ret.addAll(getAssignablePorts(supervisor)); ret.removeAll(usedPorts); return ret; }
[ "public", "Set", "<", "Integer", ">", "getAvailablePorts", "(", "SupervisorDetails", "supervisor", ")", "{", "Set", "<", "Integer", ">", "usedPorts", "=", "this", ".", "getUsedPorts", "(", "supervisor", ")", ";", "Set", "<", "Integer", ">", "ret", "=", "new", "HashSet", "<>", "(", ")", ";", "ret", ".", "addAll", "(", "getAssignablePorts", "(", "supervisor", ")", ")", ";", "ret", ".", "removeAll", "(", "usedPorts", ")", ";", "return", "ret", ";", "}" ]
Return the available ports of this supervisor.
[ "Return", "the", "available", "ports", "of", "this", "supervisor", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L184-L192
25,266
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getAvailableSlots
public List<WorkerSlot> getAvailableSlots(SupervisorDetails supervisor) { Set<Integer> ports = this.getAvailablePorts(supervisor); List<WorkerSlot> slots = new ArrayList<>(ports.size()); for (Integer port : ports) { slots.add(new WorkerSlot(supervisor.getId(), port)); } return slots; }
java
public List<WorkerSlot> getAvailableSlots(SupervisorDetails supervisor) { Set<Integer> ports = this.getAvailablePorts(supervisor); List<WorkerSlot> slots = new ArrayList<>(ports.size()); for (Integer port : ports) { slots.add(new WorkerSlot(supervisor.getId(), port)); } return slots; }
[ "public", "List", "<", "WorkerSlot", ">", "getAvailableSlots", "(", "SupervisorDetails", "supervisor", ")", "{", "Set", "<", "Integer", ">", "ports", "=", "this", ".", "getAvailablePorts", "(", "supervisor", ")", ";", "List", "<", "WorkerSlot", ">", "slots", "=", "new", "ArrayList", "<>", "(", "ports", ".", "size", "(", ")", ")", ";", "for", "(", "Integer", "port", ":", "ports", ")", "{", "slots", ".", "add", "(", "new", "WorkerSlot", "(", "supervisor", ".", "getId", "(", ")", ",", "port", ")", ")", ";", "}", "return", "slots", ";", "}" ]
Return all the available slots on this supervisor.
[ "Return", "all", "the", "available", "slots", "on", "this", "supervisor", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L203-L212
25,267
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getUnassignedExecutors
public Collection<ExecutorDetails> getUnassignedExecutors(TopologyDetails topology) { if (topology == null) { return new ArrayList<>(0); } Collection<ExecutorDetails> ret = new HashSet<>(topology.getExecutors()); SchedulerAssignment assignment = this.getAssignmentById(topology.getId()); if (assignment != null) { Set<ExecutorDetails> assignedExecutors = assignment.getExecutors(); ret.removeAll(assignedExecutors); } return ret; }
java
public Collection<ExecutorDetails> getUnassignedExecutors(TopologyDetails topology) { if (topology == null) { return new ArrayList<>(0); } Collection<ExecutorDetails> ret = new HashSet<>(topology.getExecutors()); SchedulerAssignment assignment = this.getAssignmentById(topology.getId()); if (assignment != null) { Set<ExecutorDetails> assignedExecutors = assignment.getExecutors(); ret.removeAll(assignedExecutors); } return ret; }
[ "public", "Collection", "<", "ExecutorDetails", ">", "getUnassignedExecutors", "(", "TopologyDetails", "topology", ")", "{", "if", "(", "topology", "==", "null", ")", "{", "return", "new", "ArrayList", "<>", "(", "0", ")", ";", "}", "Collection", "<", "ExecutorDetails", ">", "ret", "=", "new", "HashSet", "<>", "(", "topology", ".", "getExecutors", "(", ")", ")", ";", "SchedulerAssignment", "assignment", "=", "this", ".", "getAssignmentById", "(", "topology", ".", "getId", "(", ")", ")", ";", "if", "(", "assignment", "!=", "null", ")", "{", "Set", "<", "ExecutorDetails", ">", "assignedExecutors", "=", "assignment", ".", "getExecutors", "(", ")", ";", "ret", ".", "removeAll", "(", "assignedExecutors", ")", ";", "}", "return", "ret", ";", "}" ]
get the unassigned executors of the topology.
[ "get", "the", "unassigned", "executors", "of", "the", "topology", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L228-L242
25,268
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getAssignedNumWorkers
public int getAssignedNumWorkers(TopologyDetails topology) { SchedulerAssignment assignment = this.getAssignmentById(topology.getId()); if (assignment == null) { return 0; } Set<WorkerSlot> slots = new HashSet<>(); slots.addAll(assignment.getExecutorToSlot().values()); return slots.size(); }
java
public int getAssignedNumWorkers(TopologyDetails topology) { SchedulerAssignment assignment = this.getAssignmentById(topology.getId()); if (assignment == null) { return 0; } Set<WorkerSlot> slots = new HashSet<>(); slots.addAll(assignment.getExecutorToSlot().values()); return slots.size(); }
[ "public", "int", "getAssignedNumWorkers", "(", "TopologyDetails", "topology", ")", "{", "SchedulerAssignment", "assignment", "=", "this", ".", "getAssignmentById", "(", "topology", ".", "getId", "(", ")", ")", ";", "if", "(", "assignment", "==", "null", ")", "{", "return", "0", ";", "}", "Set", "<", "WorkerSlot", ">", "slots", "=", "new", "HashSet", "<>", "(", ")", ";", "slots", ".", "addAll", "(", "assignment", ".", "getExecutorToSlot", "(", ")", ".", "values", "(", ")", ")", ";", "return", "slots", ".", "size", "(", ")", ";", "}" ]
Gets the number of workers assigned to this topology.
[ "Gets", "the", "number", "of", "workers", "assigned", "to", "this", "topology", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L247-L257
25,269
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.assign
public void assign(WorkerSlot slot, String topologyId, Collection<ExecutorDetails> executors) { if (this.isSlotOccupied(slot)) { throw new RuntimeException("slot: [" + slot.getNodeId() + ", " + slot.getPort() + "] is already occupied."); } SchedulerAssignmentImpl assignment = (SchedulerAssignmentImpl) this.getAssignmentById(topologyId); if (assignment == null) { assignment = new SchedulerAssignmentImpl(topologyId, new HashMap<ExecutorDetails, WorkerSlot>()); this.assignments.put(topologyId, assignment); } else { for (ExecutorDetails executor : executors) { if (assignment.isExecutorAssigned(executor)) { throw new RuntimeException("the executor is already assigned, you should un-assign it first " + "before assign it to another slot."); } } } assignment.assign(slot, executors); }
java
public void assign(WorkerSlot slot, String topologyId, Collection<ExecutorDetails> executors) { if (this.isSlotOccupied(slot)) { throw new RuntimeException("slot: [" + slot.getNodeId() + ", " + slot.getPort() + "] is already occupied."); } SchedulerAssignmentImpl assignment = (SchedulerAssignmentImpl) this.getAssignmentById(topologyId); if (assignment == null) { assignment = new SchedulerAssignmentImpl(topologyId, new HashMap<ExecutorDetails, WorkerSlot>()); this.assignments.put(topologyId, assignment); } else { for (ExecutorDetails executor : executors) { if (assignment.isExecutorAssigned(executor)) { throw new RuntimeException("the executor is already assigned, you should un-assign it first " + "before assign it to another slot."); } } } assignment.assign(slot, executors); }
[ "public", "void", "assign", "(", "WorkerSlot", "slot", ",", "String", "topologyId", ",", "Collection", "<", "ExecutorDetails", ">", "executors", ")", "{", "if", "(", "this", ".", "isSlotOccupied", "(", "slot", ")", ")", "{", "throw", "new", "RuntimeException", "(", "\"slot: [\"", "+", "slot", ".", "getNodeId", "(", ")", "+", "\", \"", "+", "slot", ".", "getPort", "(", ")", "+", "\"] is already occupied.\"", ")", ";", "}", "SchedulerAssignmentImpl", "assignment", "=", "(", "SchedulerAssignmentImpl", ")", "this", ".", "getAssignmentById", "(", "topologyId", ")", ";", "if", "(", "assignment", "==", "null", ")", "{", "assignment", "=", "new", "SchedulerAssignmentImpl", "(", "topologyId", ",", "new", "HashMap", "<", "ExecutorDetails", ",", "WorkerSlot", ">", "(", ")", ")", ";", "this", ".", "assignments", ".", "put", "(", "topologyId", ",", "assignment", ")", ";", "}", "else", "{", "for", "(", "ExecutorDetails", "executor", ":", "executors", ")", "{", "if", "(", "assignment", ".", "isExecutorAssigned", "(", "executor", ")", ")", "{", "throw", "new", "RuntimeException", "(", "\"the executor is already assigned, you should un-assign it first \"", "+", "\"before assign it to another slot.\"", ")", ";", "}", "}", "}", "assignment", ".", "assign", "(", "slot", ",", "executors", ")", ";", "}" ]
Assign the slot to the executors for this topology. @throws RuntimeException if the specified slot is already occupied.
[ "Assign", "the", "slot", "to", "the", "executors", "for", "this", "topology", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L264-L282
25,270
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getAvailableSlots
public List<WorkerSlot> getAvailableSlots() { List<WorkerSlot> slots = new ArrayList<>(); for (SupervisorDetails supervisor : this.supervisors.values()) { slots.addAll(this.getAvailableSlots(supervisor)); } return slots; }
java
public List<WorkerSlot> getAvailableSlots() { List<WorkerSlot> slots = new ArrayList<>(); for (SupervisorDetails supervisor : this.supervisors.values()) { slots.addAll(this.getAvailableSlots(supervisor)); } return slots; }
[ "public", "List", "<", "WorkerSlot", ">", "getAvailableSlots", "(", ")", "{", "List", "<", "WorkerSlot", ">", "slots", "=", "new", "ArrayList", "<>", "(", ")", ";", "for", "(", "SupervisorDetails", "supervisor", ":", "this", ".", "supervisors", ".", "values", "(", ")", ")", "{", "slots", ".", "addAll", "(", "this", ".", "getAvailableSlots", "(", "supervisor", ")", ")", ";", "}", "return", "slots", ";", "}" ]
Gets all the available slots in the cluster.
[ "Gets", "all", "the", "available", "slots", "in", "the", "cluster", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L287-L294
25,271
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.freeSlot
public void freeSlot(WorkerSlot slot) { // remove the slot from the existing assignments for (SchedulerAssignmentImpl assignment : this.assignments.values()) { if (assignment.isSlotOccupied(slot)) { assignment.unassignBySlot(slot); } } }
java
public void freeSlot(WorkerSlot slot) { // remove the slot from the existing assignments for (SchedulerAssignmentImpl assignment : this.assignments.values()) { if (assignment.isSlotOccupied(slot)) { assignment.unassignBySlot(slot); } } }
[ "public", "void", "freeSlot", "(", "WorkerSlot", "slot", ")", "{", "// remove the slot from the existing assignments", "for", "(", "SchedulerAssignmentImpl", "assignment", ":", "this", ".", "assignments", ".", "values", "(", ")", ")", "{", "if", "(", "assignment", ".", "isSlotOccupied", "(", "slot", ")", ")", "{", "assignment", ".", "unassignBySlot", "(", "slot", ")", ";", "}", "}", "}" ]
Free the specified slot.
[ "Free", "the", "specified", "slot", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L308-L315
25,272
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.freeSlots
public void freeSlots(Collection<WorkerSlot> slots) { if (slots != null) { for (WorkerSlot slot : slots) { this.freeSlot(slot); } } }
java
public void freeSlots(Collection<WorkerSlot> slots) { if (slots != null) { for (WorkerSlot slot : slots) { this.freeSlot(slot); } } }
[ "public", "void", "freeSlots", "(", "Collection", "<", "WorkerSlot", ">", "slots", ")", "{", "if", "(", "slots", "!=", "null", ")", "{", "for", "(", "WorkerSlot", "slot", ":", "slots", ")", "{", "this", ".", "freeSlot", "(", "slot", ")", ";", "}", "}", "}" ]
free the slots.
[ "free", "the", "slots", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L320-L326
25,273
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.isSlotOccupied
public boolean isSlotOccupied(WorkerSlot slot) { for (SchedulerAssignment assignment : this.assignments.values()) { if (assignment.isSlotOccupied(slot)) { return true; } } return false; }
java
public boolean isSlotOccupied(WorkerSlot slot) { for (SchedulerAssignment assignment : this.assignments.values()) { if (assignment.isSlotOccupied(slot)) { return true; } } return false; }
[ "public", "boolean", "isSlotOccupied", "(", "WorkerSlot", "slot", ")", "{", "for", "(", "SchedulerAssignment", "assignment", ":", "this", ".", "assignments", ".", "values", "(", ")", ")", "{", "if", "(", "assignment", ".", "isSlotOccupied", "(", "slot", ")", ")", "{", "return", "true", ";", "}", "}", "return", "false", ";", "}" ]
Checks the specified slot is occupied. @param slot the slot be to checked.
[ "Checks", "the", "specified", "slot", "is", "occupied", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L333-L341
25,274
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getAssignmentById
public SchedulerAssignment getAssignmentById(String topologyId) { if (this.assignments.containsKey(topologyId)) { return this.assignments.get(topologyId); } return null; }
java
public SchedulerAssignment getAssignmentById(String topologyId) { if (this.assignments.containsKey(topologyId)) { return this.assignments.get(topologyId); } return null; }
[ "public", "SchedulerAssignment", "getAssignmentById", "(", "String", "topologyId", ")", "{", "if", "(", "this", ".", "assignments", ".", "containsKey", "(", "topologyId", ")", ")", "{", "return", "this", ".", "assignments", ".", "get", "(", "topologyId", ")", ";", "}", "return", "null", ";", "}" ]
get the current assignment for the topology.
[ "get", "the", "current", "assignment", "for", "the", "topology", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L346-L351
25,275
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java
Cluster.getAssignments
public Map<String, SchedulerAssignment> getAssignments() { Map<String, SchedulerAssignment> ret = new HashMap<>(this.assignments.size()); for (String topologyId : this.assignments.keySet()) { ret.put(topologyId, this.assignments.get(topologyId)); } return ret; }
java
public Map<String, SchedulerAssignment> getAssignments() { Map<String, SchedulerAssignment> ret = new HashMap<>(this.assignments.size()); for (String topologyId : this.assignments.keySet()) { ret.put(topologyId, this.assignments.get(topologyId)); } return ret; }
[ "public", "Map", "<", "String", ",", "SchedulerAssignment", ">", "getAssignments", "(", ")", "{", "Map", "<", "String", ",", "SchedulerAssignment", ">", "ret", "=", "new", "HashMap", "<>", "(", "this", ".", "assignments", ".", "size", "(", ")", ")", ";", "for", "(", "String", "topologyId", ":", "this", ".", "assignments", ".", "keySet", "(", ")", ")", "{", "ret", ".", "put", "(", "topologyId", ",", "this", ".", "assignments", ".", "get", "(", "topologyId", ")", ")", ";", "}", "return", "ret", ";", "}" ]
Get all the assignments.
[ "Get", "all", "the", "assignments", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/scheduler/Cluster.java#L394-L402
25,276
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/DefaultHttpCredentialsPlugin.java
DefaultHttpCredentialsPlugin.getUserName
@Override public String getUserName(HttpServletRequest req) { Principal princ = null; if (req != null && (princ = req.getUserPrincipal()) != null) { String userName = princ.getName(); if (userName != null && !userName.isEmpty()) { LOG.debug("HTTP request had user (" + userName + ")"); return userName; } } return null; }
java
@Override public String getUserName(HttpServletRequest req) { Principal princ = null; if (req != null && (princ = req.getUserPrincipal()) != null) { String userName = princ.getName(); if (userName != null && !userName.isEmpty()) { LOG.debug("HTTP request had user (" + userName + ")"); return userName; } } return null; }
[ "@", "Override", "public", "String", "getUserName", "(", "HttpServletRequest", "req", ")", "{", "Principal", "princ", "=", "null", ";", "if", "(", "req", "!=", "null", "&&", "(", "princ", "=", "req", ".", "getUserPrincipal", "(", ")", ")", "!=", "null", ")", "{", "String", "userName", "=", "princ", ".", "getName", "(", ")", ";", "if", "(", "userName", "!=", "null", "&&", "!", "userName", ".", "isEmpty", "(", ")", ")", "{", "LOG", ".", "debug", "(", "\"HTTP request had user (\"", "+", "userName", "+", "\")\"", ")", ";", "return", "userName", ";", "}", "}", "return", "null", ";", "}" ]
Gets the user name from the request principal. @param req the servlet request @return the authenticated user, or null if none is authenticated
[ "Gets", "the", "user", "name", "from", "the", "request", "principal", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/DefaultHttpCredentialsPlugin.java#L52-L63
25,277
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/security/auth/DefaultHttpCredentialsPlugin.java
DefaultHttpCredentialsPlugin.populateContext
@Override public ReqContext populateContext(ReqContext context, HttpServletRequest req) { String userName = getUserName(req); String doAsUser = req.getHeader("doAsUser"); if (doAsUser == null) { doAsUser = req.getParameter("doAsUser"); } if (doAsUser != null) { context.setRealPrincipal(new SingleUserPrincipal(userName)); userName = doAsUser; } Set<Principal> principals = new HashSet<Principal>(); if (userName != null) { Principal p = new SingleUserPrincipal(userName); principals.add(p); } Subject s = new Subject(true, principals, new HashSet(), new HashSet()); context.setSubject(s); return context; }
java
@Override public ReqContext populateContext(ReqContext context, HttpServletRequest req) { String userName = getUserName(req); String doAsUser = req.getHeader("doAsUser"); if (doAsUser == null) { doAsUser = req.getParameter("doAsUser"); } if (doAsUser != null) { context.setRealPrincipal(new SingleUserPrincipal(userName)); userName = doAsUser; } Set<Principal> principals = new HashSet<Principal>(); if (userName != null) { Principal p = new SingleUserPrincipal(userName); principals.add(p); } Subject s = new Subject(true, principals, new HashSet(), new HashSet()); context.setSubject(s); return context; }
[ "@", "Override", "public", "ReqContext", "populateContext", "(", "ReqContext", "context", ",", "HttpServletRequest", "req", ")", "{", "String", "userName", "=", "getUserName", "(", "req", ")", ";", "String", "doAsUser", "=", "req", ".", "getHeader", "(", "\"doAsUser\"", ")", ";", "if", "(", "doAsUser", "==", "null", ")", "{", "doAsUser", "=", "req", ".", "getParameter", "(", "\"doAsUser\"", ")", ";", "}", "if", "(", "doAsUser", "!=", "null", ")", "{", "context", ".", "setRealPrincipal", "(", "new", "SingleUserPrincipal", "(", "userName", ")", ")", ";", "userName", "=", "doAsUser", ";", "}", "Set", "<", "Principal", ">", "principals", "=", "new", "HashSet", "<", "Principal", ">", "(", ")", ";", "if", "(", "userName", "!=", "null", ")", "{", "Principal", "p", "=", "new", "SingleUserPrincipal", "(", "userName", ")", ";", "principals", ".", "add", "(", "p", ")", ";", "}", "Subject", "s", "=", "new", "Subject", "(", "true", ",", "principals", ",", "new", "HashSet", "(", ")", ",", "new", "HashSet", "(", ")", ")", ";", "context", ".", "setSubject", "(", "s", ")", ";", "return", "context", ";", "}" ]
Populates a given context with a new Subject derived from the credentials in a servlet request. @param context the context to be populated @param req the servlet request @return the context
[ "Populates", "a", "given", "context", "with", "a", "new", "Subject", "derived", "from", "the", "credentials", "in", "a", "servlet", "request", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/security/auth/DefaultHttpCredentialsPlugin.java#L72-L95
25,278
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/task/master/GrayUpgradeHandler.java
GrayUpgradeHandler.run
@Override public void run() { try { GrayUpgradeConfig grayUpgradeConf = (GrayUpgradeConfig) stormClusterState.get_gray_upgrade_conf(topologyId); // no upgrade request if (grayUpgradeConf == null) { LOG.debug("gray upgrade conf is null, skip..."); return; } if (grayUpgradeConf.isCompleted() && !grayUpgradeConf.isRollback()) { LOG.debug("detected a complete upgrade, skip..."); return; } if (grayUpgradeConf.isExpired() && !grayUpgradeConf.isRollback()) { LOG.info("detected an expired upgrade, completing..."); // todo: should we check all task status? GrayUpgradeConfig.completeUpgrade(grayUpgradeConf); //stormClusterState.remove_gray_upgrade_info(topologyId); stormClusterState.set_gray_upgrade_conf(topologyId, grayUpgradeConf); stormClusterState.update_storm(topologyId, new StormStatus(StatusType.active)); return; } // first time, set workers if (this.totalWorkers.size() == 0) { setTotalWorkers(tmContext); } // notify current upgrading workers to upgrade (again) Set<String> upgradingWorkers = Sets.newHashSet(stormClusterState.get_upgrading_workers(topologyId)); if (upgradingWorkers.size() > 0) { LOG.info("Following workers are under upgrade:{}", upgradingWorkers); for (String worker : upgradingWorkers) { notifyToUpgrade(worker); } return; } Set<String> upgradedWorkers = Sets.newHashSet(stormClusterState.get_upgraded_workers(topologyId)); if (grayUpgradeConf.isRollback()) { LOG.info("Rollback has completed, removing upgrade info in zk and updating storm status..."); // there's no way back after a rollback stormClusterState.remove_gray_upgrade_info(topologyId); stormClusterState.update_storm(topologyId, new StormStatus(StatusType.active)); return; } if (isUpgradeCompleted(upgradedWorkers, totalWorkers)) { LOG.info("This upgraded has finished! 
Marking upgrade config as completed..."); GrayUpgradeConfig.completeUpgrade(grayUpgradeConf); stormClusterState.set_gray_upgrade_conf(topologyId, grayUpgradeConf); //stormClusterState.remove_gray_upgrade_info(topologyId); stormClusterState.update_storm(topologyId, new StormStatus(StatusType.active)); return; } // assign next batch of workers if (grayUpgradeConf.continueUpgrading()) { pickWorkersToUpgrade(grayUpgradeConf, upgradedWorkers); } // pause upgrading grayUpgradeConf.setContinueUpgrade(false); stormClusterState.set_gray_upgrade_conf(topologyId, grayUpgradeConf); } catch (Exception ex) { LOG.error("Failed to get upgrade config from zk, will abort this upgrade...", ex); recover(); } }
java
@Override public void run() { try { GrayUpgradeConfig grayUpgradeConf = (GrayUpgradeConfig) stormClusterState.get_gray_upgrade_conf(topologyId); // no upgrade request if (grayUpgradeConf == null) { LOG.debug("gray upgrade conf is null, skip..."); return; } if (grayUpgradeConf.isCompleted() && !grayUpgradeConf.isRollback()) { LOG.debug("detected a complete upgrade, skip..."); return; } if (grayUpgradeConf.isExpired() && !grayUpgradeConf.isRollback()) { LOG.info("detected an expired upgrade, completing..."); // todo: should we check all task status? GrayUpgradeConfig.completeUpgrade(grayUpgradeConf); //stormClusterState.remove_gray_upgrade_info(topologyId); stormClusterState.set_gray_upgrade_conf(topologyId, grayUpgradeConf); stormClusterState.update_storm(topologyId, new StormStatus(StatusType.active)); return; } // first time, set workers if (this.totalWorkers.size() == 0) { setTotalWorkers(tmContext); } // notify current upgrading workers to upgrade (again) Set<String> upgradingWorkers = Sets.newHashSet(stormClusterState.get_upgrading_workers(topologyId)); if (upgradingWorkers.size() > 0) { LOG.info("Following workers are under upgrade:{}", upgradingWorkers); for (String worker : upgradingWorkers) { notifyToUpgrade(worker); } return; } Set<String> upgradedWorkers = Sets.newHashSet(stormClusterState.get_upgraded_workers(topologyId)); if (grayUpgradeConf.isRollback()) { LOG.info("Rollback has completed, removing upgrade info in zk and updating storm status..."); // there's no way back after a rollback stormClusterState.remove_gray_upgrade_info(topologyId); stormClusterState.update_storm(topologyId, new StormStatus(StatusType.active)); return; } if (isUpgradeCompleted(upgradedWorkers, totalWorkers)) { LOG.info("This upgraded has finished! 
Marking upgrade config as completed..."); GrayUpgradeConfig.completeUpgrade(grayUpgradeConf); stormClusterState.set_gray_upgrade_conf(topologyId, grayUpgradeConf); //stormClusterState.remove_gray_upgrade_info(topologyId); stormClusterState.update_storm(topologyId, new StormStatus(StatusType.active)); return; } // assign next batch of workers if (grayUpgradeConf.continueUpgrading()) { pickWorkersToUpgrade(grayUpgradeConf, upgradedWorkers); } // pause upgrading grayUpgradeConf.setContinueUpgrade(false); stormClusterState.set_gray_upgrade_conf(topologyId, grayUpgradeConf); } catch (Exception ex) { LOG.error("Failed to get upgrade config from zk, will abort this upgrade...", ex); recover(); } }
[ "@", "Override", "public", "void", "run", "(", ")", "{", "try", "{", "GrayUpgradeConfig", "grayUpgradeConf", "=", "(", "GrayUpgradeConfig", ")", "stormClusterState", ".", "get_gray_upgrade_conf", "(", "topologyId", ")", ";", "// no upgrade request", "if", "(", "grayUpgradeConf", "==", "null", ")", "{", "LOG", ".", "debug", "(", "\"gray upgrade conf is null, skip...\"", ")", ";", "return", ";", "}", "if", "(", "grayUpgradeConf", ".", "isCompleted", "(", ")", "&&", "!", "grayUpgradeConf", ".", "isRollback", "(", ")", ")", "{", "LOG", ".", "debug", "(", "\"detected a complete upgrade, skip...\"", ")", ";", "return", ";", "}", "if", "(", "grayUpgradeConf", ".", "isExpired", "(", ")", "&&", "!", "grayUpgradeConf", ".", "isRollback", "(", ")", ")", "{", "LOG", ".", "info", "(", "\"detected an expired upgrade, completing...\"", ")", ";", "// todo: should we check all task status?", "GrayUpgradeConfig", ".", "completeUpgrade", "(", "grayUpgradeConf", ")", ";", "//stormClusterState.remove_gray_upgrade_info(topologyId);", "stormClusterState", ".", "set_gray_upgrade_conf", "(", "topologyId", ",", "grayUpgradeConf", ")", ";", "stormClusterState", ".", "update_storm", "(", "topologyId", ",", "new", "StormStatus", "(", "StatusType", ".", "active", ")", ")", ";", "return", ";", "}", "// first time, set workers", "if", "(", "this", ".", "totalWorkers", ".", "size", "(", ")", "==", "0", ")", "{", "setTotalWorkers", "(", "tmContext", ")", ";", "}", "// notify current upgrading workers to upgrade (again)", "Set", "<", "String", ">", "upgradingWorkers", "=", "Sets", ".", "newHashSet", "(", "stormClusterState", ".", "get_upgrading_workers", "(", "topologyId", ")", ")", ";", "if", "(", "upgradingWorkers", ".", "size", "(", ")", ">", "0", ")", "{", "LOG", ".", "info", "(", "\"Following workers are under upgrade:{}\"", ",", "upgradingWorkers", ")", ";", "for", "(", "String", "worker", ":", "upgradingWorkers", ")", "{", "notifyToUpgrade", "(", "worker", ")", ";", "}", "return", ";", "}", 
"Set", "<", "String", ">", "upgradedWorkers", "=", "Sets", ".", "newHashSet", "(", "stormClusterState", ".", "get_upgraded_workers", "(", "topologyId", ")", ")", ";", "if", "(", "grayUpgradeConf", ".", "isRollback", "(", ")", ")", "{", "LOG", ".", "info", "(", "\"Rollback has completed, removing upgrade info in zk and updating storm status...\"", ")", ";", "// there's no way back after a rollback", "stormClusterState", ".", "remove_gray_upgrade_info", "(", "topologyId", ")", ";", "stormClusterState", ".", "update_storm", "(", "topologyId", ",", "new", "StormStatus", "(", "StatusType", ".", "active", ")", ")", ";", "return", ";", "}", "if", "(", "isUpgradeCompleted", "(", "upgradedWorkers", ",", "totalWorkers", ")", ")", "{", "LOG", ".", "info", "(", "\"This upgraded has finished! Marking upgrade config as completed...\"", ")", ";", "GrayUpgradeConfig", ".", "completeUpgrade", "(", "grayUpgradeConf", ")", ";", "stormClusterState", ".", "set_gray_upgrade_conf", "(", "topologyId", ",", "grayUpgradeConf", ")", ";", "//stormClusterState.remove_gray_upgrade_info(topologyId);", "stormClusterState", ".", "update_storm", "(", "topologyId", ",", "new", "StormStatus", "(", "StatusType", ".", "active", ")", ")", ";", "return", ";", "}", "// assign next batch of workers", "if", "(", "grayUpgradeConf", ".", "continueUpgrading", "(", ")", ")", "{", "pickWorkersToUpgrade", "(", "grayUpgradeConf", ",", "upgradedWorkers", ")", ";", "}", "// pause upgrading", "grayUpgradeConf", ".", "setContinueUpgrade", "(", "false", ")", ";", "stormClusterState", ".", "set_gray_upgrade_conf", "(", "topologyId", ",", "grayUpgradeConf", ")", ";", "}", "catch", "(", "Exception", "ex", ")", "{", "LOG", ".", "error", "(", "\"Failed to get upgrade config from zk, will abort this upgrade...\"", ",", "ex", ")", ";", "recover", "(", ")", ";", "}", "}" ]
scheduled runnable callback, called periodically
[ "scheduled", "runnable", "callback", "called", "periodically" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/task/master/GrayUpgradeHandler.java#L86-L157
25,279
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java
RingBuffer.tryPublishEvent
public boolean tryPublishEvent(EventTranslatorVararg<E> translator, Object... args) { try { final long sequence = sequencer.tryNext(); translateAndPublish(translator, sequence, args); return true; } catch (InsufficientCapacityException e) { return false; } }
java
public boolean tryPublishEvent(EventTranslatorVararg<E> translator, Object... args) { try { final long sequence = sequencer.tryNext(); translateAndPublish(translator, sequence, args); return true; } catch (InsufficientCapacityException e) { return false; } }
[ "public", "boolean", "tryPublishEvent", "(", "EventTranslatorVararg", "<", "E", ">", "translator", ",", "Object", "...", "args", ")", "{", "try", "{", "final", "long", "sequence", "=", "sequencer", ".", "tryNext", "(", ")", ";", "translateAndPublish", "(", "translator", ",", "sequence", ",", "args", ")", ";", "return", "true", ";", "}", "catch", "(", "InsufficientCapacityException", "e", ")", "{", "return", "false", ";", "}", "}" ]
Allows a variable number of user supplied arguments @see #publishEvent(EventTranslator) @param translator The user specified translation for the event @param args User supplied arguments. @return true if the value was published, false if there was insufficient capacity.
[ "Allows", "a", "variable", "number", "of", "user", "supplied", "arguments" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java#L499-L507
25,280
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java
RingBuffer.publishEvents
public <A> void publishEvents(EventTranslatorOneArg<E, A> translator, int batchStartsAt, int batchSize, A[] arg0) { checkBounds(arg0, batchStartsAt, batchSize); final long finalSequence = sequencer.next(batchSize); translateAndPublishBatch(translator, arg0, batchStartsAt, batchSize, finalSequence); }
java
public <A> void publishEvents(EventTranslatorOneArg<E, A> translator, int batchStartsAt, int batchSize, A[] arg0) { checkBounds(arg0, batchStartsAt, batchSize); final long finalSequence = sequencer.next(batchSize); translateAndPublishBatch(translator, arg0, batchStartsAt, batchSize, finalSequence); }
[ "public", "<", "A", ">", "void", "publishEvents", "(", "EventTranslatorOneArg", "<", "E", ",", "A", ">", "translator", ",", "int", "batchStartsAt", ",", "int", "batchSize", ",", "A", "[", "]", "arg0", ")", "{", "checkBounds", "(", "arg0", ",", "batchStartsAt", ",", "batchSize", ")", ";", "final", "long", "finalSequence", "=", "sequencer", ".", "next", "(", "batchSize", ")", ";", "translateAndPublishBatch", "(", "translator", ",", "arg0", ",", "batchStartsAt", ",", "batchSize", ",", "finalSequence", ")", ";", "}" ]
Allows one user supplied argument per event. @param translator The user specified translation for each event @param batchStartsAt The first element of the array which is within the batch. @param batchSize The actual size of the batch @param arg0 An array of user supplied arguments, one element per event. @see #publishEvents(EventTranslator[])
[ "Allows", "one", "user", "supplied", "argument", "per", "event", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java#L584-L588
25,281
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java
RingBuffer.publishEvents
public <A, B, C> void publishEvents(EventTranslatorThreeArg<E, A, B, C> translator, int batchStartsAt, int batchSize, A[] arg0, B[] arg1, C[] arg2) { checkBounds(arg0, arg1, arg2, batchStartsAt, batchSize); final long finalSequence = sequencer.next(batchSize); translateAndPublishBatch(translator, arg0, arg1, arg2, batchStartsAt, batchSize, finalSequence); }
java
public <A, B, C> void publishEvents(EventTranslatorThreeArg<E, A, B, C> translator, int batchStartsAt, int batchSize, A[] arg0, B[] arg1, C[] arg2) { checkBounds(arg0, arg1, arg2, batchStartsAt, batchSize); final long finalSequence = sequencer.next(batchSize); translateAndPublishBatch(translator, arg0, arg1, arg2, batchStartsAt, batchSize, finalSequence); }
[ "public", "<", "A", ",", "B", ",", "C", ">", "void", "publishEvents", "(", "EventTranslatorThreeArg", "<", "E", ",", "A", ",", "B", ",", "C", ">", "translator", ",", "int", "batchStartsAt", ",", "int", "batchSize", ",", "A", "[", "]", "arg0", ",", "B", "[", "]", "arg1", ",", "C", "[", "]", "arg2", ")", "{", "checkBounds", "(", "arg0", ",", "arg1", ",", "arg2", ",", "batchStartsAt", ",", "batchSize", ")", ";", "final", "long", "finalSequence", "=", "sequencer", ".", "next", "(", "batchSize", ")", ";", "translateAndPublishBatch", "(", "translator", ",", "arg0", ",", "arg1", ",", "arg2", ",", "batchStartsAt", ",", "batchSize", ",", "finalSequence", ")", ";", "}" ]
Allows three user supplied arguments per event. @param translator The user specified translation for the event @param batchStartsAt The first element of the array which is within the batch. @param batchSize The number of elements in the batch. @param arg0 An array of user supplied arguments, one element per event. @param arg1 An array of user supplied arguments, one element per event. @param arg2 An array of user supplied arguments, one element per event. @see #publishEvents(EventTranslator[])
[ "Allows", "three", "user", "supplied", "arguments", "per", "event", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java#L710-L714
25,282
alibaba/jstorm
jstorm-core/src/main/java/storm/trident/TridentTopology.java
TridentTopology.getDRPCSpoutNode
private static SpoutNode getDRPCSpoutNode(Collection<Node> g) { for(Node n: g) { if(n instanceof SpoutNode) { SpoutNode.SpoutType type = ((SpoutNode) n).type; if(type==SpoutNode.SpoutType.DRPC) { return (SpoutNode) n; } } } return null; }
java
private static SpoutNode getDRPCSpoutNode(Collection<Node> g) { for(Node n: g) { if(n instanceof SpoutNode) { SpoutNode.SpoutType type = ((SpoutNode) n).type; if(type==SpoutNode.SpoutType.DRPC) { return (SpoutNode) n; } } } return null; }
[ "private", "static", "SpoutNode", "getDRPCSpoutNode", "(", "Collection", "<", "Node", ">", "g", ")", "{", "for", "(", "Node", "n", ":", "g", ")", "{", "if", "(", "n", "instanceof", "SpoutNode", ")", "{", "SpoutNode", ".", "SpoutType", "type", "=", "(", "(", "SpoutNode", ")", "n", ")", ".", "type", ";", "if", "(", "type", "==", "SpoutNode", ".", "SpoutType", ".", "DRPC", ")", "{", "return", "(", "SpoutNode", ")", "n", ";", "}", "}", "}", "return", "null", ";", "}" ]
returns null if it's not a drpc group
[ "returns", "null", "if", "it", "s", "not", "a", "drpc", "group" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/storm/trident/TridentTopology.java#L563-L573
25,283
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SandBoxMaker.java
SandBoxMaker.sandboxPolicy
public String sandboxPolicy(String workerId, Map<String, String> replaceMap) throws IOException { if (!isEnable) { return ""; } replaceMap.putAll(replaceBaseMap); String tmpPolicy = generatePolicyFile(replaceMap); File file = new File(tmpPolicy); String policyPath = StormConfig.worker_root(conf, workerId) + File.separator + SANBOX_TEMPLATE_NAME; File dest = new File(policyPath); file.renameTo(dest); StringBuilder sb = new StringBuilder(); sb.append(" -Djava.security.manager -Djava.security.policy="); sb.append(policyPath); return sb.toString(); }
java
public String sandboxPolicy(String workerId, Map<String, String> replaceMap) throws IOException { if (!isEnable) { return ""; } replaceMap.putAll(replaceBaseMap); String tmpPolicy = generatePolicyFile(replaceMap); File file = new File(tmpPolicy); String policyPath = StormConfig.worker_root(conf, workerId) + File.separator + SANBOX_TEMPLATE_NAME; File dest = new File(policyPath); file.renameTo(dest); StringBuilder sb = new StringBuilder(); sb.append(" -Djava.security.manager -Djava.security.policy="); sb.append(policyPath); return sb.toString(); }
[ "public", "String", "sandboxPolicy", "(", "String", "workerId", ",", "Map", "<", "String", ",", "String", ">", "replaceMap", ")", "throws", "IOException", "{", "if", "(", "!", "isEnable", ")", "{", "return", "\"\"", ";", "}", "replaceMap", ".", "putAll", "(", "replaceBaseMap", ")", ";", "String", "tmpPolicy", "=", "generatePolicyFile", "(", "replaceMap", ")", ";", "File", "file", "=", "new", "File", "(", "tmpPolicy", ")", ";", "String", "policyPath", "=", "StormConfig", ".", "worker_root", "(", "conf", ",", "workerId", ")", "+", "File", ".", "separator", "+", "SANBOX_TEMPLATE_NAME", ";", "File", "dest", "=", "new", "File", "(", "policyPath", ")", ";", "file", ".", "renameTo", "(", "dest", ")", ";", "StringBuilder", "sb", "=", "new", "StringBuilder", "(", ")", ";", "sb", ".", "append", "(", "\" -Djava.security.manager -Djava.security.policy=\"", ")", ";", "sb", ".", "append", "(", "policyPath", ")", ";", "return", "sb", ".", "toString", "(", ")", ";", "}" ]
Generate command string
[ "Generate", "command", "string" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/daemon/supervisor/SandBoxMaker.java#L151-L169
25,284
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/task/TopologyContext.java
TopologyContext.setSubscribedState
public <T extends ISubscribedState> T setSubscribedState(String componentId, T obj) { return setSubscribedState(componentId, Utils.DEFAULT_STREAM_ID, obj); }
java
public <T extends ISubscribedState> T setSubscribedState(String componentId, T obj) { return setSubscribedState(componentId, Utils.DEFAULT_STREAM_ID, obj); }
[ "public", "<", "T", "extends", "ISubscribedState", ">", "T", "setSubscribedState", "(", "String", "componentId", ",", "T", "obj", ")", "{", "return", "setSubscribedState", "(", "componentId", ",", "Utils", ".", "DEFAULT_STREAM_ID", ",", "obj", ")", ";", "}" ]
Synchronizes the default stream from the specified state spout component id with the provided ISubscribedState object. The recommended usage of this method is as follows: <pre> _myState = context.setSubscribedState(componentId, new MyState()); </pre> @param componentId the id of the StateSpout component to subscribe to @param obj Provided ISubscribedState implementation @return Returns the ISubscribedState object provided
[ "Synchronizes", "the", "default", "stream", "from", "the", "specified", "state", "spout", "component", "id", "with", "the", "provided", "ISubscribedState", "object", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/task/TopologyContext.java#L109-L111
25,285
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/task/TopologyContext.java
TopologyContext.getThisOutputFieldsForStreams
public Map<String, List<String>> getThisOutputFieldsForStreams() { Map<String, List<String>> streamToFields = new HashMap<>(); for (String stream : this.getThisStreams()) { streamToFields.put(stream, this.getThisOutputFields(stream).toList()); } return streamToFields; }
java
public Map<String, List<String>> getThisOutputFieldsForStreams() { Map<String, List<String>> streamToFields = new HashMap<>(); for (String stream : this.getThisStreams()) { streamToFields.put(stream, this.getThisOutputFields(stream).toList()); } return streamToFields; }
[ "public", "Map", "<", "String", ",", "List", "<", "String", ">", ">", "getThisOutputFieldsForStreams", "(", ")", "{", "Map", "<", "String", ",", "List", "<", "String", ">", ">", "streamToFields", "=", "new", "HashMap", "<>", "(", ")", ";", "for", "(", "String", "stream", ":", "this", ".", "getThisStreams", "(", ")", ")", "{", "streamToFields", ".", "put", "(", "stream", ",", "this", ".", "getThisOutputFields", "(", "stream", ")", ".", "toList", "(", ")", ")", ";", "}", "return", "streamToFields", ";", "}" ]
Gets the declared output fields for the specified stream id for the component this task is a part of.
[ "Gets", "the", "declared", "output", "fields", "for", "the", "specified", "stream", "id", "for", "the", "component", "this", "task", "is", "a", "part", "of", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/task/TopologyContext.java#L157-L163
25,286
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/task/master/metrics/MetricsUploader.java
MetricsUploader.process
@Override public void process(Object event) throws Exception { int secOffset = TimeUtils.secOffset(); if (secOffset < UPLOAD_TIME_OFFSET_SEC) { JStormUtils.sleepMs((UPLOAD_TIME_OFFSET_SEC - secOffset) * 1000); } else if (secOffset == UPLOAD_TIME_OFFSET_SEC) { // do nothing } else { JStormUtils.sleepMs((60 - secOffset + UPLOAD_TIME_OFFSET_SEC) * 1000); } if (topologyMetricContext.getUploadedWorkerNum() > 0) { metricLogger.info("force upload metrics."); mergeAndUploadMetrics(); } }
java
@Override public void process(Object event) throws Exception { int secOffset = TimeUtils.secOffset(); if (secOffset < UPLOAD_TIME_OFFSET_SEC) { JStormUtils.sleepMs((UPLOAD_TIME_OFFSET_SEC - secOffset) * 1000); } else if (secOffset == UPLOAD_TIME_OFFSET_SEC) { // do nothing } else { JStormUtils.sleepMs((60 - secOffset + UPLOAD_TIME_OFFSET_SEC) * 1000); } if (topologyMetricContext.getUploadedWorkerNum() > 0) { metricLogger.info("force upload metrics."); mergeAndUploadMetrics(); } }
[ "@", "Override", "public", "void", "process", "(", "Object", "event", ")", "throws", "Exception", "{", "int", "secOffset", "=", "TimeUtils", ".", "secOffset", "(", ")", ";", "if", "(", "secOffset", "<", "UPLOAD_TIME_OFFSET_SEC", ")", "{", "JStormUtils", ".", "sleepMs", "(", "(", "UPLOAD_TIME_OFFSET_SEC", "-", "secOffset", ")", "*", "1000", ")", ";", "}", "else", "if", "(", "secOffset", "==", "UPLOAD_TIME_OFFSET_SEC", ")", "{", "// do nothing", "}", "else", "{", "JStormUtils", ".", "sleepMs", "(", "(", "60", "-", "secOffset", "+", "UPLOAD_TIME_OFFSET_SEC", ")", "*", "1000", ")", ";", "}", "if", "(", "topologyMetricContext", ".", "getUploadedWorkerNum", "(", ")", ">", "0", ")", "{", "metricLogger", ".", "info", "(", "\"force upload metrics.\"", ")", ";", "mergeAndUploadMetrics", "(", ")", ";", "}", "}" ]
Wait UPLOAD_TIME_OFFSET_SEC sec to ensure we've collected enough metrics from topology workers, note that it's not guaranteed metrics from all workers will be collected. If we miss the offset, we'll sleep until the offset comes next minute.
[ "Wait", "UPLOAD_TIME_OFFSET_SEC", "sec", "to", "ensure", "we", "ve", "collected", "enough", "metrics", "from", "topology", "workers", "note", "that", "it", "s", "not", "guaranteed", "metrics", "from", "all", "workers", "will", "be", "collected", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/task/master/metrics/MetricsUploader.java#L77-L91
25,287
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/utils/LinuxResource.java
LinuxResource.getDiskUsage
public static Double getDiskUsage() { if (!OSInfo.isLinux() && !OSInfo.isMac()) { return 0.0; } try { String output = SystemOperation.exec("df -h " + duHome); if (output != null) { String[] lines = output.split("[\\r\\n]+"); if (lines.length >= 2) { String[] parts = lines[1].split("\\s+"); if (parts.length >= 5) { String pct = parts[4]; if (pct.endsWith("%")) { return Integer.valueOf(pct.substring(0, pct.length() - 1)) / 100.0; } } } } } catch (Exception e) { LOG.warn("failed to get disk usage."); } return 0.0; }
java
public static Double getDiskUsage() { if (!OSInfo.isLinux() && !OSInfo.isMac()) { return 0.0; } try { String output = SystemOperation.exec("df -h " + duHome); if (output != null) { String[] lines = output.split("[\\r\\n]+"); if (lines.length >= 2) { String[] parts = lines[1].split("\\s+"); if (parts.length >= 5) { String pct = parts[4]; if (pct.endsWith("%")) { return Integer.valueOf(pct.substring(0, pct.length() - 1)) / 100.0; } } } } } catch (Exception e) { LOG.warn("failed to get disk usage."); } return 0.0; }
[ "public", "static", "Double", "getDiskUsage", "(", ")", "{", "if", "(", "!", "OSInfo", ".", "isLinux", "(", ")", "&&", "!", "OSInfo", ".", "isMac", "(", ")", ")", "{", "return", "0.0", ";", "}", "try", "{", "String", "output", "=", "SystemOperation", ".", "exec", "(", "\"df -h \"", "+", "duHome", ")", ";", "if", "(", "output", "!=", "null", ")", "{", "String", "[", "]", "lines", "=", "output", ".", "split", "(", "\"[\\\\r\\\\n]+\"", ")", ";", "if", "(", "lines", ".", "length", ">=", "2", ")", "{", "String", "[", "]", "parts", "=", "lines", "[", "1", "]", ".", "split", "(", "\"\\\\s+\"", ")", ";", "if", "(", "parts", ".", "length", ">=", "5", ")", "{", "String", "pct", "=", "parts", "[", "4", "]", ";", "if", "(", "pct", ".", "endsWith", "(", "\"%\"", ")", ")", "{", "return", "Integer", ".", "valueOf", "(", "pct", ".", "substring", "(", "0", ",", "pct", ".", "length", "(", ")", "-", "1", ")", ")", "/", "100.0", ";", "}", "}", "}", "}", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "warn", "(", "\"failed to get disk usage.\"", ")", ";", "}", "return", "0.0", ";", "}" ]
calculate the disk usage at current filesystem @return disk usage, from 0.0 ~ 1.0
[ "calculate", "the", "disk", "usage", "at", "current", "filesystem" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/utils/LinuxResource.java#L213-L235
25,288
alibaba/jstorm
jstorm-core/src/main/java/backtype/storm/StormSubmitter.java
StormSubmitter.submitTopologyWithProgressBar
public static void submitTopologyWithProgressBar(String name, Map stormConf, StormTopology topology, SubmitOptions opts) throws AlreadyAliveException, InvalidTopologyException { /** * progress bar is removed in jstorm */ submitTopology(name, stormConf, topology, opts); }
java
public static void submitTopologyWithProgressBar(String name, Map stormConf, StormTopology topology, SubmitOptions opts) throws AlreadyAliveException, InvalidTopologyException { /** * progress bar is removed in jstorm */ submitTopology(name, stormConf, topology, opts); }
[ "public", "static", "void", "submitTopologyWithProgressBar", "(", "String", "name", ",", "Map", "stormConf", ",", "StormTopology", "topology", ",", "SubmitOptions", "opts", ")", "throws", "AlreadyAliveException", ",", "InvalidTopologyException", "{", "/**\n * progress bar is removed in jstorm\n */", "submitTopology", "(", "name", ",", "stormConf", ",", "topology", ",", "opts", ")", ";", "}" ]
Submits a topology to run on the cluster with a progress bar. A topology runs forever or until explicitly killed. @param name the name of the storm. @param stormConf the topology-specific configuration. See {@link Config}. @param topology the processing to execute. @param opts to manipulate the starting of the topology @throws AlreadyAliveException if a topology with this name is already running @throws InvalidTopologyException if an invalid topology was submitted
[ "Submits", "a", "topology", "to", "run", "on", "the", "cluster", "with", "a", "progress", "bar", ".", "A", "topology", "runs", "forever", "or", "until", "explicitly", "killed", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/backtype/storm/StormSubmitter.java#L194-L200
25,289
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/transactional/state/KeyRangeState.java
KeyRangeState.classifyBatch
private Map<Integer, Map<K, V>> classifyBatch(Map<K, V> batch) { Map<Integer, Map<K, V>> classifiedBatch = new HashMap<Integer, Map<K, V>>(); for (Entry<K, V> entry : batch.entrySet()) { int keyRange = hash(entry.getKey()); Map<K, V> subBatch = classifiedBatch.get(keyRange); if (subBatch == null) { subBatch = new HashMap<K, V>(); classifiedBatch.put(keyRange, subBatch); } subBatch.put(entry.getKey(), entry.getValue()); } return classifiedBatch; }
java
private Map<Integer, Map<K, V>> classifyBatch(Map<K, V> batch) { Map<Integer, Map<K, V>> classifiedBatch = new HashMap<Integer, Map<K, V>>(); for (Entry<K, V> entry : batch.entrySet()) { int keyRange = hash(entry.getKey()); Map<K, V> subBatch = classifiedBatch.get(keyRange); if (subBatch == null) { subBatch = new HashMap<K, V>(); classifiedBatch.put(keyRange, subBatch); } subBatch.put(entry.getKey(), entry.getValue()); } return classifiedBatch; }
[ "private", "Map", "<", "Integer", ",", "Map", "<", "K", ",", "V", ">", ">", "classifyBatch", "(", "Map", "<", "K", ",", "V", ">", "batch", ")", "{", "Map", "<", "Integer", ",", "Map", "<", "K", ",", "V", ">", ">", "classifiedBatch", "=", "new", "HashMap", "<", "Integer", ",", "Map", "<", "K", ",", "V", ">", ">", "(", ")", ";", "for", "(", "Entry", "<", "K", ",", "V", ">", "entry", ":", "batch", ".", "entrySet", "(", ")", ")", "{", "int", "keyRange", "=", "hash", "(", "entry", ".", "getKey", "(", ")", ")", ";", "Map", "<", "K", ",", "V", ">", "subBatch", "=", "classifiedBatch", ".", "get", "(", "keyRange", ")", ";", "if", "(", "subBatch", "==", "null", ")", "{", "subBatch", "=", "new", "HashMap", "<", "K", ",", "V", ">", "(", ")", ";", "classifiedBatch", ".", "put", "(", "keyRange", ",", "subBatch", ")", ";", "}", "subBatch", ".", "put", "(", "entry", ".", "getKey", "(", ")", ",", "entry", ".", "getValue", "(", ")", ")", ";", "}", "return", "classifiedBatch", ";", "}" ]
classify batch into several sub-batches for each key range @param batch covers several key range @return sub-batches which map to different key range
[ "classify", "batch", "into", "several", "sub", "-", "batches", "for", "each", "key", "range" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/transactional/state/KeyRangeState.java#L175-L187
25,290
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/zk/Zookeeper.java
Zookeeper.mkClient
public CuratorFramework mkClient(Map conf, List<String> servers, Object port, String root, final WatcherCallBack watcher) { CuratorFramework fk = Utils.newCurator(conf, servers, port, root); fk.getCuratorListenable().addListener(new CuratorListener() { @Override public void eventReceived(CuratorFramework _fk, CuratorEvent e) throws Exception { if (e.getType().equals(CuratorEventType.WATCHED)) { WatchedEvent event = e.getWatchedEvent(); watcher.execute(event.getState(), event.getType(), event.getPath()); } } }); fk.getUnhandledErrorListenable().addListener(new UnhandledErrorListener() { @Override public void unhandledError(String msg, Throwable error) { String errmsg = "Unrecoverable zookeeper error, halting process: " + msg; LOG.error(errmsg, error); JStormUtils.halt_process(1, "Unrecoverable zookeeper error"); } }); fk.start(); return fk; }
java
public CuratorFramework mkClient(Map conf, List<String> servers, Object port, String root, final WatcherCallBack watcher) { CuratorFramework fk = Utils.newCurator(conf, servers, port, root); fk.getCuratorListenable().addListener(new CuratorListener() { @Override public void eventReceived(CuratorFramework _fk, CuratorEvent e) throws Exception { if (e.getType().equals(CuratorEventType.WATCHED)) { WatchedEvent event = e.getWatchedEvent(); watcher.execute(event.getState(), event.getType(), event.getPath()); } } }); fk.getUnhandledErrorListenable().addListener(new UnhandledErrorListener() { @Override public void unhandledError(String msg, Throwable error) { String errmsg = "Unrecoverable zookeeper error, halting process: " + msg; LOG.error(errmsg, error); JStormUtils.halt_process(1, "Unrecoverable zookeeper error"); } }); fk.start(); return fk; }
[ "public", "CuratorFramework", "mkClient", "(", "Map", "conf", ",", "List", "<", "String", ">", "servers", ",", "Object", "port", ",", "String", "root", ",", "final", "WatcherCallBack", "watcher", ")", "{", "CuratorFramework", "fk", "=", "Utils", ".", "newCurator", "(", "conf", ",", "servers", ",", "port", ",", "root", ")", ";", "fk", ".", "getCuratorListenable", "(", ")", ".", "addListener", "(", "new", "CuratorListener", "(", ")", "{", "@", "Override", "public", "void", "eventReceived", "(", "CuratorFramework", "_fk", ",", "CuratorEvent", "e", ")", "throws", "Exception", "{", "if", "(", "e", ".", "getType", "(", ")", ".", "equals", "(", "CuratorEventType", ".", "WATCHED", ")", ")", "{", "WatchedEvent", "event", "=", "e", ".", "getWatchedEvent", "(", ")", ";", "watcher", ".", "execute", "(", "event", ".", "getState", "(", ")", ",", "event", ".", "getType", "(", ")", ",", "event", ".", "getPath", "(", ")", ")", ";", "}", "}", "}", ")", ";", "fk", ".", "getUnhandledErrorListenable", "(", ")", ".", "addListener", "(", "new", "UnhandledErrorListener", "(", ")", "{", "@", "Override", "public", "void", "unhandledError", "(", "String", "msg", ",", "Throwable", "error", ")", "{", "String", "errmsg", "=", "\"Unrecoverable zookeeper error, halting process: \"", "+", "msg", ";", "LOG", ".", "error", "(", "errmsg", ",", "error", ")", ";", "JStormUtils", ".", "halt_process", "(", "1", ",", "\"Unrecoverable zookeeper error\"", ")", ";", "}", "}", ")", ";", "fk", ".", "start", "(", ")", ";", "return", "fk", ";", "}" ]
connect ZK, register watchers
[ "connect", "ZK", "register", "watchers" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/zk/Zookeeper.java#L62-L90
25,291
alibaba/jstorm
jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/blobstore/HdfsBlobStore.java
HdfsBlobStore.prepareInternal
protected void prepareInternal(Map conf, String overrideBase, Configuration hadoopConf) { this.conf = conf; if (overrideBase == null) { overrideBase = (String)conf.get(Config.BLOBSTORE_DIR); } if (overrideBase == null) { throw new RuntimeException("You must specify a blobstore directory for HDFS to use!"); } LOG.debug("directory is: {}", overrideBase); // try { // // if a HDFS keytab/principal have been supplied login, otherwise assume they are // // logged in already or running insecure HDFS. // String principal = (String) conf.get(Config.BLOBSTORE_HDFS_PRINCIPAL); // String keyTab = (String) conf.get(Config.BLOBSTORE_HDFS_KEYTAB); // // if (principal != null && keyTab != null) { // UserGroupInformation.loginUserFromKeytab(principal, keyTab); // } else { // if (principal == null && keyTab != null) { // throw new RuntimeException("You must specify an HDFS principal to go with the keytab!"); // // } else { // if (principal != null && keyTab == null) { // throw new RuntimeException("You must specify HDFS keytab go with the principal!"); // } // } // } // } catch (IOException e) { // throw new RuntimeException("Error logging in from keytab!", e); // } Path baseDir = new Path(overrideBase, BASE_BLOBS_DIR_NAME); try { if (hadoopConf != null) { _hbs = new HdfsBlobStoreImpl(baseDir, conf, hadoopConf); } else { _hbs = new HdfsBlobStoreImpl(baseDir, conf); } } catch (IOException e) { throw new RuntimeException(e); } // _localSubject = getHadoopUser(); }
java
protected void prepareInternal(Map conf, String overrideBase, Configuration hadoopConf) { this.conf = conf; if (overrideBase == null) { overrideBase = (String)conf.get(Config.BLOBSTORE_DIR); } if (overrideBase == null) { throw new RuntimeException("You must specify a blobstore directory for HDFS to use!"); } LOG.debug("directory is: {}", overrideBase); // try { // // if a HDFS keytab/principal have been supplied login, otherwise assume they are // // logged in already or running insecure HDFS. // String principal = (String) conf.get(Config.BLOBSTORE_HDFS_PRINCIPAL); // String keyTab = (String) conf.get(Config.BLOBSTORE_HDFS_KEYTAB); // // if (principal != null && keyTab != null) { // UserGroupInformation.loginUserFromKeytab(principal, keyTab); // } else { // if (principal == null && keyTab != null) { // throw new RuntimeException("You must specify an HDFS principal to go with the keytab!"); // // } else { // if (principal != null && keyTab == null) { // throw new RuntimeException("You must specify HDFS keytab go with the principal!"); // } // } // } // } catch (IOException e) { // throw new RuntimeException("Error logging in from keytab!", e); // } Path baseDir = new Path(overrideBase, BASE_BLOBS_DIR_NAME); try { if (hadoopConf != null) { _hbs = new HdfsBlobStoreImpl(baseDir, conf, hadoopConf); } else { _hbs = new HdfsBlobStoreImpl(baseDir, conf); } } catch (IOException e) { throw new RuntimeException(e); } // _localSubject = getHadoopUser(); }
[ "protected", "void", "prepareInternal", "(", "Map", "conf", ",", "String", "overrideBase", ",", "Configuration", "hadoopConf", ")", "{", "this", ".", "conf", "=", "conf", ";", "if", "(", "overrideBase", "==", "null", ")", "{", "overrideBase", "=", "(", "String", ")", "conf", ".", "get", "(", "Config", ".", "BLOBSTORE_DIR", ")", ";", "}", "if", "(", "overrideBase", "==", "null", ")", "{", "throw", "new", "RuntimeException", "(", "\"You must specify a blobstore directory for HDFS to use!\"", ")", ";", "}", "LOG", ".", "debug", "(", "\"directory is: {}\"", ",", "overrideBase", ")", ";", "// try {", "// // if a HDFS keytab/principal have been supplied login, otherwise assume they are", "// // logged in already or running insecure HDFS.", "// String principal = (String) conf.get(Config.BLOBSTORE_HDFS_PRINCIPAL);", "// String keyTab = (String) conf.get(Config.BLOBSTORE_HDFS_KEYTAB);", "//", "// if (principal != null && keyTab != null) {", "// UserGroupInformation.loginUserFromKeytab(principal, keyTab);", "// } else {", "// if (principal == null && keyTab != null) {", "// throw new RuntimeException(\"You must specify an HDFS principal to go with the keytab!\");", "//", "// } else {", "// if (principal != null && keyTab == null) {", "// throw new RuntimeException(\"You must specify HDFS keytab go with the principal!\");", "// }", "// }", "// }", "// } catch (IOException e) {", "// throw new RuntimeException(\"Error logging in from keytab!\", e);", "// }", "Path", "baseDir", "=", "new", "Path", "(", "overrideBase", ",", "BASE_BLOBS_DIR_NAME", ")", ";", "try", "{", "if", "(", "hadoopConf", "!=", "null", ")", "{", "_hbs", "=", "new", "HdfsBlobStoreImpl", "(", "baseDir", ",", "conf", ",", "hadoopConf", ")", ";", "}", "else", "{", "_hbs", "=", "new", "HdfsBlobStoreImpl", "(", "baseDir", ",", "conf", ")", ";", "}", "}", "catch", "(", "IOException", "e", ")", "{", "throw", "new", "RuntimeException", "(", "e", ")", ";", "}", "// _localSubject = getHadoopUser();", "}" ]
Allow a Hadoop Configuration to be passed for testing. If it's null then the hadoop configs must be in your classpath.
[ "Allow", "a", "Hadoop", "Configuration", "to", "be", "passed", "for", "testing", ".", "If", "it", "s", "null", "then", "the", "hadoop", "configs", "must", "be", "in", "your", "classpath", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/blobstore/HdfsBlobStore.java#L112-L153
25,292
alibaba/jstorm
jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/trident/HdfsState.java
HdfsState.getTxnRecord
private TxnRecord getTxnRecord(Path indexFilePath) throws IOException { Path tmpPath = tmpFilePath(indexFilePath.toString()); if (this.options.fs.exists(indexFilePath)) { return readTxnRecord(indexFilePath); } else if (this.options.fs.exists(tmpPath)) { return readTxnRecord(tmpPath); } return new TxnRecord(0, options.currentFile.toString(), 0); }
java
private TxnRecord getTxnRecord(Path indexFilePath) throws IOException { Path tmpPath = tmpFilePath(indexFilePath.toString()); if (this.options.fs.exists(indexFilePath)) { return readTxnRecord(indexFilePath); } else if (this.options.fs.exists(tmpPath)) { return readTxnRecord(tmpPath); } return new TxnRecord(0, options.currentFile.toString(), 0); }
[ "private", "TxnRecord", "getTxnRecord", "(", "Path", "indexFilePath", ")", "throws", "IOException", "{", "Path", "tmpPath", "=", "tmpFilePath", "(", "indexFilePath", ".", "toString", "(", ")", ")", ";", "if", "(", "this", ".", "options", ".", "fs", ".", "exists", "(", "indexFilePath", ")", ")", "{", "return", "readTxnRecord", "(", "indexFilePath", ")", ";", "}", "else", "if", "(", "this", ".", "options", ".", "fs", ".", "exists", "(", "tmpPath", ")", ")", "{", "return", "readTxnRecord", "(", "tmpPath", ")", ";", "}", "return", "new", "TxnRecord", "(", "0", ",", "options", ".", "currentFile", ".", "toString", "(", ")", ",", "0", ")", ";", "}" ]
Reads the last txn record from index file if it exists, if not from .tmp file if exists. @param indexFilePath the index file path @return the txn record from the index file or a default initial record. @throws IOException
[ "Reads", "the", "last", "txn", "record", "from", "index", "file", "if", "it", "exists", "if", "not", "from", ".", "tmp", "file", "if", "exists", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/trident/HdfsState.java#L470-L478
25,293
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/common/metric/codahale/EWMA.java
EWMA.tick
public void tick() { final long count = uncounted.sumThenReset(); final double instantRate = count / interval; if (initialized) { rate += (alpha * (instantRate - rate)); } else { rate = instantRate; initialized = true; } }
java
public void tick() { final long count = uncounted.sumThenReset(); final double instantRate = count / interval; if (initialized) { rate += (alpha * (instantRate - rate)); } else { rate = instantRate; initialized = true; } }
[ "public", "void", "tick", "(", ")", "{", "final", "long", "count", "=", "uncounted", ".", "sumThenReset", "(", ")", ";", "final", "double", "instantRate", "=", "count", "/", "interval", ";", "if", "(", "initialized", ")", "{", "rate", "+=", "(", "alpha", "*", "(", "instantRate", "-", "rate", ")", ")", ";", "}", "else", "{", "rate", "=", "instantRate", ";", "initialized", "=", "true", ";", "}", "}" ]
Mark the passage of time and decay the current rate accordingly.
[ "Mark", "the", "passage", "of", "time", "and", "decay", "the", "current", "rate", "accordingly", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/common/metric/codahale/EWMA.java#L94-L103
25,294
alibaba/jstorm
example/sequence-split-merge/src/main/java/org/apache/storm/starter/tools/Rankings.java
Rankings.pruneZeroCounts
public void pruneZeroCounts() { int i = 0; while (i < rankedItems.size()) { if (rankedItems.get(i).getCount() == 0) { rankedItems.remove(i); } else { i++; } } }
java
public void pruneZeroCounts() { int i = 0; while (i < rankedItems.size()) { if (rankedItems.get(i).getCount() == 0) { rankedItems.remove(i); } else { i++; } } }
[ "public", "void", "pruneZeroCounts", "(", ")", "{", "int", "i", "=", "0", ";", "while", "(", "i", "<", "rankedItems", ".", "size", "(", ")", ")", "{", "if", "(", "rankedItems", ".", "get", "(", "i", ")", ".", "getCount", "(", ")", "==", "0", ")", "{", "rankedItems", ".", "remove", "(", "i", ")", ";", "}", "else", "{", "i", "++", ";", "}", "}", "}" ]
Removes ranking entries that have a count of zero.
[ "Removes", "ranking", "entries", "that", "have", "a", "count", "of", "zero", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/example/sequence-split-merge/src/main/java/org/apache/storm/starter/tools/Rankings.java#L138-L147
25,295
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/blobstore/BlobStore.java
BlobStore.validateKey
public static final void validateKey(String key) throws IllegalArgumentException { if (StringUtils.isEmpty(key) || "..".equals(key) || ".".equals(key) || !KEY_PATTERN.matcher(key).matches()) { LOG.error("'{}' does not appear to be valid {}", key, KEY_PATTERN); throw new IllegalArgumentException(key+" does not appear to be a valid blob key"); } }
java
public static final void validateKey(String key) throws IllegalArgumentException { if (StringUtils.isEmpty(key) || "..".equals(key) || ".".equals(key) || !KEY_PATTERN.matcher(key).matches()) { LOG.error("'{}' does not appear to be valid {}", key, KEY_PATTERN); throw new IllegalArgumentException(key+" does not appear to be a valid blob key"); } }
[ "public", "static", "final", "void", "validateKey", "(", "String", "key", ")", "throws", "IllegalArgumentException", "{", "if", "(", "StringUtils", ".", "isEmpty", "(", "key", ")", "||", "\"..\"", ".", "equals", "(", "key", ")", "||", "\".\"", ".", "equals", "(", "key", ")", "||", "!", "KEY_PATTERN", ".", "matcher", "(", "key", ")", ".", "matches", "(", ")", ")", "{", "LOG", ".", "error", "(", "\"'{}' does not appear to be valid {}\"", ",", "key", ",", "KEY_PATTERN", ")", ";", "throw", "new", "IllegalArgumentException", "(", "key", "+", "\" does not appear to be a valid blob key\"", ")", ";", "}", "}" ]
Validates key checking for potentially harmful patterns @param key Key for the blob.
[ "Validates", "key", "checking", "for", "potentially", "harmful", "patterns" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/blobstore/BlobStore.java#L149-L154
25,296
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/blobstore/BlobStore.java
BlobStore.readBlobTo
public void readBlobTo(String key, OutputStream out) throws IOException, KeyNotFoundException { InputStreamWithMeta in = getBlob(key); if (in == null) { throw new IOException("Could not find " + key); } byte[] buffer = new byte[2048]; int len = 0; try{ while ((len = in.read(buffer)) > 0) { out.write(buffer, 0, len); } } finally { in.close(); out.flush(); } }
java
public void readBlobTo(String key, OutputStream out) throws IOException, KeyNotFoundException { InputStreamWithMeta in = getBlob(key); if (in == null) { throw new IOException("Could not find " + key); } byte[] buffer = new byte[2048]; int len = 0; try{ while ((len = in.read(buffer)) > 0) { out.write(buffer, 0, len); } } finally { in.close(); out.flush(); } }
[ "public", "void", "readBlobTo", "(", "String", "key", ",", "OutputStream", "out", ")", "throws", "IOException", ",", "KeyNotFoundException", "{", "InputStreamWithMeta", "in", "=", "getBlob", "(", "key", ")", ";", "if", "(", "in", "==", "null", ")", "{", "throw", "new", "IOException", "(", "\"Could not find \"", "+", "key", ")", ";", "}", "byte", "[", "]", "buffer", "=", "new", "byte", "[", "2048", "]", ";", "int", "len", "=", "0", ";", "try", "{", "while", "(", "(", "len", "=", "in", ".", "read", "(", "buffer", ")", ")", ">", "0", ")", "{", "out", ".", "write", "(", "buffer", ",", "0", ",", "len", ")", ";", "}", "}", "finally", "{", "in", ".", "close", "(", ")", ";", "out", ".", "flush", "(", ")", ";", "}", "}" ]
Reads the blob from the blob store and writes it into the output stream. @param key Key for the blob. @param out Output stream privilege for the blob. @throws IOException @throws KeyNotFoundException
[ "Reads", "the", "blob", "from", "the", "blob", "store", "and", "writes", "it", "into", "the", "output", "stream", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/blobstore/BlobStore.java#L217-L232
25,297
alibaba/jstorm
jstorm-core/src/main/java/com/alibaba/jstorm/blobstore/BlobStore.java
BlobStore.readBlob
public byte[] readBlob(String key) throws IOException, KeyNotFoundException{ ByteArrayOutputStream out = new ByteArrayOutputStream(); readBlobTo(key, out); byte[] bytes = out.toByteArray(); out.close(); return bytes; }
java
public byte[] readBlob(String key) throws IOException, KeyNotFoundException{ ByteArrayOutputStream out = new ByteArrayOutputStream(); readBlobTo(key, out); byte[] bytes = out.toByteArray(); out.close(); return bytes; }
[ "public", "byte", "[", "]", "readBlob", "(", "String", "key", ")", "throws", "IOException", ",", "KeyNotFoundException", "{", "ByteArrayOutputStream", "out", "=", "new", "ByteArrayOutputStream", "(", ")", ";", "readBlobTo", "(", "key", ",", "out", ")", ";", "byte", "[", "]", "bytes", "=", "out", ".", "toByteArray", "(", ")", ";", "out", ".", "close", "(", ")", ";", "return", "bytes", ";", "}" ]
Wrapper around readBlobTo which returns a ByteArray output stream. @param key Key for the blob. the read privilege for the blob. @return ByteArrayOutputStream @throws IOException @throws KeyNotFoundException
[ "Wrapper", "around", "readBlobTo", "which", "returns", "a", "ByteArray", "output", "stream", "." ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-core/src/main/java/com/alibaba/jstorm/blobstore/BlobStore.java#L243-L249
25,298
alibaba/jstorm
jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/spout/FileLock.java
FileLock.logProgress
private void logProgress(String fileOffset, boolean prefixNewLine) throws IOException { long now = System.currentTimeMillis(); LogEntry entry = new LogEntry(now, componentID, fileOffset); String line = entry.toString(); if(prefixNewLine) { lockFileStream.writeBytes(System.lineSeparator() + line); } else { lockFileStream.writeBytes(line); } lockFileStream.hflush(); lastEntry = entry; // update this only after writing to hdfs }
java
private void logProgress(String fileOffset, boolean prefixNewLine) throws IOException { long now = System.currentTimeMillis(); LogEntry entry = new LogEntry(now, componentID, fileOffset); String line = entry.toString(); if(prefixNewLine) { lockFileStream.writeBytes(System.lineSeparator() + line); } else { lockFileStream.writeBytes(line); } lockFileStream.hflush(); lastEntry = entry; // update this only after writing to hdfs }
[ "private", "void", "logProgress", "(", "String", "fileOffset", ",", "boolean", "prefixNewLine", ")", "throws", "IOException", "{", "long", "now", "=", "System", ".", "currentTimeMillis", "(", ")", ";", "LogEntry", "entry", "=", "new", "LogEntry", "(", "now", ",", "componentID", ",", "fileOffset", ")", ";", "String", "line", "=", "entry", ".", "toString", "(", ")", ";", "if", "(", "prefixNewLine", ")", "{", "lockFileStream", ".", "writeBytes", "(", "System", ".", "lineSeparator", "(", ")", "+", "line", ")", ";", "}", "else", "{", "lockFileStream", ".", "writeBytes", "(", "line", ")", ";", "}", "lockFileStream", ".", "hflush", "(", ")", ";", "lastEntry", "=", "entry", ";", "// update this only after writing to hdfs", "}" ]
partial writes of prior lines
[ "partial", "writes", "of", "prior", "lines" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/spout/FileLock.java#L79-L93
25,299
alibaba/jstorm
jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/spout/FileLock.java
FileLock.release
public void release() throws IOException { lockFileStream.close(); if(!fs.delete(lockFile, false)) { LOG.warn("Unable to delete lock file, Spout = {}", componentID); throw new IOException("Unable to delete lock file"); } LOG.debug("Released lock file {}. Spout {}", lockFile, componentID); }
java
public void release() throws IOException { lockFileStream.close(); if(!fs.delete(lockFile, false)) { LOG.warn("Unable to delete lock file, Spout = {}", componentID); throw new IOException("Unable to delete lock file"); } LOG.debug("Released lock file {}. Spout {}", lockFile, componentID); }
[ "public", "void", "release", "(", ")", "throws", "IOException", "{", "lockFileStream", ".", "close", "(", ")", ";", "if", "(", "!", "fs", ".", "delete", "(", "lockFile", ",", "false", ")", ")", "{", "LOG", ".", "warn", "(", "\"Unable to delete lock file, Spout = {}\"", ",", "componentID", ")", ";", "throw", "new", "IOException", "(", "\"Unable to delete lock file\"", ")", ";", "}", "LOG", ".", "debug", "(", "\"Released lock file {}. Spout {}\"", ",", "lockFile", ",", "componentID", ")", ";", "}" ]
Release lock by deleting file @throws IOException if lock file could not be deleted
[ "Release", "lock", "by", "deleting", "file" ]
5d6cde22dbca7df3d6e6830bf94f98a6639ab559
https://github.com/alibaba/jstorm/blob/5d6cde22dbca7df3d6e6830bf94f98a6639ab559/jstorm-hdfs/src/main/java/com/alibaba/jstorm/hdfs/spout/FileLock.java#L98-L105